Compare commits

..

271 Commits

Author SHA1 Message Date
Jeremy Long
d529e88242 version 1.3.0 2015-08-04 21:19:19 -04:00
Jeremy Long
5d6ad8bc6c added gradle to the site 2015-08-04 14:07:34 -04:00
Jeremy Long
01769a6f38 added gradle to the site 2015-08-04 14:07:15 -04:00
Jeremy Long
a43cc38739 added logo 2015-08-04 12:07:11 -04:00
Jeremy Long
085069c2c7 added logo 2015-08-04 12:06:55 -04:00
Jeremy Long
816a6d057a updated so that *.sh files are marked as executable 2015-08-02 07:24:36 -04:00
Jeremy Long
19ab099f98 added 0755 to *.sh 2015-08-02 07:10:20 -04:00
Jeremy Long
99a1097953 making the world a better place - avoid the hate #287 2015-08-02 06:54:10 -04:00
Jeremy Long
673cf7018b initial gradle site documentation 2015-08-02 06:33:45 -04:00
Jeremy Long
624f52882e modified how generated keys were returned in an attempt to resolve issue #296 2015-08-02 06:28:05 -04:00
Jeremy Long
654ab4a289 added cleanup steps to remove temp resources, etc. 2015-08-01 07:01:50 -04:00
Jeremy Long
ec86dc5734 changed packaging 2015-08-01 07:01:05 -04:00
Jeremy Long
44f37c12c3 checkstyle corrections 2015-08-01 06:49:55 -04:00
Jeremy Long
d0b704d7f4 corrected data directory path when running test cases 2015-08-01 06:08:35 -04:00
Jeremy Long
7452942091 Merge pull request #297 from dwvisser/broken-cli-docs-link
Fixed broken link to CLI instructions in main project README.
2015-08-01 05:52:00 -04:00
Dale Visser
ea4ea680a2 Fixed broken link to CLI instructions in main project README. 2015-07-31 10:28:55 -04:00
Jeremy Long
ac7ae3d8df disabled test case for JDK6 to resolve issue #294 2015-07-31 05:28:53 -04:00
Jeremy Long
4fa5f2ac72 reverted plugin-plugin version to resolve issue #294 2015-07-31 05:28:18 -04:00
Jeremy Long
8b1a44fe42 Merge pull request #295 from colezlaw/issue-294
Removed i18n for SLF4J logging as it was preventing build on jdk1.6
2015-07-30 05:35:46 -04:00
Will Stranathan
c4d26f9194 Removed i18n for SLF4J logging as it was preventing build on jdk1.6 2015-07-29 18:29:09 -04:00
Jeremy Long
79b59f2aae checkstyle/pmd/findbugs recommended updates 2015-07-29 07:00:30 -04:00
Jeremy Long
1eecd13ea7 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2015-07-28 06:33:52 -04:00
Jeremy Long
1ae3a63f5c final tested patch for issue #285 2015-07-28 06:33:40 -04:00
Jeremy Long
561694a991 Merge pull request #292 from willis7/master
Fix bug introduced with #282
2015-07-28 06:07:48 -04:00
Jeremy Long
096af016ef documented shallow clone per issue #283 2015-07-28 05:40:08 -04:00
Sion Williams
498835015a outputDirectory should have been called using the method rather than directly calling the property. This now fixes the failing integration test in the last commit. 2015-07-27 22:33:39 +01:00
Sion Williams
cafa0d6578 Integration test spec proves outputDirectory value is not being honoured when changed using dsl. 2015-07-27 22:03:19 +01:00
Jeremy Long
5444253ed6 added more CVSS details per issue #154 2015-07-27 06:56:23 -04:00
Jeremy Long
d0ae12a167 working patch for issue #285 2015-07-27 06:53:50 -04:00
Jeremy Long
e323c7f810 patch for issue #285 2015-07-25 07:18:33 -04:00
Jeremy Long
dd3758af43 manually merged PR #270
Former-commit-id: 9d4504942f229a7462d59b292e478ea2289ecfed
2015-07-24 06:44:54 -04:00
Jeremy Long
94ae6e76f1 manually merged PR #282
Former-commit-id: cf2ae5572602f7258b4c617097ab003fe3f644c6
2015-07-24 05:50:55 -04:00
Jeremy Long
ee969a5ed9 manually merged PR #278
Former-commit-id: 7811e5832e680589e5b1734e775ee8cab92dd30f
2015-07-24 05:36:32 -04:00
Jeremy Long
a547a219a4 grammar police caught me (#285)
Former-commit-id: 83a94b573aea686ba8cc8c6620e1495c46cd425b
2015-07-23 05:16:39 -04:00
Jeremy Long
d4eba634ea reduced logging levels in test as build issue was resolved
Former-commit-id: e1c8cd3ef9f19d5c0d17dc59d6feed6474118602
2015-07-22 06:45:17 -04:00
Jeremy Long
0927897451 documented argument for new config option to limit symbolic link depth per #285
Former-commit-id: cf8775d8d539c37e68f7c34692aa5c249b4cef26
2015-07-22 06:44:51 -04:00
Jeremy Long
63e5a2c5ba initial patch to add symbolic link depth per issue #285; more testing needs to be performed
Former-commit-id: bc40f8cfc1410c46e402ce6931e53f377b5c60ee
2015-07-22 06:43:52 -04:00
Jeremy Long
a7f3f1d806 initial patch to add symbolic link depth per issue #285; more testing needs to be performed
Former-commit-id: f0fb24c29703f7ec390f94560a224adf936f898c
2015-07-22 06:43:41 -04:00
Jeremy Long
97d3a2986c corrected merge conflict
Former-commit-id: 5653364b7053ae119dd2d0a604258ccd80f4a061
2015-07-21 07:06:11 -04:00
Jeremy Long
d3b20757ef changed to BaseDBTest instead of BaseTest to resolve build errors per issue #273
Former-commit-id: 392b26cac4595d6dd6c9a4ffbd2d76f851c8c7ed
2015-07-21 07:04:39 -04:00
Jeremy Long
d99804f14e Merge pull request #284 from dwvisser/remove-dependency-extension-property
Remove dependency file extension property

Former-commit-id: 4f21b07f05c0ff74918d1394afaa7cedd693ecf0
2015-07-20 05:35:49 -04:00
Dale Visser
1b8dc71980 Merge branch 'upmaster' into remove-dependency-extension-property
Former-commit-id: b06adaf9fa3031c27be08523b9689ae58d0cc322
2015-07-19 08:06:43 -04:00
Jeremy Long
dc466f1480 set objects to null after closing them
Former-commit-id: 3d62a1b66741d69730e39413b8d99b670d744b50
2015-07-19 06:21:30 -04:00
Jeremy Long
ba6a783834 update to close to set objects to null
Former-commit-id: 9c1caaeca9ca0a472180eb6bf4bb7a7c7fba6e6e
2015-07-19 05:49:06 -04:00
Jeremy Long
b2edf5683c minor reformat
Former-commit-id: b5431379bfd281a25a11cd17c203bfbe294c55a8
2015-07-19 05:48:27 -04:00
Jeremy Long
dacb91b9a8 cleaned up test cases to properly close the DB
Former-commit-id: 7333e882aebfe54b94a4d70cdb55ca2fbd3f3c51
2015-07-17 15:31:00 -04:00
Jeremy Long
7ac71a7b2a additional logging
Former-commit-id: 6c91f3482dfe9980d33b1b8ac6c43c6988c08e60
2015-07-17 15:30:31 -04:00
Jeremy Long
4b44bb5426 modified test case
Former-commit-id: 5499e7a8021b0b24d4db640e1381a94d9c02dc0c
2015-07-17 15:29:57 -04:00
Jeremy Long
8cd68c7c16 improved logging
Former-commit-id: 5b6741a1d561cbf4c20ef98907ce99a2b245b42e
2015-07-17 08:45:43 -04:00
Jeremy Long
0ae228d6f8 added try/catch to tests to correctly close the db
Former-commit-id: 8f71f57a7724340a8526a35bd0e42748f02530c5
2015-07-17 08:45:33 -04:00
Jeremy Long
6a2ed23822 switched to debug logging in test
Former-commit-id: 4d42f08ac2fdc58ce1eeb4ab3f27d1efa72a6a46
2015-07-17 08:24:33 -04:00
Jeremy Long
fe0035fe0e fixed namespaces
Former-commit-id: 8f6f3361021b2efc1843ae93ed1bab44e5f053a4
2015-07-16 07:00:17 -04:00
Jeremy Long
aeabaf8513 fixed failing test due to renaming the schema
Former-commit-id: 2cbc4b84cb28e72c163cde3b2d1e5f8c66ae5b42
2015-07-16 06:51:40 -04:00
Jeremy Long
43907e07c2 Merge pull request #279 from dwvisser/fix-properties-file
Eliminated duplicate key in dependencycheck.properties file

Former-commit-id: a5fcb23d750c366340c8a6af801b9ac6e9ac19b8
2015-07-15 08:20:27 -04:00
Jeremy Long
2413dc9a41 corrected package
Former-commit-id: 3d7e08e0730cb6c03eb43221dc77afc3b1a3c2d3
2015-07-15 08:18:02 -04:00
Dale Visser
2d92c9d240 Commented out first instance of cpe.url, and moved 2nd instance up. Assumption: the 2nd value was being used.
Former-commit-id: 2cae0ca086b2fb666d883dc1a3fbcb174465aa2b
2015-07-14 18:00:45 -04:00
Jeremy Long
a24813b678 updated schema and xml report to include the confidence and type of evidence
Former-commit-id: 774764585a15d8d78a615f20f91c3a8aaaf4abb2
2015-07-14 08:01:22 -04:00
Jeremy Long
dffb2887d6 added task to copy xsd to the site
Former-commit-id: ec6bffd48bc2b98cb5b992ebcc0862e89897c255
2015-07-14 08:00:42 -04:00
Jeremy Long
68f1c1a54c changed display code so that all evidence is shown instead of just the used evidence
Former-commit-id: e01f14f244960ddccbd859bf50c0603abe5170d1
2015-07-14 07:48:07 -04:00
Jeremy Long
726aa7b894 changed tooltip header on evidence count
Former-commit-id: fdf92f87a81d985fd01b19d5ce04517c81608a62
2015-07-14 07:41:19 -04:00
Jeremy Long
44c795cd4f patched to resolve issue #261
Former-commit-id: 44ace36f4a02885134a0af0fb44d11d351d8c7f6
2015-07-11 07:15:08 -04:00
Jeremy Long
4d5d46d08a Merge pull request #274 from dwvisser/openssl-source-analyzer
OpenSSL source analyzer

Former-commit-id: cc2f02f3722b7480f0ec5f7979892b78dc4076d1
2015-07-11 06:29:55 -04:00
Jeremy Long
52cdff14bd minor update
Former-commit-id: 074158f04f25d94c003f970a0d7c4b4c0180fc0b
2015-07-11 06:04:55 -04:00
Jeremy Long
0372167f25 updated documentation to resolve issue #268
Former-commit-id: 78f6158c5c053a7595dd2dc702c015c257a2a00f
2015-07-11 06:02:46 -04:00
Dale Visser
005e401c7f Remove fileExtension property from Dependency class.
Former-commit-id: fc6303c6c835724fe61f882a9df5e2247c7a9b3e
2015-07-10 13:48:27 -04:00
Jeremy Long
479212dd60 Merge branch 'master' of github.com:jeremylong/DependencyCheck
Former-commit-id: 2e6f7993d6389db4bf7957928ae4772a9cc54887
2015-07-10 07:10:34 -04:00
Jeremy Long
0af9239906 added CVE URL arguments per feature request #268
Former-commit-id: a33128070eb9bcc365de1788ebc84c187731568d
2015-07-10 07:10:13 -04:00
Jeremy Long
13d871ab21 Merge pull request #277 from hmolsen/patch-1
fix broken link to suppression site

Former-commit-id: cc663d3a7fdbe4fc952cfa255181ef505f2fe2ce
2015-07-10 06:44:22 -04:00
Jeremy Long
136830ce22 cleaned up logging
Former-commit-id: d9614a2a309e17ceab628b2258f78da0b792a5a8
2015-07-10 06:13:41 -04:00
Jeremy Long
afe8e343b1 test code to run on cloudbees to debug a build issue
Former-commit-id: b3bba62d28666160682ea15813ea80411f72f9a0
2015-07-10 05:56:55 -04:00
hmolsen
f7c2cdff9b fix broken link to suppression site
Former-commit-id: e6c6a4a990da7cd9360654a7b9d12fa15d2d4dd5
2015-07-10 10:33:59 +02:00
Dale Visser
a7abe97ca0 OpenSSL: Forgot to 'git add' new files in previous commit.
Former-commit-id: 7219edc255d239995d5bbb5cbe587e999a7160a8
2015-07-09 17:13:03 -04:00
Dale Visser
5354137c76 OpenSSL: Site doc additions, including about Autoconf analyzer.
Former-commit-id: 35253cfbf5cfcf04b2f3fe39f0891cd0bf1155b4
2015-07-09 17:11:06 -04:00
Dale Visser
433c2e5916 OpenSSL: Updates to use FileFilter instead of file extension.
Former-commit-id: 974bc28171b5b3ac3e772c93447489e1afb797f1
2015-07-09 16:35:04 -04:00
Dale Visser
e5fdda60fc Merge branch 'upmaster' into openssl-source-analyzer
Conflicts:
	dependency-check-core/src/test/java/org/owasp/dependencycheck/analyzer/PythonPackageAnalyzerTest.java

Former-commit-id: 6d92982227ad0ff7c4381d03eb1bf542dfe7697f
2015-07-09 16:14:54 -04:00
Jeremy Long
b227cf890b checkstyle recommended updates
Former-commit-id: 17e3e4580553c07b33533f1e9f6cb5f33177f78e
2015-07-09 07:07:36 -04:00
Jeremy Long
0a4c3102dd reverted changes as a schema upgrade is not needed at this time
Former-commit-id: 6f97fef5748a3d630a73f790cca8baa4753e1d7a
2015-07-09 07:07:10 -04:00
Jeremy Long
54094ebc21 removed unfinished JavaScriptAnalyzer
Former-commit-id: 9d7d7e82da91cea21ea87b8d933093e738571ba2
2015-07-09 06:37:29 -04:00
Jeremy Long
bf3fe6404a reverted changes as a schema upgrade is not needed at this time
Former-commit-id: 22671cb1c7e2e6db26f704f12b5fb86100586221
2015-07-09 06:18:30 -04:00
Jeremy Long
58888ac389 Merge pull request #271 from wmaintw/master
Add ability to the dependency-check-gradle plugin to customize CVE URLs and the report output directory, plus some bug fixes

Former-commit-id: bd78214d389ae52fab0c0280910469289d7b699c
2015-07-09 06:02:17 -04:00
Jeremy Long
07df8ecc02 Merge pull request #275 from dwvisser/filetype-analyzer-pr
Modify AbstractFileTypeAnalyzer to use FileFilter instead of just file extensions

Former-commit-id: ea2d82e9203f3d3556f7621960bb1e35d839db95
2015-07-09 05:51:37 -04:00
Jeremy Long
c86becb169 Merge pull request #276 from colezlaw/homebrew-instructions
Added homebrew instructions

Former-commit-id: 07b4ce33f2b1e96556761c1e31878180f23dc41e
2015-07-09 05:22:17 -04:00
Will Stranathan
98a43606ce Added homebrew instructions
Former-commit-id: 05167f240768a8136e580544875454b3f2fdbdfe
2015-07-08 22:22:28 -04:00
ma wei
5b2353e612 add license information for these gradle script files
Former-commit-id: 1ecf5c52dc82e1fd55172385f303dc5022bad801
2015-07-09 10:20:12 +08:00
Dale Visser
a3ad598004 OpenSSL: Untested changes to get command-line working.
Former-commit-id: f81a410bba6cbc1b71cb7d5fef121eb4f52a780b
2015-07-08 14:07:17 -04:00
Jeremy Long
c02d1d73b8 Merge branch 'hansjoachim-reporting'
Former-commit-id: 9612b8a07e11bfa93d94b71d079bc98d28a73b87
2015-07-08 06:59:11 -04:00
Jeremy Long
77c99e1d7c Merge branch 'reporting' of https://github.com/hansjoachim/DependencyCheck into hansjoachim-reporting
Former-commit-id: 272d25200c1635371806df3bc1d7ad7754cba475
2015-07-08 06:48:14 -04:00
Jeremy Long
055d34818a updated documentation to resolve issue #269
Former-commit-id: ae4668c9182f181f1c01c17e0737b892fe8078ba
2015-07-08 06:39:37 -04:00
Jeremy Long
3a06503b74 commented out test case
Former-commit-id: c62a28c8a39c42e6c458387c03445c86deb6454c
2015-07-08 06:32:52 -04:00
Jeremy Long
cae15a8d7a updated javadoc
Former-commit-id: 264561aed2a9e138d97c4f06c5e27df5b8acf7e9
2015-07-08 06:32:20 -04:00
Jeremy Long
70554d1158 removed Cpe Updater as it will not currently be used
Former-commit-id: 2d9d5a2082dc386697557943e46399c31633ffbc
2015-07-08 06:31:41 -04:00
Jeremy Long
db42da14d1 updated documentation
Former-commit-id: f4b200a38c5df345cd5714c4dbf624af57b40481
2015-07-08 06:31:08 -04:00
Jeremy Long
9daa9b6cca reorganized code, made the database file name version independent so that upgrades can be made, and implemented CPE updates per issue #149
Former-commit-id: fdb57afa28ecffdb7ca90971851844718ecb8bb9
2015-07-08 06:20:19 -04:00
Dale Visser
30087b5e79 OpenSSL: Minor code edits/refactoring.
Former-commit-id: 5c0247772406b55a0e1e7219b83389a5ed0605f4
2015-07-07 17:48:50 -04:00
Dale Visser
c214b70459 OpenSSL: Analyzer unit test passes.
Former-commit-id: 06d36762375a5fc55bc7f30a7857713ce332fceb
2015-07-07 17:37:31 -04:00
Dale Visser
ad0b6c28ba Added long hexadecimal to version string conversion function, and passing unit test.
Former-commit-id: 846e2a3c07016974c396770397304875fd36b7f6
2015-07-07 16:48:38 -04:00
Dale Visser
8127dc2620 OpenSSL: Initial skeleton of OpenSSLAnalyzer and unit test suite. Fixed package imports in PythonPackageAnalyzerTest.java
Former-commit-id: b27fa07cde451c0a19e5ce3f2d46701f02dda47e
2015-07-07 15:51:33 -04:00
ma wei
fed5d3efc0 add README info about customizing the report output directory
Former-commit-id: be8ba701e31f0c90fd686e90ebbf752b96665521
2015-07-07 14:08:48 +08:00
ma wei
ed3c806869 update the version of dependency-check-gradle to 0.0.5
Former-commit-id: facbe13d3bd077b5540e8c20d6b3b1bb3d1b4e3f
2015-07-07 13:57:15 +08:00
ma wei
b8a32eb086 add ability to customize the report output directory
Former-commit-id: fdf53b2768ecd38f16de676ddd35e63fcd1cfad0
2015-07-07 13:45:55 +08:00
ma wei
e537ce155e upgrade the version of dependency check utils
Former-commit-id: 34ef2d92888cf2e108cc0759fd1ba04fb795abe3
2015-07-07 13:45:21 +08:00
Dale Visser
be506964b0 Fixed javadoc. Some reformatting of FileFilterBuilder.
Former-commit-id: 66a81beb1f1361b16743a762f6941dfef626ca4a
2015-07-06 14:16:44 -04:00
Dale Visser
df39b490f5 Made extension file filtering case-insensitive.
Former-commit-id: e7bc80227edcf0d4fb9503eb39d43260b7db028e
2015-07-06 13:50:00 -04:00
Dale Visser
e3256e4bb9 Removed unused imports from AbstractFileTypeAnalyzer.
Former-commit-id: 79af85c863a4b64a113f88c3bab75ba038bcec0b
2015-07-06 13:44:08 -04:00
Dale Visser
4d01d636cc Changed AbstractFiletypeAnalyzer to expose getFileFilter() instead of getSupportedExtensions(), and
refactored existing implementations to return a FileFilter instance. The new FileFilterBuilder class
encapsulates building of a filter that can consider the full filename or other attributes, in addition
to file extension.


Former-commit-id: 9c968c77cc2285d571d38b1a8486d05b09b12aa4
2015-07-06 13:35:49 -04:00
Hans Joachim Desserud
1bf07036e8 Merge branch 'master' into reporting
Conflicts:
	dependency-check-cli/pom.xml

Former-commit-id: f381a7191effbe65f852c76e5e19033e4a264171
2015-07-06 18:36:49 +02:00
Jeremy Long
da058fcaf5 corrected naming of cpe part update
Former-commit-id: 1e7e29ac28d576c495f96713277eaa7c1b705cfc
2015-07-05 06:24:55 -04:00
Jeremy Long
7203c91c70 corrected naming of cpe part update
Former-commit-id: aaac06bda3480c23d663f25b1feac34dc57032b9
2015-07-05 06:09:17 -04:00
Jeremy Long
44893a2a2c added CPE entries to patch issue #149
Former-commit-id: 472d5456e6a8a9d569908ce8da4f64d05afa96c7
2015-07-04 06:40:39 -04:00
Jeremy Long
bdf2ca6e1d added CPE entries to patch issue #149
Former-commit-id: 2bd88a3823d5889ad12952fc3f344e69d571ad05
2015-07-04 06:39:54 -04:00
Jeremy Long
621ac3b6ec changed logging level on one delete temp file log entry
Former-commit-id: 2f73b9a97a641bb86b73db76dc82d5e84cde9e17
2015-07-04 06:38:39 -04:00
Jeremy Long
9fbf8b58a1 added archive.scan.depth
Former-commit-id: 4582ae764582a0435b6f672ed86393d37f8e3d05
2015-06-29 06:24:43 -04:00
Jeremy Long
70ac55f983 Merge pull request #264 from dwvisser/evidence-hashcode-contract
Evidence class compliance with Object.hashCode() contract

Former-commit-id: 94898beb75c867b58a5f8a1cd879b7b47ab47cb5
2015-06-29 06:12:17 -04:00
Jeremy Long
890bb4a2c3 changed how specification version is handled to resolve 262
Former-commit-id: c902d0683bec70a8254c7110e14b14bf0f3ad54a
2015-06-28 07:08:36 -04:00
Dale Visser
888f2aed97 Replaced ternary expression with ObjectUtils.equals(), and reformatted using IDE.
Former-commit-id: e72ba88f6e4d29cb00288c34a9d413e455f26b16
2015-06-24 15:38:03 -04:00
Dale Visser
e6707c65a5 Made magic number constants private.
Former-commit-id: 12539d50efb17790b770934d10b953e0fd180c8a
2015-06-24 15:25:03 -04:00
Dale Visser
414912de67 Replaced private method with ObjectUtils.compare().
Former-commit-id: 16c4a5a7ed74819351bab55528442183a3244a0c
2015-06-24 15:19:24 -04:00
Dale Visser
723ba740e0 Replaced testEquality() with StringUtils.equalsIgnoreCase().
Former-commit-id: 559413b9fef79fddbb85bcebda3ed0ca76c908dd
2015-06-24 15:12:15 -04:00
Dale Visser
e2389b4992 Replaced private method with calls to StringUtils.equalsIgnoreCase()
Former-commit-id: d4c92115e6f90109bfae9487ef3f4c829bf22232
2015-06-24 15:01:32 -04:00
Dale Visser
39e587085f Replaced private method with equivalent ObjectUtils.equals() call.
Former-commit-id: 9d460788899a3cb6e0891d63e39f50c3f96fc385
2015-06-24 14:57:10 -04:00
Dale Visser
42e77c77a9 More expressive/readable code using commons-lang.
Former-commit-id: bf24d6d4672c57fdbe6c1f113ddb25628ec97db4
2015-06-24 14:48:48 -04:00
Dale Visser
e8353089f3 Made hashCode() satisfy the Object.hashCode() contract, i.e., a.equals(b) implies a.hashCode() == b.hashCode()
Former-commit-id: 9f347a57b740b572d2d6a9a9e523de44e384773e
2015-06-24 14:41:20 -04:00
Jeremy Long
d76799cfd0 changed a warning log message to debug
Former-commit-id: dee9cc935de19052dc0d25b927b6dc4b5948504c
2015-06-24 06:16:12 -04:00
Jeremy Long
4907c20ba6 Merge pull request #263 from dwvisser/engine-javadoc
Added explanatory text to analyzeDependencies() javadoc. 

Former-commit-id: a23881a1e1d2b8c247176acb525f2e9ade601cca
2015-06-24 06:00:35 -04:00
Dale Visser
fa16a960a5 Added explanatory text to analyzeDependencies() javadoc. Fixed IDE warning about unnecessary ternary expression. Slight code reformat.
Former-commit-id: 2b2887308c853cae85a4a6f9572e3fde20f0c990
2015-06-23 11:56:09 -04:00
Jeremy Long
aa66e5ab15 removed unused JUL properties file
Former-commit-id: e02cb926d9c52fbd6f8e99dc612dea6707fac51a
2015-06-22 05:54:18 -04:00
Jeremy Long
802e4fc238 changed default logging for tests to INFO
Former-commit-id: 45b4dc2707e0a267d2ef41a84627c5fee5ba92f1
2015-06-22 05:53:51 -04:00
Jeremy Long
2494fec2a7 checkstyle/findbugs/pmd corrections
Former-commit-id: 7e1758362e3e3da13678e5e2a8bffa28b8ad5a87
2015-06-22 05:53:11 -04:00
Jeremy Long
13db27854b logback test configuration
Former-commit-id: f7af8edab353b7900eb20f088707ad9c38d9330e
2015-06-21 20:12:31 -04:00
Jeremy Long
190e17e6b9 converted to logback instead of slf4j-simple
Former-commit-id: ad06d3bf039bbb30b1f10ce2ae5861000b1c05f6
2015-06-21 20:11:27 -04:00
Jeremy Long
02acd3162e removed simplelogger references
Former-commit-id: 5ab1de428d8d69a9d3f7bd8e80af0779f46b1fa0
2015-06-21 20:11:03 -04:00
Jeremy Long
47fa6e67d9 converted to logback instead of slf4j-simple
Former-commit-id: 1b3854eeb5ad098931d5603b5206d88a900176da
2015-06-21 20:10:23 -04:00
Jeremy Long
aba7b47fa8 removed logback-classic version number because it is in dependency-management
Former-commit-id: 231224b23957508f2084ec06f1debf09b6513b90
2015-06-21 20:09:14 -04:00
Hans Joachim Desserud
c1ae4f8cc2 Replaced version numbers for reporting plugins in maven module
Former-commit-id: e7ec67e0692d4233a2d6d651f50b0cd250c4587b
2015-06-21 17:52:48 +02:00
Hans Joachim Desserud
4f43793e17 Bonus: fixed copyright symbol
Former-commit-id: 49d9f8af2055135238aa656cb69e20089a22e9fe
2015-06-21 17:43:04 +02:00
Hans Joachim Desserud
8fd32ebd5e Replaced version numbers for reporting plugins in cli module
Former-commit-id: c686c6f452ff1c514924d3c7dee59eaac4463af1
2015-06-21 17:42:13 +02:00
Hans Joachim Desserud
0b2059462b Replaced version numbers for reporting plugins in ant module
Former-commit-id: 6eaa38d2988142c07b92e94933564d652b7852d3
2015-06-21 17:31:42 +02:00
Hans Joachim Desserud
b0df41213a Use properties in reporting version numbers in root pom
Former-commit-id: 77b71beee1caeaa94712a8e3392952ea4bc78689
2015-06-21 14:42:15 +02:00
Hans Joachim Desserud
f421f30122 Add (and sync) reporting plugins for jenkins module
Former-commit-id: 0148cb4c9576f800f684299141e2e97b7d493bdd
2015-06-21 14:38:41 +02:00
Hans Joachim Desserud
358ac46393 Replace reporting plugin version numbers with properties in core module
Former-commit-id: dbd8588f6f259e03203700ebc2075fb8b84ff575
2015-06-21 14:27:25 +02:00
Hans Joachim Desserud
ebd98b9094 Move reporting-plugin-version properties to root pom
Former-commit-id: eeff5c2d0f0fce884c3c12b071d030e4f0107302
2015-06-21 14:27:25 +02:00
Hans Joachim Desserud
c713bb0353 Move reporting plugin version numbers into properties
Former-commit-id: a3cbc3ba8647dc2118e6c35f2819c87eb2f98e6f
2015-06-21 14:27:24 +02:00
Jeremy Long
d61a7b090d removed unused import
Former-commit-id: b5fa9e7692183a4244e23206549b63ac809aec94
2015-06-21 07:38:18 -04:00
Jeremy Long
9f1a894b86 removed duplicate slf4j-api
Former-commit-id: 480d6b7a79bfb7a52bd341d9fdef47cb6f9f1c4c
2015-06-21 07:37:59 -04:00
Jeremy Long
cc86d73719 removed unused import
Former-commit-id: 298698b4a56a0fd267ee6bc81e5abe46156b94f6
2015-06-21 07:21:42 -04:00
Jeremy Long
86b1802d16 version for slf4j-api was moved to dependency-management in parent
Former-commit-id: c7a6567c4b0077a512765beb9ee864b9f7fa5459
2015-06-21 07:21:14 -04:00
Jeremy Long
3cdf66a0a4 removed LogUtils
Former-commit-id: d2507a7a917e0e8f2a9f4e11379aadbbd4540a43
2015-06-21 07:20:36 -04:00
Jeremy Long
faeee200d3 removed unused classes
Former-commit-id: 0f642f99d1ab168a3c97653b5abbda7ab313732a
2015-06-21 07:20:01 -04:00
Jeremy Long
4b34734919 Merge branch 'colezlaw-dependency-management'
Former-commit-id: 221f40ba56ebaa05bbf2f0eb208fdeba03252170
2015-06-21 06:48:43 -04:00
Jeremy Long
765c1b8875 dependency management updates
Former-commit-id: 7b853706a10d6caeb547fda15b217ebc7893949a
2015-06-21 06:48:25 -04:00
Jeremy Long
f89cefd9ae Merge pull request #241 from dwvisser/autoconf-files-analyzer
Autoconf files analyzer

Former-commit-id: f8e364d31b9a40abfef67ef93d91cb65527be217
2015-06-21 06:35:14 -04:00
Jeremy Long
23dfa8645c Merge branch 'master' of github.com:jeremylong/DependencyCheck
Former-commit-id: 179e9bb323f6a225d44393ac80e1d983b6af8947
2015-06-21 06:19:18 -04:00
Jeremy Long
9556e7bf51 added logback
Former-commit-id: 2d42135f5f38ae09c84d95add075b09a62b8c347
2015-06-21 06:19:10 -04:00
Jeremy Long
7fa306dd9a added logback instead of JUL to resolve issue #164
Former-commit-id: 226e52fc03b22501358d6d147d3fa042c274f1f4
2015-06-21 06:18:54 -04:00
Jeremy Long
d9a322b533 added logback
Former-commit-id: 3179f2fd13329ce3b83126b70dfa516c551978a4
2015-06-21 06:15:40 -04:00
Jeremy Long
fde1b21d1f added logback
Former-commit-id: a1f23b3181e642a41ba5e1b767b86b103d4ad134
2015-06-21 06:14:49 -04:00
Will Stranathan
124249a35d Moving all versions into the parent project
Former-commit-id: fb72244825663b3ae063daf4c5b305504532e254
2015-06-20 11:20:58 -04:00
Jeremy Long
6d6e9a6df7 Merge pull request #258 from dwvisser/dependency-class
Dependency class javadoc and code readability/re-use changes

Former-commit-id: dda65552ee613a836a20901772e93cf2d5cae003
2015-06-20 05:47:54 -04:00
Dale Visser
77ae9dfbef Extracted magic numbers in hashCode() to named constants.
Former-commit-id: e023cdf8583859215243244227bdc576b4df75f4
2015-06-19 13:47:03 -04:00
Jeremy Long
4ed642ed5d changed the lifecycle of the aggregate mojo
Former-commit-id: d8cbcc207d921a66baea8c7763329defc170a512
2015-06-19 06:42:25 -04:00
Jeremy Long
556be02696 removed the logFile as it is no longer used
Former-commit-id: 4fc60afe005df06712abe7478b431ba3c4c29562
2015-06-19 06:41:55 -04:00
Jeremy Long
572418a2f5 removed the logFile as it is no longer used
Former-commit-id: 51466302d3697dcb07673493cd33776a4bd395c3
2015-06-19 06:41:33 -04:00
Jeremy Long
fce742910e removed the logFile configuration as we now use the ant logger
Former-commit-id: 250f861439d7fbe4b2916c258aa6dd96ce5feca6
2015-06-19 06:40:56 -04:00
Jeremy Long
370b0cb049 removed the unused logFile property
Former-commit-id: 882b8abad919c9635919f5b6891845d6523c00b9
2015-06-19 06:40:07 -04:00
Jeremy Long
76333b8647 patch to resolve issue #255
Former-commit-id: 0e3f3081929a4bd93ea14a7379567f2f0c776b02
2015-06-19 05:39:31 -04:00
Jeremy Long
ce7d12e850 Merge branch 'master' of github.com:jeremylong/DependencyCheck
Former-commit-id: e7876e1969de54e1d9a139fb7e40f26748687c5b
2015-06-19 05:35:10 -04:00
Jeremy Long
63d92c62a5 additional checks added as part of issue #257
Former-commit-id: 9a9154adcc42688f600a9a6cce49a919fd08f482
2015-06-19 05:34:52 -04:00
Dale Visser
7e2720e673 Added explanatory Javadoc comments for relatedDependency behavior and purpose. Added logging whenever there is a collision adding to relatedDependency.
Former-commit-id: 99d3c9527541769e47008a9c919e4727bd2bf623
2015-06-18 19:33:57 -04:00
Dale Visser
a7c0ea3602 Line reduction in Dependency.equals(), and refactor of Dependency.hashCode() using ObjectUtils and a for loop.
Former-commit-id: e95186fe8aa0eae3c6ee45f4f5c459f86c19c636
2015-06-18 19:07:45 -04:00
Dale Visser
374829ecd5 DependencyCheck.equals() taking advantage of commons ObjectUtils now.
Former-commit-id: d72ed9b7ee7c0b634b64e90e902d7991534cde79
2015-06-18 16:51:56 -04:00
Jeremy Long
6a67c04ca2 Merge pull request #256 from dwvisser/deprecation-warning-fixes
Some minor dependency-check-core compiler warning fixes

Former-commit-id: 2a833976d69160acdf2e2d2494f16749064e170f
2015-06-17 20:51:04 -04:00
Jeremy Long
f4ce087649 moved files
Former-commit-id: 4041a13f94723e6002e82b926916d60946b3a4d2
2015-06-17 20:48:50 -04:00
Jeremy Long
4566ce7de8 added package-info
Former-commit-id: e3d0de42bc1e66cead82b043da10cd5c148fb9a1
2015-06-17 20:48:41 -04:00
Jeremy Long
a898e61a7a added package-info
Former-commit-id: be01f5e10b7550bfb180af62d111574a3d988079
2015-06-17 20:48:17 -04:00
Jeremy Long
6d524bdc99 added The OWASP Foundation
Former-commit-id: 29b73f7bb17368be8c1d0b5547290365eaedefb9
2015-06-17 20:47:24 -04:00
Jeremy Long
1ba21f7f71 fixed copyright
Former-commit-id: 8e49185e3efef904af121a69fb04c8c577ef0b6e
2015-06-17 20:46:51 -04:00
Jeremy Long
7f710e0782 Merge branch 'master' of github.com:jeremylong/DependencyCheck
Former-commit-id: 0050abb5911eb52058d7e43a65d7f3df5dda6f24
2015-06-17 20:16:54 -04:00
Jeremy Long
acb78c18bf Merge pull request #254 from colezlaw/issue-239
Now switched to slf4j

Former-commit-id: 7b2e8bc1351455620912bdcb761d639cb5588986
2015-06-17 20:16:42 -04:00
Dale Visser
5b1f632035 Isolated sources of deprecation warnings, and added warning suppression annotations. Also added a minor Enumeration -> Enumeration<JarEntry> "fix"
Former-commit-id: ccfe52d9ed50977ce73b928b09232d8635d7fcf2
2015-06-17 15:51:06 -04:00
Will Stranathan
8aca739f54 Now switched to slf4j
Former-commit-id: 880512e5998d86026cfec40b1a8a165dd6b4b8e1
2015-06-16 18:50:05 -04:00
Jeremy Long
efdfcee7fc minor tweak to test cases to prevent warnings during the test case execution
Former-commit-id: 26e14d162629b3de225193718396fa87bb932d95
2015-06-16 14:06:34 -04:00
Jeremy Long
1b4cb1379a Merge branch 'hansjoachim-more-dependency-management'
Former-commit-id: 043ec0799ce163e2a1ea38f9a7ed2113b2a892b9
2015-06-16 05:38:03 -04:00
Jeremy Long
820d7f18c4 Merge branch 'more-dependency-management' of https://github.com/hansjoachim/DependencyCheck into hansjoachim-more-dependency-management
Former-commit-id: 80ad346062356e133e8641bae1761dfc8aa396c3
2015-06-14 19:59:19 -04:00
Jeremy Long
7a5a5e0211 added the hint analyzer to the test case to fix a bug
Former-commit-id: 48062285772215509e00ff74a0d3dcf83f571cb9
2015-06-14 16:07:44 -04:00
Jeremy Long
07dda233ec patch for issue #229 to remove bundle vendor from the evidence
Former-commit-id: a5a24422d5edfb23d3ea4d4c617044051d454860
2015-06-14 15:51:26 -04:00
Jeremy Long
02209fc039 patch for issue #229 and false negative for spring security
Former-commit-id: 8c9cd15ab06a88f675241fe75f1fe193634eddf0
2015-06-14 15:50:14 -04:00
Hans Joachim Desserud
a372882c18 Upgrade maven-core, -plugin-api and -settings dependencies to version 3.3.3
Former-commit-id: 92d1b4c84ccdc70bd3dd7ae29484ac546adea6b5
2015-06-13 15:57:01 +02:00
Hans Joachim Desserud
91c05598b2 Move -maven dependency version numbers to the parent pom
Former-commit-id: 64013140d893a6b9ed356c515a94951ad1c92dd4
2015-06-13 15:56:59 +02:00
Hans Joachim Desserud
0130ab6356 Remove commons-io version number from -core
Former-commit-id: e2085764fa4f4ead5e865fb5463523344f7ba228
2015-06-13 15:56:57 +02:00
Hans Joachim Desserud
581a3f8388 Move commons-io version number to parent pom
Former-commit-id: b065059630f6c06f522a1b3c0e56d99c78f363ae
2015-06-13 15:56:55 +02:00
Hans Joachim Desserud
2587ad21c0 Add upgrade warnings/notes
Former-commit-id: e35d3be893841c3a8c6a958888d87ff304e22360
2015-06-13 15:56:54 +02:00
Hans Joachim Desserud
8b56349daa Move commons-cli version number to parent pom
Former-commit-id: e80c5831d491189c867cdceb3255c655e3b76027
2015-06-13 15:56:52 +02:00
Jeremy Long
25f2eb69b9 fixed bug related to temp files
Former-commit-id: 5b492bb592122645a7a7afda34b23f54da8c2fa4
2015-06-13 08:41:19 -04:00
Jeremy Long
46b4761f1a updated test case initialization
Former-commit-id: eb6c0b16c2ca4555d81d8c66db18198991ab8d51
2015-06-13 08:32:38 -04:00
Jeremy Long
4d3f96f979 fixed bug related to temp files
Former-commit-id: 3d1a5c6d30411ab026b2f072a8aee25106bf9691
2015-06-13 08:30:05 -04:00
Jeremy Long
084371a1e3 initial long running test, commented out as not all resources are in repo yet
Former-commit-id: 3811321133cf2dfdf898677803086d8e2aa7e689
2015-06-13 08:29:30 -04:00
Jeremy Long
f5aaaf1c63 fixed bug related to an incorrect file path, resulted in NPE in the dependency bundling analyzer in rare cases
Former-commit-id: ac1846686bc1c96feaa4c3966dd91f6ad425542f
2015-06-13 08:26:18 -04:00
Jeremy Long
316bab6fff fix for issue 232 - dependencies in separate war files were being combined as related in cli
Former-commit-id: d43f3689be374468a6877436a2b1c94d13897c30
2015-06-13 08:25:20 -04:00
Jeremy Long
65a5d38fc6 updated test configuration
Former-commit-id: 598f069d1618f8941d480a1f6020ea9969a66222
2015-06-13 08:23:54 -04:00
ma wei
aa927e9168 refactor the build.gradle file, extract publish tasks to a separate gradle file
Former-commit-id: 539b8d16917a8a60e07a7f1466ce9ad0c84ca94e
2015-06-11 10:11:07 +08:00
Jeremy Long
7b0a120e66 added test-jar back
Former-commit-id: 98de0ae9efde8e5fba07f19d327703c52c5a67e7
2015-06-10 05:35:47 -04:00
Jeremy Long
143ce58cb3 patch for issue #251
Former-commit-id: b63ab10ddf415eb5b5b7d0f2a7fc2e343e3f2aef
2015-06-07 17:45:59 -04:00
Jeremy Long
9244b44ce6 Merge branch 'hansjoachim-plugins'
Former-commit-id: f88e9750d80b430e36b9557d5d8832bec15b4efc
2015-06-07 17:14:36 -04:00
Hans Joachim Desserud
4720312b26 Upgraded several plugins to latest version
Former-commit-id: 0e7cc8f6d90a31fcf59171f2432957efabe9e813
2015-06-07 17:50:42 +02:00
Jeremy Long
d43610701b added suppression entries per issue #237
Former-commit-id: b600d89b0da9414f8f48d7d0fe3d447b75418c9c
2015-06-07 07:38:55 -04:00
Jeremy Long
243b0b2c21 removed test-jar
Former-commit-id: c6f5ee69992ba24185941e5488212f0e47286ec2
2015-06-07 07:09:32 -04:00
Jeremy Long
930894ced5 changed provided scoped dependencies to the correct test scope per issue #244
Former-commit-id: a6882023e79e158ab664ea3c39b6419b0940ce26
2015-06-07 07:09:03 -04:00
Jeremy Long
63ce7850e1 Merge branch 'hansjoachim-upgrades'
Former-commit-id: 6e19e7b75a4481314adb9b74586541a582a98018
2015-06-07 06:43:02 -04:00
Hans Joachim Desserud
984a38ce91 Renamed field to avoid shadowing
Former-commit-id: 05c0f0784d3ef4aa688e4700c790dc44d9c6d133
2015-06-06 14:05:06 +02:00
Hans Joachim Desserud
12ce2275e0 Replace deprecated use of extending BuildFileTest with BuildFileRule now that the latter is available in ant-testutil
Former-commit-id: c06ccc49f15012ef545b94d8e601c5ae0f012190
2015-06-06 14:05:06 +02:00
Hans Joachim Desserud
214b88ea1c Move Apache Ant dependency declaration to dependencyManagement and upgrade to 1.9.5
Former-commit-id: 9be839121e7423007ff293b44aa69327b7821a2c
2015-06-06 14:05:05 +02:00
Jeremy Long
980e00e824 Merge pull request #248 from colezlaw/xpathfix
Fixed the Xpath expression to be more accurate and to find the getLog…

Former-commit-id: 43c480a97da111436353fbb1bb6f686eddaf89dd
2015-06-06 06:13:33 -04:00
ma wei
80276d5e4d adjust format in README
Former-commit-id: 7bcc077020394df43130bcd35d60cf1063b6c815
2015-06-06 10:33:43 +08:00
ma wei
28c2db9edc adjust format in README
Former-commit-id: 6a905bf23bec0f939974d83d53941a54d0dd851a
2015-06-06 10:32:02 +08:00
ma wei
5e66f70cf0 Update README, add usage for install plugin from MavenCentral
Former-commit-id: 26c9119b6eeb042e46f9855a2c51c48a0675419b
2015-06-06 10:29:50 +08:00
ma wei
4e41187bf3 Modify version to 0.0.4, changed the GroupId in order to release to MavenCentral
Former-commit-id: 7f57d17742215afcf3a9e2b4aa82e93d7444439e
2015-06-06 10:28:52 +08:00
Will Stranathan
0a09760aec Fixed the Xpath expression to be more accurate and to find the getLogger statement
Former-commit-id: 2ecf96fe54d6f417f1fbf4b51092b73ed0d8a015
2015-06-05 22:03:13 -04:00
Jeremy Long
500ad62470 Merge pull request #247 from dwvisser/analyzer-logger-fix
Fixed logger name in PythonPackageAnalyzer.

Former-commit-id: 3fe7f3cce7db8733ef534b556cfd4023d9a210fc
2015-06-05 20:00:04 -04:00
Jeremy Long
1204d98e8d added rough rule to look for copy paste errors regarding loggers (i.e. obtaining the logger for the wrong class per PR #247)
Former-commit-id: ca6b2566ea496909ea4ab4f48241bb664b130183
2015-06-05 19:59:40 -04:00
Dale Visser
ae3596ac99 Fixed logger name in PythonPackageAnalyzer.
Former-commit-id: 759bdd58e3af6050014ef34cf40056fb83592249
2015-06-05 14:43:45 -04:00
Jeremy Long
d662c693f1 updated version numbers
Former-commit-id: 68c36d5c7f9b57e8b14fa9cd96fb6e74e34e4c19
2015-06-05 09:21:01 -04:00
Jeremy Long
1820cd0ae8 1.2.12-SNAPSHOT
Former-commit-id: 0099793057b121184d59f9aabfeb92b6df7ecaac
2015-06-05 09:20:49 -04:00
Jeremy Long
c5e144d211 Merge pull request #236 from ercpe/fix-dbstatements
Fix table name in dbStatements

Former-commit-id: 5ea4b843251c9c3c4d1a6b7cbf0d2b6599add3e8
2015-06-05 07:26:29 -04:00
Jeremy Long
e1c041a250 1.2.12-SNAPSHOT
Former-commit-id: 5136f7af23b3de02e6375350c2b850518ef6a3a8
2015-06-05 07:26:17 -04:00
Jeremy Long
82dff86802 added an Oxford comma for consistency
Former-commit-id: 4b6d06035396759018a6a4a843d6c573178cc5d6
2015-06-05 07:06:19 -04:00
Jeremy Long
b2019d7633 Merge pull request #243 from dwvisser/docs-index-update
Add Python, .NET to project description, fixed broken links

Former-commit-id: f192013295457df2d53853e27bc2cbcc8f09ef7b
2015-06-05 06:51:45 -04:00
Jeremy Long
f6afea0004 fix for issue #238
Former-commit-id: 3e2d26a90fb14975d01be395c9ac6a6a80297e2d
2015-06-05 06:10:41 -04:00
Jeremy Long
309eb502cd Merge branch 'master' of github.com:jeremylong/DependencyCheck
Former-commit-id: 9f114b5a00713330c15cd4b24578bee1904074ee
2015-06-05 05:58:44 -04:00
Dale Visser
712252eb6b Fixed broken links.
Former-commit-id: b14061de70552c56a81742c1bc35ffb322f75d54
2015-06-04 10:38:01 -04:00
Dale Visser
c3baf36eb5 Added .NET and Python to description on site index page.
Former-commit-id: 94f09b4e66452afc111db493d4e7195170441b5d
2015-06-04 10:23:56 -04:00
Dale Visser
771fd77088 AutoconfAnalyzer: Some 'diff' whitespace fixes.
Former-commit-id: 98b756d97d38068550a416eb1df0c7fa2e217504
2015-06-03 15:05:13 -04:00
Dale Visser
7d3ac21e42 AutoconfAnalyzer: Some code refactoring to eliminate duplicate code
and redundant condition checking, plus fixes/additions to Javadoc.

Former-commit-id: 1c18377b6d871f354915ca210df6ee22534553ba
2015-06-03 14:50:49 -04:00
Dale Visser
cc3a72f4fd AutoconfAnalyzer: Added 'configure' extension for analyzing Autoconf
output configure scripts for the package metadata shell variables
generated by autoconf. Passing tests that look at metadata generated
in Readable's and Binutil's configure scripts.

Former-commit-id: d20bb17ccf4fde848dede4b87805241387e73f89
2015-06-03 14:50:24 -04:00
Dale Visser
d87fa374ec Autoconf analyzer: added configure.in and unquoted values processing.
Former-commit-id: 4cedd800c60250f19deaebf8cdff9db4e310e7ab
2015-06-03 14:50:24 -04:00
Dale Visser
154a576388 AutoconfAnalyzer: added analyzer to service registry; added needed command-line hooks, flags.
Former-commit-id: bc4cc6082b4181286e62e736f4d5751e60379779
2015-06-03 14:50:17 -04:00
Dale Visser
29fe71b82c AutoconfAnalyzer: Minor code fixes to address some static analysis warnings.
Former-commit-id: a8ad815fd8fbcf1b21141093d92da0758c344d01
2015-06-03 14:50:09 -04:00
Dale Visser
e960fd31fa AutoconfAnalyzer: Added unit test for Readable's configure.ac, which makes full usage of
available AC_INIT parameters.

Former-commit-id: e545f3f85b70f15c1650528af9afd940984478a1
2015-06-03 14:49:45 -04:00
Dale Visser
132fb87c2c Initial AutoconfAnalyzer implementation, along with passing unit tests utilizing
GNU Ghostscript's configure.ac.


Former-commit-id: 50f2c5bc8ba83f346471cde091b4cc46947ed246
2015-06-03 14:39:49 -04:00
ma wei
6e281e0b66 Add gradle task to publish this plugin to Maven Central
Former-commit-id: dc5bfcbc382d7eb5f227c655214560082c8b9633
2015-06-02 14:14:06 +08:00
ma wei
a86ff9dfd1 Add ability for user to configure CVE url in gradle build file
Former-commit-id: 186b03ff55c9cab1c7bd788e5ea5ba1f8e79241e
2015-06-02 14:03:03 +08:00
Johann Schmitz
3ea33f1dd6 Fix table name in dbStatements
Changed table name to "software" to avoid SQL errors on MySQL (case sensitive)


Former-commit-id: c5097a534c778db2e919ce26d54ba175767e8c8a
2015-05-27 16:44:42 +02:00
Johann Schmitz
b645fd495f Fix table name in dbStatements
Changed table name to "cpeEntry" to avoid SQL errors on MySQL (case sensitive)


Former-commit-id: 0b9bedaad63a4310c4b3aa69fc0351a1b27f55ce
2015-05-27 10:18:20 +02:00
Jeremy Long
5e635224e2 Merge pull request #235 from wmaintw/master
Add license information and modify the legalese for the gradle plugin

Former-commit-id: b880c195086735cab18ee3c78ef2f074aff625d8
2015-05-25 11:32:51 -04:00
ma wei
54d8becd74 Add license information and modify the legalese
Former-commit-id: 4586da1e5e89cc90cdd67f7a06ee3d7fe2e5871f
2015-05-25 12:23:58 +08:00
Jeremy Long
87243537e7 fixed documentation (path to mono) per issue #234
Former-commit-id: bc56be22a694aea6868bb7a23b38bd992e3c7bbd
2015-05-23 07:13:33 -04:00
Jeremy Long
0604361d4e Merge branch 'wmaintw-master'
Former-commit-id: 16f0045f42b6bb19b7f3e0a7e74afee4a5a62e4c
2015-05-23 06:48:08 -04:00
Jeremy Long
399c052129 Merge branch 'master' of https://github.com/wmaintw/DependencyCheck into wmaintw-master
Former-commit-id: 2974aad3031a4b5746f735640f9fdee430e5b709
2015-05-23 06:46:11 -04:00
Jeremy Long
362c7e9c04 added notes for future enhancement
Former-commit-id: 2886464dc1fd657b79f1763eec2862bbf2c11af7
2015-05-23 06:39:23 -04:00
ma wei
c4843253c0 update version in README
Former-commit-id: f34bbf1704c329035cb12b739b40505cdae88230
2015-05-22 15:14:21 +08:00
Jeremy Long
928d8dbb15 Merge pull request #233 from dwvisser/add-dvisser-as-contributor
Added Dale Visser to the developers list.

Former-commit-id: d960cd48ef9541c57ca6b9e545f23a6833105392
2015-05-17 07:20:42 -04:00
ma wei
7f528d8d4a update proxy setting usage
Former-commit-id: fd11b04a2a68619e9481e9d1341ef64b96dbf9d8
2015-05-15 11:39:52 +08:00
ma wei
8ddf4a0e72 add ability for user to customize proxy setting
Former-commit-id: 8b5093ccc75751bbea1415334bd5f4335bc5bbc6
2015-05-15 11:33:17 +08:00
Dale Visser
5c589136e5 Added Dale Visser to the developers list.
Former-commit-id: 678f9e701a59148288b20a62978343d4633b2fb2
2015-05-14 11:06:57 -04:00
ma wei
00f10771d9 modify the plugin version to 0.0.2 in README
Former-commit-id: aab6f65c20e565647a7fd91f04af63aeedeab32f
2015-05-14 11:27:02 +08:00
ma wei
583a5cda61 upgrade dependency-core version to 1.2.11
Former-commit-id: c36d8d962a352c574cacabc73a4e12c1bb6c7a7a
2015-05-14 11:25:18 +08:00
ma wei
f9e5ebccfd modify usage for multiple sub project
Former-commit-id: 295bd3be8cc8901c7d103b51d6a4041b53bbcb3e
2015-05-14 11:24:29 +08:00
ma wei
77eebb6c1b update version to 0.0.2
Former-commit-id: d999f6fe3e9f5d6c309bdbda5581fb586e895930
2015-05-14 11:06:34 +08:00
ma wei
53d01e5fe1 update usage for multiple sub project
Former-commit-id: 16aac082aa5f52f01db8f6935252306bd176860b
2015-05-14 11:04:39 +08:00
ma wei
3d08e8db06 generate report for each sub project in different directory
Former-commit-id: f13f90e93121734706ad2bc43bf47d70533b117c
2015-05-14 10:48:15 +08:00
Jeremy Long
50a3ce2036 fixed typo in file name
Former-commit-id: eec0a010dd1c88cbf933446920f2d5a53932714a
2015-05-13 08:12:30 -04:00
Jeremy Long
a127948c4c sorted projects in each list
Former-commit-id: 5b36f4f7b35f2eff178779307466976767e2e933
2015-05-13 06:36:19 -04:00
Jeremy Long
36b406f7ec added a link to the python analyzer
Former-commit-id: fa73940acd758656f3876cff9d3b8e475ef430e1
2015-05-13 06:35:35 -04:00
Jeremy Long
3c50e9f784 fixed markdown syntax and added a link to the python analyzer
Former-commit-id: 286dd35ae2c1480a7bd2fac2a4f0d59d7990a12f
2015-05-13 06:35:18 -04:00
ma wei
51e2af148e modify code format in README.md
Former-commit-id: 5aabb0ec41022da8a2e0c83fc0317e7cbcc4cb95
2015-05-13 10:22:36 +08:00
ma wei
d7351f97fe add ability to publish this plugin to Gradle Plugin Portal
Former-commit-id: 7dd9400a1871db2c880cecee3297734f39b3be4e
2015-05-13 10:17:07 +08:00
ma wei
51c3ebcdb8 add ability to publish this plugin to Bintray
Former-commit-id: 61b03346bf2f3035d52f86c88d8d655099ed5eb7
2015-05-12 16:45:40 +08:00
ma wei
f29e4a5d36 remove unused code
Former-commit-id: 1844b759f255550545b4ead5e527289c2ad8b1a7
2015-05-12 16:22:15 +08:00
ma wei
1fa59270c1 add README
Former-commit-id: d1a87185137952bdfeaf6dd2543e0ff748e37859
2015-05-08 16:04:29 +08:00
ma wei
d5c1fda958 rename plugin name to 'dependency-check'
Former-commit-id: 240e44081b0f0a6e2678f8b2c37ef8684f245ffd
2015-05-08 16:04:18 +08:00
ma wei
b0e34fd062 add gradle task, utilize dependency-check-core functions to check dependency vulnerabilities
Former-commit-id: bef42df0ed3869fbceb4fe3ec459228031554439
2015-05-08 15:35:55 +08:00
ma wei
8fbf8df0bd initial check in
Former-commit-id: 2be8565868e6c8d62f86fcd7c70d350464c667c6
2015-05-07 11:25:31 +08:00
284 changed files with 303929 additions and 14623 deletions

.gitignore (vendored): 2 additions

@@ -1,4 +1,6 @@
*/target/**
+# IntelliJ test run side-effects
+dependency-check-core/data/
# Intellij project files
*.iml
*.ipr

README.md

@@ -13,8 +13,10 @@ For instructions on the use of the Jenkins plugin please see the [Jenkins depend
### Command Line
-More detailed instructions can be found on the [dependency-check github pages](http://jeremylong.github.io/DependencyCheck/dependency-check-cli/installation.html).
-The latest CLI can be downloaded from bintray's [dependency-check page](https://bintray.com/jeremy-long/owasp/dependency-check).
+More detailed instructions can be found on the
+[dependency-check github pages](http://jeremylong.github.io/DependencyCheck/dependency-check-cli/).
+The latest CLI can be downloaded from bintray's
+[dependency-check page](https://bintray.com/jeremy-long/owasp/dependency-check).
On *nix
```
@@ -26,6 +28,12 @@ On Windows
> bin/dependency-check.bat -h
> bin/dependency-check.bat --app Testing --out . --scan [path to jar files to be scanned]
```
+On Mac with [Homebrew](http://brew.sh)
+```
+$ brew update && brew install dependency-check
+$ dependency-check -h
+$ dependency-check --app Testing --out . --scan [path to jar files to be scanned]
+```
### Maven Plugin
@@ -65,8 +73,12 @@ Development Usage
The following instructions outline how to compile and use the current snapshot. While every intention is to maintain a stable snapshot it is recommended
that the release versions listed above be used.
-Note, currently the install goal may take a long time to execute the integration tests. However, if this takes more then 30 minutes it is likely that the
-download of data from the NVD is having an issue. This issue is still being researched and a solution should be published soon.
+The repository has some large files due to test resources. The team has tried to cleanup the history as much as possible.
+However, it is recommended that you perform a shallow clone to save yourself time:
+```bash
+git clone --depth 1 git@github.com:jeremylong/DependencyCheck.git
+```
On *nix
```
@@ -95,7 +107,7 @@ Archive: [google group](https://groups.google.com/forum/#!forum/dependency-check
Copyright & License
-
-Dependency-Check is Copyright (c) 2012-2014 Jeremy Long. All Rights Reserved.
+Dependency-Check is Copyright (c) 2012-2015 Jeremy Long. All Rights Reserved.
Permission to modify and redistribute is granted under the terms of the Apache 2.0 license. See the [LICENSE.txt](https://github.com/jeremylong/DependencyCheck/dependency-check-cli/blob/master/LICENSE.txt) file for the full license.

dependency-check-ant/pom.xml

@@ -20,7 +20,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
-<version>1.2.11</version>
+<version>1.3.0</version>
</parent>
<artifactId>dependency-check-ant</artifactId>
@@ -276,7 +276,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-project-info-reports-plugin</artifactId>
-<version>2.7</version>
+<version>${reporting.project-info-reports-plugin.version}</version>
<reportSets>
<reportSet>
<reports>
@@ -290,7 +290,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
-<version>2.9.1</version>
+<version>${reporting.javadoc-plugin.version}</version>
<configuration>
<failOnError>false</failOnError>
<bottom>Copyright© 2012-15 Jeremy Long. All Rights Reserved.</bottom>
@@ -307,7 +307,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>versions-maven-plugin</artifactId>
-<version>2.1</version>
+<version>${reporting.versions-plugin.version}</version>
<reportSets>
<reportSet>
<reports>
@@ -320,17 +320,17 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jxr-plugin</artifactId>
-<version>2.4</version>
+<version>${reporting.jxr-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>cobertura-maven-plugin</artifactId>
-<version>2.6</version>
+<version>${reporting.cobertura-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-report-plugin</artifactId>
-<version>2.16</version>
+<version>${reporting.surefire-report-plugin.version}</version>
<reportSets>
<reportSet>
<reports>
@@ -342,7 +342,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>taglist-maven-plugin</artifactId>
-<version>2.4</version>
+<version>${reporting.taglist-plugin.version}</version>
<configuration>
<tagListOptions>
<tagClasses>
@@ -366,7 +366,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
-<version>2.11</version>
+<version>${reporting.checkstyle-plugin.version}</version>
<configuration>
<enableRulesSummary>false</enableRulesSummary>
<enableFilesSummary>false</enableFilesSummary>
@@ -379,7 +379,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-pmd-plugin</artifactId>
-<version>3.0.1</version>
+<version>${reporting.pmd-plugin.version}</version>
<configuration>
<targetJdk>1.6</targetJdk>
<linkXref>true</linkXref>
@@ -398,7 +398,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
-<version>2.5.3</version>
+<version>${reporting.findbugs-plugin.version}</version>
</plugin>
</plugins>
</reporting>
@@ -423,12 +423,10 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<dependency>
<groupId>org.apache.ant</groupId>
<artifactId>ant</artifactId>
-<version>1.9.4</version>
</dependency>
<dependency>
<groupId>org.apache.ant</groupId>
<artifactId>ant-testutil</artifactId>
-<version>1.9.4</version>
<scope>test</scope>
</dependency>
</dependencies>

AntLoggerAdapter.java (new file)

@@ -0,0 +1,271 @@
/*
* This file is part of dependency-check-ant.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2015 The OWASP Foundation. All Rights Reserved.
*/
package org.owasp.dependencycheck.ant.logging;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.slf4j.helpers.FormattingTuple;
import org.slf4j.helpers.MarkerIgnoringBase;
import org.slf4j.helpers.MessageFormatter;
/**
* An instance of {@link org.slf4j.Logger} which simply calls the log method on the delegate Ant task
*
* @author colezlaw
*/
public class AntLoggerAdapter extends MarkerIgnoringBase {
/**
* A reference to the Ant task used for logging.
*/
private Task task;
/**
* Constructs an Ant Logger Adapter.
*
* @param task the Ant Task to use for logging
*/
public AntLoggerAdapter(Task task) {
super();
this.task = task;
}
/**
* Sets the current Ant task to use for logging.
*
* @param task the Ant task to use for logging
*/
public void setTask(Task task) {
this.task = task;
}
@Override
public boolean isTraceEnabled() {
// Might be a more efficient way to do this, but Ant doesn't enable or disable
// various levels globally - it just fires things at registered Listeners.
return true;
}
@Override
public void trace(String msg) {
task.log(msg, Project.MSG_VERBOSE);
}
@Override
public void trace(String format, Object arg) {
if (task != null) {
final FormattingTuple tp = MessageFormatter.format(format, arg);
task.log(tp.getMessage(), Project.MSG_VERBOSE);
}
}
@Override
public void trace(String format, Object arg1, Object arg2) {
if (task != null) {
final FormattingTuple tp = MessageFormatter.format(format, arg1, arg2);
task.log(tp.getMessage(), Project.MSG_VERBOSE);
}
}
@Override
public void trace(String format, Object... arguments) {
if (task != null) {
final FormattingTuple tp = MessageFormatter.format(format, arguments);
task.log(tp.getMessage(), Project.MSG_VERBOSE);
}
}
@Override
public void trace(String msg, Throwable t) {
if (task != null) {
task.log(msg, t, Project.MSG_VERBOSE);
}
}
@Override
public boolean isDebugEnabled() {
return true;
}
@Override
public void debug(String msg) {
if (task != null) {
task.log(msg, Project.MSG_DEBUG);
}
}
@Override
public void debug(String format, Object arg) {
if (task != null) {
final FormattingTuple tp = MessageFormatter.format(format, arg);
task.log(tp.getMessage(), Project.MSG_DEBUG);
}
}
@Override
public void debug(String format, Object arg1, Object arg2) {
if (task != null) {
final FormattingTuple tp = MessageFormatter.format(format, arg1, arg2);
task.log(tp.getMessage(), Project.MSG_DEBUG);
}
}
@Override
public void debug(String format, Object... arguments) {
if (task != null) {
final FormattingTuple tp = MessageFormatter.format(format, arguments);
task.log(tp.getMessage(), Project.MSG_DEBUG);
}
}
@Override
public void debug(String msg, Throwable t) {
if (task != null) {
task.log(msg, t, Project.MSG_DEBUG);
}
}
@Override
public boolean isInfoEnabled() {
return true;
}
@Override
public void info(String msg) {
if (task != null) {
task.log(msg, Project.MSG_INFO);
}
}
@Override
public void info(String format, Object arg) {
if (task != null) {
final FormattingTuple tp = MessageFormatter.format(format, arg);
task.log(tp.getMessage(), Project.MSG_INFO);
}
}
@Override
public void info(String format, Object arg1, Object arg2) {
if (task != null) {
final FormattingTuple tp = MessageFormatter.format(format, arg1, arg2);
task.log(tp.getMessage(), Project.MSG_INFO);
}
}
@Override
public void info(String format, Object... arguments) {
if (task != null) {
final FormattingTuple tp = MessageFormatter.format(format, arguments);
task.log(tp.getMessage(), Project.MSG_INFO);
}
}
@Override
public void info(String msg, Throwable t) {
if (task != null) {
task.log(msg, t, Project.MSG_INFO);
}
}
@Override
public boolean isWarnEnabled() {
return true;
}
@Override
public void warn(String msg) {
if (task != null) {
task.log(msg, Project.MSG_WARN);
}
}
@Override
public void warn(String format, Object arg) {
if (task != null) {
final FormattingTuple tp = MessageFormatter.format(format, arg);
task.log(tp.getMessage(), Project.MSG_WARN);
}
}
@Override
public void warn(String format, Object... arguments) {
if (task != null) {
final FormattingTuple tp = MessageFormatter.format(format, arguments);
task.log(tp.getMessage(), Project.MSG_WARN);
}
}
@Override
public void warn(String format, Object arg1, Object arg2) {
if (task != null) {
final FormattingTuple tp = MessageFormatter.format(format, arg1, arg2);
task.log(tp.getMessage(), Project.MSG_WARN);
}
}
@Override
public void warn(String msg, Throwable t) {
if (task != null) {
task.log(msg, t, Project.MSG_WARN);
}
}
@Override
public boolean isErrorEnabled() {
return true;
}
@Override
public void error(String msg) {
if (task != null) {
task.log(msg, Project.MSG_ERR);
}
}
@Override
public void error(String format, Object arg) {
if (task != null) {
final FormattingTuple tp = MessageFormatter.format(format, arg);
task.log(tp.getMessage(), Project.MSG_ERR);
}
}
@Override
public void error(String format, Object arg1, Object arg2) {
if (task != null) {
final FormattingTuple tp = MessageFormatter.format(format, arg1, arg2);
task.log(tp.getMessage(), Project.MSG_ERR);
}
}
@Override
public void error(String format, Object... arguments) {
if (task != null) {
final FormattingTuple tp = MessageFormatter.format(format, arguments);
task.log(tp.getMessage(), Project.MSG_ERR);
}
}
@Override
public void error(String msg, Throwable t) {
if (task != null) {
task.log(msg, t, Project.MSG_ERR);
}
}
}
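The adapter above is what makes ordinary SLF4J calls inside dependency-check-core surface as Ant build output. As a quick illustration (a hypothetical snippet, not part of this compare), any class that logs through SLF4J is handed this adapter once the Ant binding is on the classpath, so a call such as `logger.info(...)` is forwarded to `Task.log(..., Project.MSG_INFO)`:
```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical example class; dependency-check-core's real analyzers log the same way.
public class LoggingExample {

    private static final Logger LOGGER = LoggerFactory.getLogger(LoggingExample.class);

    public static void main(String[] args) {
        // With the Ant binding active, this call reaches AntLoggerAdapter.info(String, Object),
        // which formats the message and delegates to Task.log(message, Project.MSG_INFO).
        // With any other SLF4J binding it simply logs through that binding instead.
        LOGGER.info("Scanned {} dependencies", 42);
    }
}
```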

AntLoggerFactory.java (new file)

@@ -0,0 +1,56 @@
/*
* This file is part of dependency-check-ant.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2015 The OWASP Foundation. All Rights Reserved.
*/
package org.owasp.dependencycheck.ant.logging;
import org.apache.tools.ant.Task;
import org.slf4j.ILoggerFactory;
import org.slf4j.Logger;
/**
* An implementation of {@link org.slf4j.ILoggerFactory} which always returns {@link AntLoggerAdapter} instances.
*
* @author colezlaw
*/
public class AntLoggerFactory implements ILoggerFactory {
/**
* A reference to the Ant logger Adapter.
*/
private final AntLoggerAdapter antLoggerAdapter;
/**
* Constructs a new Ant Logger Factory.
*
* @param task the Ant task to use for logging
*/
public AntLoggerFactory(Task task) {
super();
this.antLoggerAdapter = new AntLoggerAdapter(task);
}
/**
* Returns the Ant logger adapter.
*
* @param name ignored in this implementation
* @return the Ant logger adapter
*/
@Override
public Logger getLogger(String name) {
return antLoggerAdapter;
}
}
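A small sketch, not taken from the project, showing the practical effect of this factory: whatever logger name is requested, callers receive the single adapter bound to the Ant task (FactorySketch is illustrative only):

import org.apache.tools.ant.Task;
import org.owasp.dependencycheck.ant.logging.AntLoggerFactory;
import org.slf4j.Logger;

public final class FactorySketch {
    public static void main(String[] args) {
        final Task task = new Task() { }; // Task is abstract but has no abstract methods
        final AntLoggerFactory factory = new AntLoggerFactory(task);
        final Logger a = factory.getLogger("org.owasp.dependencycheck.Engine");
        final Logger b = factory.getLogger("some.other.Name");
        System.out.println(a == b); // true, the name argument is ignored
    }
}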


@@ -0,0 +1,4 @@
/**
* This package includes the Ant task definitions.
*/
package org.owasp.dependencycheck.ant.logging;


@@ -21,9 +21,8 @@ import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.types.EnumeratedAttribute;
import org.apache.tools.ant.types.Reference;
@@ -40,8 +39,8 @@ import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.reporting.ReportGenerator;
import org.owasp.dependencycheck.reporting.ReportGenerator.Format;
import org.owasp.dependencycheck.utils.LogUtils;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.impl.StaticLoggerBinder;
/**
* An Ant task definition to execute dependency-check during an Ant build.
@@ -54,24 +53,19 @@ public class DependencyCheckTask extends Task {
* The properties file location.
*/
private static final String PROPERTIES_FILE = "task.properties";
/**
* Name of the logging properties file.
*/
private static final String LOG_PROPERTIES_FILE = "log.properties";
/**
* System specific new line character.
*/
private static final String NEW_LINE = System.getProperty("line.separator", "\n").intern();
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(DependencyCheckTask.class.getName());
/**
* Construct a new DependencyCheckTask.
*/
public DependencyCheckTask() {
super();
// Call this before Dependency Check Core starts logging anything - this way, all SLF4J messages from
// core end up coming through this task's logger
StaticLoggerBinder.getSingleton().setTask(this);
}
//The following code was copied from Apache Ant PathConvert
//BEGIN COPY from org.apache.tools.ant.taskdefs.PathConvert
@@ -349,7 +343,7 @@ public class DependencyCheckTask extends Task {
*/
@Deprecated
public void setProxyUrl(String proxyUrl) {
LOGGER.warning("A deprecated configuration option 'proxyUrl' was detected; use 'proxyServer' instead.");
log("A deprecated configuration option 'proxyUrl' was detected; use 'proxyServer' instead.", Project.MSG_WARN);
this.proxyServer = proxyUrl;
}
/**
@@ -440,28 +434,6 @@ public class DependencyCheckTask extends Task {
public void setConnectionTimeout(String connectionTimeout) {
this.connectionTimeout = connectionTimeout;
}
/**
* The file path used for verbose logging.
*/
private String logFile = null;
/**
* Get the value of logFile.
*
* @return the value of logFile
*/
public String getLogFile() {
return logFile;
}
/**
* Set the value of logFile.
*
* @param logFile new value of logFile
*/
public void setLogFile(String logFile) {
this.logFile = logFile;
}
/**
* The path to the suppression file.
*/
@@ -925,9 +897,6 @@ public class DependencyCheckTask extends Task {
@Override
public void execute() throws BuildException {
final InputStream in = DependencyCheckTask.class.getClassLoader().getResourceAsStream(LOG_PROPERTIES_FILE);
LogUtils.prepareLogger(in, logFile);
dealWithReferences();
validateConfiguration();
populateSettings();
@@ -958,7 +927,7 @@ public class DependencyCheckTask extends Task {
cve.open();
prop = cve.getDatabaseProperties();
} catch (DatabaseException ex) {
LOGGER.log(Level.FINE, "Unable to retrieve DB Properties", ex);
log("Unable to retrieve DB Properties", ex, Project.MSG_DEBUG);
} finally {
if (cve != null) {
cve.close();
@@ -974,16 +943,15 @@ public class DependencyCheckTask extends Task {
showSummary(engine.getDependencies());
}
} catch (IOException ex) {
LOGGER.log(Level.FINE, "Unable to generate dependency-check report", ex);
log("Unable to generate dependency-check report", ex, Project.MSG_DEBUG);
throw new BuildException("Unable to generate dependency-check report", ex);
} catch (Exception ex) {
LOGGER.log(Level.FINE, "An exception occurred; unable to continue task", ex);
log("An exception occurred; unable to continue task", ex, Project.MSG_DEBUG);
throw new BuildException("An exception occurred; unable to continue task", ex);
}
}
} catch (DatabaseException ex) {
LOGGER.log(Level.SEVERE, "Unable to connect to the dependency-check database; analysis has stopped");
LOGGER.log(Level.FINE, "", ex);
log("Unable to connect to the dependency-check database; analysis has stopped", ex, Project.MSG_ERR);
} finally {
Settings.cleanup(true);
if (engine != null) {
@@ -1017,14 +985,13 @@ public class DependencyCheckTask extends Task {
taskProperties = this.getClass().getClassLoader().getResourceAsStream(PROPERTIES_FILE);
Settings.mergeProperties(taskProperties);
} catch (IOException ex) {
LOGGER.log(Level.WARNING, "Unable to load the dependency-check ant task.properties file.");
LOGGER.log(Level.FINE, null, ex);
log("Unable to load the dependency-check ant task.properties file.", ex, Project.MSG_WARN);
} finally {
if (taskProperties != null) {
try {
taskProperties.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, null, ex);
log("", ex, Project.MSG_DEBUG);
}
}
}
@@ -1176,7 +1143,7 @@ public class DependencyCheckTask extends Task {
final String msg = String.format("%n%n"
+ "One or more dependencies were identified with known vulnerabilities:%n%n%s"
+ "%n%nSee the dependency-check report for more details.%n%n", summary.toString());
LOGGER.log(Level.WARNING, msg);
log(msg, Project.MSG_WARN);
}
}
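For readers scanning the diff above, the removed java.util.logging calls were translated onto Ant message priorities. A hedged summary of that mapping; the helper below does not exist in the project and is only documentation:

import java.util.logging.Level;
import org.apache.tools.ant.Project;

public final class LevelMappingSketch {
    // Mirrors the substitutions made above: SEVERE -> MSG_ERR, WARNING -> MSG_WARN,
    // INFO -> MSG_INFO, FINE and below -> MSG_DEBUG.
    public static int toAntPriority(Level level) {
        if (Level.SEVERE.equals(level)) {
            return Project.MSG_ERR;
        }
        if (Level.WARNING.equals(level)) {
            return Project.MSG_WARN;
        }
        if (Level.INFO.equals(level)) {
            return Project.MSG_INFO;
        }
        return Project.MSG_DEBUG;
    }
}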


@@ -1,4 +1,4 @@
/**
* This package includes the Ant task definitions.
* This package includes a slf4j logging implementation that wraps the Ant logger.
*/
package org.owasp.dependencycheck.taskdefs;


@@ -0,0 +1,103 @@
/*
* This file is part of dependency-check-ant.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2015 The OWASP Foundation. All Rights Reserved.
*/
package org.slf4j.impl;
import org.apache.tools.ant.Task;
import org.owasp.dependencycheck.ant.logging.AntLoggerFactory;
import org.slf4j.ILoggerFactory;
import org.slf4j.spi.LoggerFactoryBinder;
/**
* The binding of {@link LoggerFactory} class with an actual instance of {@link ILoggerFactory} is performed using information
* returned by this class.
*
* @author colezlaw
*/
public class StaticLoggerBinder implements LoggerFactoryBinder {
/**
* The unique instance of this class
*
*/
private static final StaticLoggerBinder SINGLETON = new StaticLoggerBinder();
/**
* Return the singleton of this class.
*
* @return the StaticLoggerBinder singleton
*/
public static final StaticLoggerBinder getSingleton() {
return SINGLETON;
}
/**
* Ant tasks have the log method we actually want to call, so we hang onto the task as a delegate.
*/
private Task task = null;
/**
* Set the Task through which this will log.
*
* @param task the task through which to log
*/
public void setTask(Task task) {
this.task = task;
loggerFactory = new AntLoggerFactory(task);
}
/**
* Declare the version of the SLF4J API this implementation is compiled against. The value of this field is usually modified
* with each release.
*/
// to avoid constant folding by the compiler, this field must *not* be final
public static String REQUESTED_API_VERSION = "1.7.12"; // final
private static final String LOGGER_FACTORY_CLASS = AntLoggerFactory.class.getName();
/**
* The ILoggerFactory instance returned by the {@link #getLoggerFactory} method should always be the same object.
*/
private ILoggerFactory loggerFactory;
/**
* Constructs a new static logger binder.
*/
private StaticLoggerBinder() {
loggerFactory = new AntLoggerFactory(task);
}
/**
* Returns the logger factory.
*
* @return the logger factory
*/
@Override
public ILoggerFactory getLoggerFactory() {
return loggerFactory;
}
/**
* Returns the logger factory class string.
*
* @return the logger factory class string
*/
@Override
public String getLoggerFactoryClassStr() {
return LOGGER_FACTORY_CLASS;
}
}
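To see the whole chain end to end, here is a minimal sketch, assuming dependency-check-ant (which supplies the binder above), slf4j-api, and Ant are on the classpath; BindingSketch and the throw-away task are illustrative only. SLF4J 1.7 locates org.slf4j.impl.StaticLoggerBinder at runtime, so once setTask has been called every LoggerFactory.getLogger call resolves to the AntLoggerAdapter:

import org.apache.tools.ant.DefaultLogger;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.impl.StaticLoggerBinder;

public final class BindingSketch {
    public static void main(String[] args) {
        // A bare project with a console listener so task.log(...) is actually printed.
        final Project project = new Project();
        final DefaultLogger console = new DefaultLogger();
        console.setOutputPrintStream(System.out);
        console.setErrorPrintStream(System.err);
        console.setMessageOutputLevel(Project.MSG_INFO);
        project.addBuildListener(console);
        project.init();

        final Task task = new Task() { }; // anonymous task; Task has no abstract methods
        task.setProject(project);

        // Attach the task before the first logger is requested, as the
        // DependencyCheckTask constructor does.
        StaticLoggerBinder.getSingleton().setTask(task);

        final Logger logger = LoggerFactory.getLogger(BindingSketch.class);
        logger.warn("Scanned {} of {} files", 3, 10); // surfaces via task.log(..., Project.MSG_WARN)
    }
}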


@@ -0,0 +1,4 @@
/**
* This package contains the static binder for the slf4j-ant logger.
*/
package org.slf4j.impl;


@@ -1,23 +0,0 @@
handlers=java.util.logging.ConsoleHandler, java.util.logging.FileHandler
# logging levels
# FINEST, FINER, FINE, CONFIG, INFO, WARNING and SEVERE.
# Configure the ConsoleHandler.
java.util.logging.ConsoleHandler.level=INFO
#org.owasp.dependencycheck.data.nvdcve.xml
# Configure the FileHandler.
#java.util.logging.FileHandler.formatter=java.util.logging.SimpleFormatter
#java.util.logging.FileHandler.level=FINEST
# The following special tokens can be used in the pattern property
# which specifies the location and name of the log file.
# / - standard path separator
# %t - system temporary directory
# %h - value of the user.home system property
# %g - generation number for rotating logs
# %u - unique number to avoid conflicts
# FileHandler writes to %h/demo0.log by default.
#java.util.logging.FileHandler.pattern=./target/dependency-check.log


@@ -23,21 +23,20 @@ Configuration
====================
The following properties can be set on the dependency-check-maven plugin.
Property | Description | Default Value
---------------------|------------------------------------|------------------
autoUpdate | Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not recommended that this be turned to false. | true
updateOnly | If set to true only the update phase of dependency-check will be executed; no scan will be executed and no report will be generated. | false
externalReport | When using as a Site plugin this parameter sets whether or not the external report format should be used. | false
outputDirectory | The location to write the report(s). Note, this is not used if generating the report as part of a `mvn site` build | 'target'
failBuildOnCVSS | Specifies if the build should be failed if a CVSS score above a specified level is identified. The default is 11; since CVSS scores range from 0 to 10, the build will never fail by default. | 11
format | The report format to be generated (HTML, XML, VULN, ALL). This configuration option has no effect if using this within the Site plugin unless the externalReport is set to true. | HTML
logFile | The file path to write verbose logging information. | &nbsp;
suppressionFile | The file path to the XML suppression file \- used to suppress [false positives](../suppression.html) | &nbsp;
proxyServer | The Proxy Server. | &nbsp;
proxyPort | The Proxy Port. | &nbsp;
proxyUsername | Defines the proxy user name. | &nbsp;
proxyPassword | Defines the proxy password. | &nbsp;
connectionTimeout | The URL Connection Timeout. | &nbsp;
Property | Description | Default Value
----------------------|------------------------------------|------------------
autoUpdate | Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not recommended that this be turned to false. | true
updateOnly | If set to true only the update phase of dependency-check will be executed; no scan will be executed and no report will be generated. | false
externalReport | When using as a Site plugin this parameter sets whether or not the external report format should be used. | false
reportOutputDirectory | The location to write the report(s). Note, this is not used if generating the report as part of a `mvn site` build | 'target'
failBuildOnCVSS | Specifies if the build should be failed if a CVSS score above a specified level is identified. The default is 11; since CVSS scores range from 0 to 10, the build will never fail by default. | 11
reportFormat | The report format to be generated (HTML, XML, VULN, ALL). This configuration option has no effect if using this within the Site plugin unless the externalReport is set to true. | HTML
suppressionFile | The file path to the XML suppression file \- used to suppress [false positives](../general/suppression.html) | &nbsp;
proxyServer | The Proxy Server. | &nbsp;
proxyPort | The Proxy Port. | &nbsp;
proxyUsername | Defines the proxy user name. | &nbsp;
proxyPassword | Defines the proxy password. | &nbsp;
connectionTimeout | The URL Connection Timeout. | &nbsp;
Analyzer Configuration
====================
@@ -65,12 +64,12 @@ Advanced Configuration
The following properties can be configured in the plugin. However, they are less frequently changed. One exception
may be the cveUrl properties, which can be used to host a mirror of the NVD within an enterprise environment.
Property | Description | Default Value
---------------------|-------------------------------------------------------------------------|------------------
cveUrl12Modified | URL for the modified CVE 1.2 | http://nvd.nist.gov/download/nvdcve-modified.xml
cveUrl20Modified | URL for the modified CVE 2.0 | http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-modified.xml
cveUrl12Base | Base URL for each year's CVE 1.2, the %d will be replaced with the year | http://nvd.nist.gov/download/nvdcve-%d.xml
cveUrl20Base | Base URL for each year's CVE 2.0, the %d will be replaced with the year | http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml
Property | Description | Default Value
---------------------|--------------------------------------------------------------------------|------------------
cveUrl12Modified | URL for the modified CVE 1.2. | http://nvd.nist.gov/download/nvdcve-modified.xml
cveUrl20Modified | URL for the modified CVE 2.0. | http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-modified.xml
cveUrl12Base | Base URL for each year's CVE 1.2, the %d will be replaced with the year. | http://nvd.nist.gov/download/nvdcve-%d.xml
cveUrl20Base | Base URL for each year's CVE 2.0, the %d will be replaced with the year. | http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml
dataDirectory | Data directory to hold SQL CVEs contents. This should generally not be changed. | &nbsp;
databaseDriverName | The name of the database driver. Example: org.h2.Driver. | &nbsp;
databaseDriverPath | The path to the database driver JAR file; only used if the driver is not in the class path. | &nbsp;


@@ -18,34 +18,41 @@
package org.owasp.dependencycheck.taskdefs;
import java.io.File;
import org.apache.tools.ant.BuildFileTest;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.BuildFileRule;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.owasp.dependencycheck.data.nvdcve.BaseDBTestCase;
import org.owasp.dependencycheck.utils.Settings;
import static org.junit.Assert.assertTrue;
/**
*
* @author Jeremy Long
*/
public class DependencyCheckTaskTest extends BuildFileTest {
//TODO: The use of the deprecated class BuildFileTest can possibly
//be replaced with BuildFileRule. However, it currently isn't included in the ant-testutil jar.
//This should be fixed in ant-testutil 1.9.5, so we can check back once that has been released.
//Reference: http://mail-archives.apache.org/mod_mbox/ant-user/201406.mbox/%3C000001cf87ba$8949b690$9bdd23b0$@de%3E
public class DependencyCheckTaskTest {
@Rule
public BuildFileRule buildFileRule = new BuildFileRule();
@Rule
public ExpectedException expectedException = ExpectedException.none();
@Before
@Override
public void setUp() throws Exception {
Settings.initialize();
BaseDBTestCase.ensureDBExists();
final String buildFile = this.getClass().getClassLoader().getResource("build.xml").getPath();
configureProject(buildFile);
buildFileRule.configureProject(buildFile);
}
@After
@Override
public void tearDown() {
//no cleanup...
//executeTarget("cleanup");
@@ -63,7 +70,7 @@ public class DependencyCheckTaskTest extends BuildFileTest {
throw new Exception("Unable to delete 'target/DependencyCheck-Report.html' prior to test.");
}
}
executeTarget("test.fileset");
buildFileRule.executeTarget("test.fileset");
assertTrue("DependencyCheck report was not generated", report.exists());
@@ -82,7 +89,7 @@ public class DependencyCheckTaskTest extends BuildFileTest {
throw new Exception("Unable to delete 'target/DependencyCheck-Report.xml' prior to test.");
}
}
executeTarget("test.filelist");
buildFileRule.executeTarget("test.filelist");
assertTrue("DependencyCheck report was not generated", report.exists());
}
@@ -100,7 +107,7 @@ public class DependencyCheckTaskTest extends BuildFileTest {
throw new Exception("Unable to delete 'target/DependencyCheck-Vulnerability.html' prior to test.");
}
}
executeTarget("test.dirset");
buildFileRule.executeTarget("test.dirset");
assertTrue("DependencyCheck report was not generated", report.exists());
}
@@ -109,7 +116,7 @@ public class DependencyCheckTaskTest extends BuildFileTest {
*/
@Test
public void testGetFailBuildOnCVSS() {
expectBuildException("failCVSS", "asdfasdfscore");
System.out.println(this.getOutput());
expectedException.expect(BuildException.class);
buildFileRule.executeTarget("failCVSS");
}
}


@@ -20,7 +20,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>1.2.11</version>
<version>1.3.0</version>
</parent>
<artifactId>dependency-check-cli</artifactId>
@@ -44,6 +44,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<directory>src/main/resources</directory>
<includes>
<include>**/*.properties</include>
<include>logback.xml</include>
</includes>
<filtering>true</filtering>
</resource>
@@ -180,7 +181,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-project-info-reports-plugin</artifactId>
<version>2.7</version>
<version>${reporting.project-info-reports-plugin.version}</version>
<reportSets>
<reportSet>
<reports>
@@ -194,10 +195,10 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>2.9.1</version>
<version>${reporting.javadoc-plugin.version}</version>
<configuration>
<failOnError>false</failOnError>
<bottom>Copyright© 2012-15 Jeremy Long. All Rights Reserved.</bottom>
<bottom>Copyright© 2012-15 Jeremy Long. All Rights Reserved.</bottom>
</configuration>
<reportSets>
<reportSet>
@@ -211,7 +212,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>versions-maven-plugin</artifactId>
<version>2.1</version>
<version>${reporting.versions-plugin.version}</version>
<reportSets>
<reportSet>
<reports>
@@ -224,17 +225,17 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jxr-plugin</artifactId>
<version>2.4</version>
<version>${reporting.jxr-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>cobertura-maven-plugin</artifactId>
<version>2.6</version>
<version>${reporting.cobertura-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-report-plugin</artifactId>
<version>2.16</version>
<version>${reporting.surefire-report-plugin.version}</version>
<reportSets>
<reportSet>
<reports>
@@ -246,7 +247,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>taglist-maven-plugin</artifactId>
<version>2.4</version>
<version>${reporting.taglist-plugin.version}</version>
<configuration>
<tagListOptions>
<tagClasses>
@@ -270,7 +271,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>2.11</version>
<version>${reporting.checkstyle-plugin.version}</version>
<configuration>
<enableRulesSummary>false</enableRulesSummary>
<enableFilesSummary>false</enableFilesSummary>
@@ -283,7 +284,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-pmd-plugin</artifactId>
<version>3.1</version>
<version>${reporting.pmd-plugin.version}</version>
<configuration>
<targetJdk>1.6</targetJdk>
<linkXref>true</linkXref>
@@ -302,7 +303,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
<version>2.5.3</version>
<version>${reporting.findbugs-plugin.version}</version>
</plugin>
</plugins>
</reporting>
@@ -310,7 +311,6 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
<version>1.2</version>
</dependency>
<dependency>
<groupId>org.owasp</groupId>
@@ -322,5 +322,17 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<artifactId>dependency-check-utils</artifactId>
<version>${project.parent.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
</dependency>
</dependencies>
</project>


@@ -3,8 +3,7 @@
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2
http://maven.apache.org/xsd/assembly-1.1.2.xsd"
>
http://maven.apache.org/xsd/assembly-1.1.2.xsd">
<id>release</id>
<formats>
<format>zip</format>
@@ -12,25 +11,41 @@
<includeBaseDirectory>false</includeBaseDirectory>
<fileSets>
<fileSet>
<outputDirectory>/</outputDirectory>
<directory>${project.build.directory}/release</directory>
<outputDirectory>dependency-check/bin</outputDirectory>
<directory>${project.build.directory}/release/bin</directory>
<includes>
<include>*.sh</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
<fileSet>
<outputDirectory>dependency-check/bin</outputDirectory>
<directory>${project.build.directory}/release/bin</directory>
<includes>
<include>*.bat</include>
</includes>
</fileSet>
<fileSet>
<outputDirectory>dependency-check/repo</outputDirectory>
<directory>${project.build.directory}/release/repo</directory>
</fileSet>
<fileSet>
<outputDirectory>dependency-check</outputDirectory>
<includes>
<include>LICENSE*</include>
<include>NOTICE*</include>
</includes>
</fileSet>
<fileSet>
<outputDirectory>licenses</outputDirectory>
<outputDirectory>dependency-check/licenses</outputDirectory>
<directory>${basedir}/src/main/resources/META-INF/licenses</directory>
</fileSet>
<fileSet>
<outputDirectory>licenses</outputDirectory>
<outputDirectory>dependency-check/licenses</outputDirectory>
<directory>${basedir}/../dependency-check-core/src/main/resources/META-INF/licenses</directory>
</fileSet>
<fileSet>
<outputDirectory>/</outputDirectory>
<outputDirectory>dependency-check</outputDirectory>
<directory>${basedir}</directory>
<includes>
<include>README.md</include>


@@ -17,17 +17,15 @@
*/
package org.owasp.dependencycheck;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.cli.ParseException;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
@@ -35,8 +33,11 @@ import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.org.apache.tools.ant.DirectoryScanner;
import org.owasp.dependencycheck.reporting.ReportGenerator;
import org.owasp.dependencycheck.utils.LogUtils;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ch.qos.logback.core.FileAppender;
import org.slf4j.impl.StaticLoggerBinder;
/**
* The command line interface for the DependencyCheck application.
@@ -45,15 +46,10 @@ import org.owasp.dependencycheck.utils.Settings;
*/
public class App {
/**
* The location of the log properties configuration file.
*/
private static final String LOG_PROPERTIES_FILE = "log.properties";
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(App.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(App.class);
/**
* The main method for the application.
@@ -90,8 +86,9 @@ public class App {
return;
}
final InputStream in = App.class.getClassLoader().getResourceAsStream(LOG_PROPERTIES_FILE);
LogUtils.prepareLogger(in, cli.getVerboseLog());
if (cli.getVerboseLog() != null) {
prepareLogger(cli.getVerboseLog());
}
if (cli.isGetVersion()) {
cli.printVersionInfo();
@@ -101,9 +98,10 @@ public class App {
} else if (cli.isRunScan()) {
populateSettings(cli);
try {
runScan(cli.getReportDirectory(), cli.getReportFormat(), cli.getApplicationName(), cli.getScanFiles(), cli.getExcludeList());
runScan(cli.getReportDirectory(), cli.getReportFormat(), cli.getApplicationName(), cli.getScanFiles(),
cli.getExcludeList(), cli.getSymLinkDepth());
} catch (InvalidScanPathException ex) {
LOGGER.log(Level.SEVERE, "An invalid scan path was detected; unable to scan '//*' paths");
LOGGER.error("An invalid scan path was detected; unable to scan '//*' paths");
}
} else {
cli.printHelp();
@@ -118,55 +116,51 @@ public class App {
* @param applicationName the application name for the report
* @param files the files/directories to scan
* @param excludes the patterns for files/directories to exclude
* @param symLinkDepth the depth that symbolic links will be followed
*
* @throws InvalidScanPathException thrown if the path to scan starts with "//"
*/
private void runScan(String reportDirectory, String outputFormat, String applicationName, String[] files,
String[] excludes) throws InvalidScanPathException {
String[] excludes, int symLinkDepth) throws InvalidScanPathException {
Engine engine = null;
try {
engine = new Engine();
List<String> antStylePaths = new ArrayList<String>();
if (excludes == null || excludes.length == 0) {
for (String file : files) {
if (file.contains("*") || file.contains("?")) {
antStylePaths.add(file);
} else {
engine.scan(file);
}
}
} else {
antStylePaths = Arrays.asList(files);
final List<String> antStylePaths = new ArrayList<String>();
for (String file : files) {
final String antPath = ensureCanonicalPath(file);
antStylePaths.add(antPath);
}
final Set<File> paths = new HashSet<File>();
for (String file : antStylePaths) {
LOGGER.debug("Scanning {}", file);
final DirectoryScanner scanner = new DirectoryScanner();
String include = file.replace('\\', '/');
File baseDir;
if (include.startsWith("//")) {
throw new InvalidScanPathException("Unable to scan paths specified by //");
} else if (include.startsWith("./")) {
baseDir = new File(".");
include = include.substring(2);
} else if (include.startsWith("/")) {
baseDir = new File("/");
include = include.substring(1);
} else if (include.contains("/")) {
final int pos = include.indexOf('/');
final String tmp = include.substring(0, pos);
if (tmp.contains("*") || tmp.contains("?")) {
baseDir = new File(".");
} else {
final int pos = getLastFileSeparator(include);
final String tmpBase = include.substring(0, pos);
final String tmpInclude = include.substring(pos + 1);
if (tmpInclude.indexOf('*') >= 0 || tmpInclude.indexOf('?') >= 0
|| (new File(include)).isFile()) {
baseDir = new File(tmpBase);
include = tmpInclude;
} else {
baseDir = new File(tmp);
include = include.substring(pos + 1);
baseDir = new File(tmpBase, tmpInclude);
include = "**/*";
}
} else { //no path info - must just be a file in the working directory
baseDir = new File(".");
}
//LOGGER.debug("baseDir: {}", baseDir);
//LOGGER.debug("include: {}", include);
scanner.setBasedir(baseDir);
scanner.setIncludes(include);
scanner.setMaxLevelsOfSymlinks(symLinkDepth);
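// a depth of 0 (the CLI default) disables symbolic link traversal entirely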
if (symLinkDepth <= 0) {
scanner.setFollowSymlinks(false);
}
if (excludes != null && excludes.length > 0) {
scanner.addExcludes(excludes);
}
@@ -174,6 +168,7 @@ public class App {
if (scanner.getIncludedFilesCount() > 0) {
for (String s : scanner.getIncludedFiles()) {
final File f = new File(baseDir, s);
LOGGER.debug("Found file {}", f.toString());
paths.add(f);
}
}
@@ -189,7 +184,7 @@ public class App {
cve.open();
prop = cve.getDatabaseProperties();
} catch (DatabaseException ex) {
LOGGER.log(Level.FINE, "Unable to retrieve DB Properties", ex);
LOGGER.debug("Unable to retrieve DB Properties", ex);
} finally {
if (cve != null) {
cve.close();
@@ -199,15 +194,15 @@ public class App {
try {
report.generateReports(reportDirectory, outputFormat);
} catch (IOException ex) {
LOGGER.log(Level.SEVERE, "There was an IO error while attempting to generate the report.");
LOGGER.log(Level.FINE, null, ex);
LOGGER.error("There was an IO error while attempting to generate the report.");
LOGGER.debug("", ex);
} catch (Throwable ex) {
LOGGER.log(Level.SEVERE, "There was an error while attempting to generate the report.");
LOGGER.log(Level.FINE, null, ex);
LOGGER.error("There was an error while attempting to generate the report.");
LOGGER.debug("", ex);
}
} catch (DatabaseException ex) {
LOGGER.log(Level.SEVERE, "Unable to connect to the dependency-check database; analysis has stopped");
LOGGER.log(Level.FINE, "", ex);
LOGGER.error("Unable to connect to the dependency-check database; analysis has stopped");
LOGGER.debug("", ex);
} finally {
if (engine != null) {
engine.cleanup();
@@ -224,8 +219,8 @@ public class App {
engine = new Engine();
engine.doUpdates();
} catch (DatabaseException ex) {
LOGGER.log(Level.SEVERE, "Unable to connect to the dependency-check database; analysis has stopped");
LOGGER.log(Level.FINE, "", ex);
LOGGER.error("Unable to connect to the dependency-check database; analysis has stopped");
LOGGER.debug("", ex);
} finally {
if (engine != null) {
engine.cleanup();
@@ -253,7 +248,9 @@ public class App {
final boolean jarDisabled = cli.isJarDisabled();
final boolean archiveDisabled = cli.isArchiveDisabled();
final boolean pyDistDisabled = cli.isPythonDistributionDisabled();
final boolean cMakeDisabled = cli.isCmakeDisabled();
final boolean pyPkgDisabled = cli.isPythonPackageDisabled();
final boolean autoconfDisabled = cli.isAutoconfDisabled();
final boolean assemblyDisabled = cli.isAssemblyDisabled();
final boolean nuspecDisabled = cli.isNuspecDisabled();
final boolean centralDisabled = cli.isCentralDisabled();
@@ -266,18 +263,20 @@ public class App {
final String databasePassword = cli.getDatabasePassword();
final String additionalZipExtensions = cli.getAdditionalZipExtensions();
final String pathToMono = cli.getPathToMono();
final String cveMod12 = cli.getModifiedCve12Url();
final String cveMod20 = cli.getModifiedCve20Url();
final String cveBase12 = cli.getBaseCve12Url();
final String cveBase20 = cli.getBaseCve20Url();
if (propertiesFile != null) {
try {
Settings.mergeProperties(propertiesFile);
} catch (FileNotFoundException ex) {
final String msg = String.format("Unable to load properties file '%s'", propertiesFile.getPath());
LOGGER.log(Level.SEVERE, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.error("Unable to load properties file '{}'", propertiesFile.getPath());
LOGGER.debug("", ex);
} catch (IOException ex) {
final String msg = String.format("Unable to find properties file '%s'", propertiesFile.getPath());
LOGGER.log(Level.SEVERE, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.error("Unable to find properties file '{}'", propertiesFile.getPath());
LOGGER.debug("", ex);
}
}
// We have to wait until we've merged the properties before attempting to set whether we use
@@ -321,8 +320,11 @@ public class App {
Settings.setBoolean(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, !archiveDisabled);
Settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, !pyDistDisabled);
Settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED, !pyPkgDisabled);
Settings.setBoolean(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, !autoconfDisabled);
Settings.setBoolean(Settings.KEYS.ANALYZER_CMAKE_ENABLED, !cMakeDisabled);
Settings.setBoolean(Settings.KEYS.ANALYZER_NUSPEC_ENABLED, !nuspecDisabled);
Settings.setBoolean(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED, !assemblyDisabled);
Settings.setBoolean(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, !cli.isOpenSSLDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, !centralDisabled);
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, !nexusDisabled);
@@ -351,5 +353,99 @@ public class App {
if (pathToMono != null && !pathToMono.isEmpty()) {
Settings.setString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
}
if (cveBase12 != null && !cveBase12.isEmpty()) {
Settings.setString(Settings.KEYS.CVE_SCHEMA_1_2, cveBase12);
Settings.setString(Settings.KEYS.CVE_SCHEMA_2_0, cveBase20);
Settings.setString(Settings.KEYS.CVE_MODIFIED_12_URL, cveMod12);
Settings.setString(Settings.KEYS.CVE_MODIFIED_20_URL, cveMod20);
}
}
/**
* Creates a file appender and adds it to logback.
*
* @param verboseLog the path to the verbose log file
*/
private void prepareLogger(String verboseLog) {
final StaticLoggerBinder loggerBinder = StaticLoggerBinder.getSingleton();
final LoggerContext context = (LoggerContext) loggerBinder.getLoggerFactory();
final PatternLayoutEncoder encoder = new PatternLayoutEncoder();
encoder.setPattern("%d %C:%L%n%-5level - %msg%n");
encoder.setContext(context);
encoder.start();
final FileAppender fa = new FileAppender();
fa.setAppend(true);
fa.setEncoder(encoder);
fa.setContext(context);
fa.setFile(verboseLog);
final File f = new File(verboseLog);
String name = f.getName();
final int i = name.lastIndexOf('.');
if (i > 1) {
name = name.substring(0, i);
}
fa.setName(name);
fa.start();
final ch.qos.logback.classic.Logger rootLogger = context.getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME);
rootLogger.addAppender(fa);
}
/**
* Takes a path and resolves it to be a canonical & absolute path. The caveats are that this method will take an Ant style
* file selector path (../someDir/**\/*.jar) and convert it to an absolute/canonical path (at least to the left of the first *
* or ?).
*
* @param path the path to canonicalize
* @return the canonical path
*/
protected String ensureCanonicalPath(String path) {
String basePath = null;
String wildCards = null;
final String file = path.replace('\\', '/');
if (file.contains("*") || file.contains("?")) {
int pos = getLastFileSeparator(file);
if (pos < 0) {
return file;
}
pos += 1;
basePath = file.substring(0, pos);
wildCards = file.substring(pos);
} else {
basePath = file;
}
File f = new File(basePath);
try {
f = f.getCanonicalFile();
if (wildCards != null) {
f = new File(f, wildCards);
}
} catch (IOException ex) {
LOGGER.warn("Invalid path '{}' was provided.", path);
LOGGER.debug("Invalid path provided", ex);
}
return f.getAbsolutePath().replace('\\', '/');
}
/**
* Returns the position of the last file separator.
*
* @param file a file path
* @return the position of the last file separator
*/
private int getLastFileSeparator(String file) {
if (file.contains("*") || file.contains("?")) {
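// locate the last '/' that appears before the first wildcard; everything before it is a literal directory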
int p1 = file.indexOf('*');
int p2 = file.indexOf('?');
p1 = p1 > 0 ? p1 : file.length();
p2 = p2 > 0 ? p2 : file.length();
int pos = p1 < p2 ? p1 : p2;
pos = file.lastIndexOf('/', pos);
return pos;
} else {
return file.lastIndexOf('/');
}
}
}
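A standalone restatement, with illustrative names, of the split that ensureCanonicalPath and getLastFileSeparator perform on wildcard paths; the canonicalization of the base directory via File.getCanonicalFile is deliberately left out:

public final class WildcardSplitSketch {
    public static void main(String[] args) {
        show("../lib/**/*.jar"); // base "../lib/", pattern "**/*.jar"
        show("lib/*.jar");       // base "lib/", pattern "*.jar"
        show("*.jar");           // no '/' before the wildcard, the path is used as-is
    }

    private static void show(String path) {
        final String file = path.replace('\\', '/');
        int p1 = file.indexOf('*');
        int p2 = file.indexOf('?');
        p1 = p1 > 0 ? p1 : file.length();
        p2 = p2 > 0 ? p2 : file.length();
        // the last '/' before the first wildcard separates the real directory from the pattern
        final int pos = file.lastIndexOf('/', Math.min(p1, p2));
        if (pos < 0) {
            System.out.println(file + "  ->  used as-is");
        } else {
            System.out.println(file + "  ->  base=" + file.substring(0, pos + 1)
                    + "  pattern=" + file.substring(pos + 1));
        }
    }
}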


@@ -19,7 +19,6 @@ package org.owasp.dependencycheck;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.logging.Logger;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -33,6 +32,8 @@ import org.apache.commons.cli.PosixParser;
import org.owasp.dependencycheck.reporting.ReportGenerator.Format;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A utility to parse command line arguments for the DependencyCheck.
@@ -44,7 +45,7 @@ public final class CliParser {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(CliParser.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(CliParser.class);
/**
* The command line.
*/
@@ -109,6 +110,21 @@ public final class CliParser {
throw new ParseException(msg);
}
}
if ((getBaseCve12Url() != null || getBaseCve20Url() != null || getModifiedCve12Url() != null || getModifiedCve20Url() != null)
&& (getBaseCve12Url() == null || getBaseCve20Url() == null || getModifiedCve12Url() == null || getModifiedCve20Url() == null)) {
final String msg = "If one of the CVE URLs is specified they must all be specified; please add the missing CVE URL.";
throw new ParseException(msg);
}
if (line.hasOption((ARGUMENT.SYM_LINK_DEPTH))) {
try {
final int i = Integer.parseInt(line.getOptionValue(ARGUMENT.SYM_LINK_DEPTH));
if (i < 0) {
throw new ParseException("Symbolic Link Depth (symLink) must be greater than zero.");
}
} catch (NumberFormatException ex) {
throw new ParseException("Symbolic Link Depth (symLink) is not a number.");
}
}
}
}
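The first new check above enforces an all-or-nothing rule across the four CVE URL arguments: either none of them is given or all four must be. A hedged restatement with a hypothetical helper:

public final class CveUrlRuleSketch {
    static boolean violatesAllOrNothing(String... urls) {
        boolean any = false;
        boolean all = true;
        for (String url : urls) {
            any |= (url != null);
            all &= (url != null);
        }
        return any && !all; // at least one supplied, but not all of them
    }

    public static void main(String[] args) {
        System.out.println(violatesAllOrNothing(null, null, null, null)); // false, none supplied
        System.out.println(violatesAllOrNothing("a", "b", "c", "d"));     // false, all supplied
        System.out.println(violatesAllOrNothing("a", null, "c", "d"));    // true, incomplete set
    }
}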
@@ -232,6 +248,10 @@ public final class CliParser {
.withDescription("The file path to write verbose logging information.")
.create(ARGUMENT.VERBOSE_LOG_SHORT);
final Option symLinkDepth = OptionBuilder.withArgName("depth").hasArg().withLongOpt(ARGUMENT.SYM_LINK_DEPTH)
.withDescription("Sets how deep nested symbolic links will be followed; 0 indicates symbolic links will not be followed.")
.create();
final Option suppressionFile = OptionBuilder.withArgName("file").hasArg().withLongOpt(ARGUMENT.SUPPRESSION_FILE)
.withDescription("The file path to the suppression XML file.")
.create();
@@ -252,6 +272,7 @@ public final class CliParser {
.addOption(help)
.addOption(advancedHelp)
.addOption(noUpdate)
.addOption(symLinkDepth)
.addOption(props)
.addOption(verboseLog)
.addOption(suppressionFile);
@@ -267,6 +288,22 @@ public final class CliParser {
@SuppressWarnings("static-access")
private void addAdvancedOptions(final Options options) throws IllegalArgumentException {
final Option cve12Base = OptionBuilder.withArgName("url").hasArg().withLongOpt(ARGUMENT.CVE_BASE_12)
.withDescription("Base URL for each years CVE 1.2, the %d will be replaced with the year. ")
.create();
final Option cve20Base = OptionBuilder.withArgName("url").hasArg().withLongOpt(ARGUMENT.CVE_BASE_20)
.withDescription("Base URL for each years CVE 2.0, the %d will be replaced with the year.")
.create();
final Option cve12Modified = OptionBuilder.withArgName("url").hasArg().withLongOpt(ARGUMENT.CVE_MOD_12)
.withDescription("URL for the modified CVE 1.2.")
.create();
final Option cve20Modified = OptionBuilder.withArgName("url").hasArg().withLongOpt(ARGUMENT.CVE_MOD_20)
.withDescription("URL for the modified CVE 2.0.")
.create();
final Option updateOnly = OptionBuilder.withLongOpt(ARGUMENT.UPDATE_ONLY)
.withDescription("Only update the local NVD data cache; no scan will be executed.").create();
@@ -274,79 +311,9 @@ public final class CliParser {
.withDescription("The location of the H2 Database file. This option should generally not be set.")
.create(ARGUMENT.DATA_DIRECTORY_SHORT);
final Option connectionTimeout = OptionBuilder.withArgName("timeout").hasArg().withLongOpt(ARGUMENT.CONNECTION_TIMEOUT)
.withDescription("The connection timeout (in milliseconds) to use when downloading resources.")
.create(ARGUMENT.CONNECTION_TIMEOUT_SHORT);
final Option proxyServer = OptionBuilder.withArgName("server").hasArg().withLongOpt(ARGUMENT.PROXY_SERVER)
.withDescription("The proxy server to use when downloading resources.")
.create();
final Option proxyPort = OptionBuilder.withArgName("port").hasArg().withLongOpt(ARGUMENT.PROXY_PORT)
.withDescription("The proxy port to use when downloading resources.")
.create();
final Option proxyUsername = OptionBuilder.withArgName("user").hasArg().withLongOpt(ARGUMENT.PROXY_USERNAME)
.withDescription("The proxy username to use when downloading resources.")
.create();
final Option proxyPassword = OptionBuilder.withArgName("pass").hasArg().withLongOpt(ARGUMENT.PROXY_PASSWORD)
.withDescription("The proxy password to use when downloading resources.")
.create();
final Option connectionString = OptionBuilder.withArgName("connStr").hasArg().withLongOpt(ARGUMENT.CONNECTION_STRING)
.withDescription("The connection string to the database.")
.create();
final Option dbUser = OptionBuilder.withArgName("user").hasArg().withLongOpt(ARGUMENT.DB_NAME)
.withDescription("The username used to connect to the database.")
.create();
final Option dbPassword = OptionBuilder.withArgName("password").hasArg().withLongOpt(ARGUMENT.DB_PASSWORD)
.withDescription("The password for connecting to the database.")
.create();
final Option dbDriver = OptionBuilder.withArgName("driver").hasArg().withLongOpt(ARGUMENT.DB_DRIVER)
.withDescription("The database driver name.")
.create();
final Option dbDriverPath = OptionBuilder.withArgName("path").hasArg().withLongOpt(ARGUMENT.DB_DRIVER_PATH)
.withDescription("The path to the database driver; note, this does not need to be set unless the JAR is outside of the classpath.")
.create();
final Option disableJarAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_JAR)
.withDescription("Disable the Jar Analyzer.")
.create();
final Option disableArchiveAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_ARCHIVE)
.withDescription("Disable the Archive Analyzer.")
.create();
final Option disableNuspecAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_NUSPEC)
.withDescription("Disable the Nuspec Analyzer.")
.create();
final Option disableAssemblyAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_ASSEMBLY)
.withDescription("Disable the .NET Assembly Analyzer.")
.create();
final Option disablePythonDistributionAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_PY_DIST)
.withDescription("Disable the Python Distribution Analyzer.").create();
final Option disablePythonPackageAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_PY_PKG)
.withDescription("Disable the Python Package Analyzer.").create();
final Option disableCentralAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_CENTRAL)
.withDescription("Disable the Central Analyzer. If this analyzer is disabled it is likely you also want to disable "
+ "the Nexus Analyzer.")
.create();
final Option disableNexusAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_NEXUS)
.withDescription("Disable the Nexus Analyzer.")
.create();
final Option nexusUrl = OptionBuilder.withArgName("url").hasArg().withLongOpt(ARGUMENT.NEXUS_URL)
.withDescription("The url to the Nexus Server's REST API Endpoint (http://domain/nexus/service/local). "
+ "If not set the Nexus Analyzer will be disabled.")
.create();
+ "If not set the Nexus Analyzer will be disabled.").create();
final Option nexusUsesProxy = OptionBuilder.withArgName("true/false").hasArg().withLongOpt(ARGUMENT.NEXUS_USES_PROXY)
.withDescription("Whether or not the configured proxy should be used when connecting to Nexus.")
@@ -355,14 +322,83 @@ public final class CliParser {
final Option additionalZipExtensions = OptionBuilder.withArgName("extensions").hasArg()
.withLongOpt(ARGUMENT.ADDITIONAL_ZIP_EXTENSIONS)
.withDescription("A comma separated list of additional extensions to be scanned as ZIP files "
+ "(ZIP, EAR, WAR are already treated as zip files)")
.create();
+ "(ZIP, EAR, WAR are already treated as zip files)").create();
final Option pathToMono = OptionBuilder.withArgName("path").hasArg().withLongOpt(ARGUMENT.PATH_TO_MONO)
.withDescription("The path to Mono for .NET Assembly analysis on non-windows systems.")
.create();
final Option connectionTimeout = OptionBuilder.withArgName("timeout").hasArg().withLongOpt(ARGUMENT.CONNECTION_TIMEOUT)
.withDescription("The connection timeout (in milliseconds) to use when downloading resources.")
.create(ARGUMENT.CONNECTION_TIMEOUT_SHORT);
final Option proxyServer = OptionBuilder.withArgName("server").hasArg().withLongOpt(ARGUMENT.PROXY_SERVER)
.withDescription("The proxy server to use when downloading resources.").create();
final Option proxyPort = OptionBuilder.withArgName("port").hasArg().withLongOpt(ARGUMENT.PROXY_PORT)
.withDescription("The proxy port to use when downloading resources.").create();
final Option proxyUsername = OptionBuilder.withArgName("user").hasArg().withLongOpt(ARGUMENT.PROXY_USERNAME)
.withDescription("The proxy username to use when downloading resources.").create();
final Option proxyPassword = OptionBuilder.withArgName("pass").hasArg().withLongOpt(ARGUMENT.PROXY_PASSWORD)
.withDescription("The proxy password to use when downloading resources.").create();
final Option connectionString = OptionBuilder.withArgName("connStr").hasArg().withLongOpt(ARGUMENT.CONNECTION_STRING)
.withDescription("The connection string to the database.").create();
final Option dbUser = OptionBuilder.withArgName("user").hasArg().withLongOpt(ARGUMENT.DB_NAME)
.withDescription("The username used to connect to the database.").create();
final Option dbPassword = OptionBuilder.withArgName("password").hasArg().withLongOpt(ARGUMENT.DB_PASSWORD)
.withDescription("The password for connecting to the database.").create();
final Option dbDriver = OptionBuilder.withArgName("driver").hasArg().withLongOpt(ARGUMENT.DB_DRIVER)
.withDescription("The database driver name.").create();
final Option dbDriverPath = OptionBuilder.withArgName("path").hasArg().withLongOpt(ARGUMENT.DB_DRIVER_PATH)
.withDescription("The path to the database driver; note, this does not need to be set unless the JAR is outside of the classpath.")
.create();
final Option disableJarAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_JAR)
.withDescription("Disable the Jar Analyzer.").create();
final Option disableArchiveAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_ARCHIVE)
.withDescription("Disable the Archive Analyzer.").create();
final Option disableNuspecAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_NUSPEC)
.withDescription("Disable the Nuspec Analyzer.").create();
final Option disableAssemblyAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_ASSEMBLY)
.withDescription("Disable the .NET Assembly Analyzer.").create();
final Option disablePythonDistributionAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_PY_DIST)
.withDescription("Disable the Python Distribution Analyzer.").create();
final Option disablePythonPackageAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_PY_PKG)
.withDescription("Disable the Python Package Analyzer.").create();
final Option disableAutoconfAnalyzer = OptionBuilder
.withLongOpt(ARGUMENT.DISABLE_AUTOCONF)
.withDescription("Disable the Autoconf Analyzer.").create();
final Option disableOpenSSLAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_OPENSSL)
.withDescription("Disable the OpenSSL Analyzer.").create();
final Option disableCmakeAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_CMAKE)
.withDescription("Disable the Cmake Analyzer.").create();
final Option disableCentralAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_CENTRAL)
.withDescription("Disable the Central Analyzer. If this analyzer is disabled it is likely you also want to disable "
+ "the Nexus Analyzer.").create();
final Option disableNexusAnalyzer = OptionBuilder.withLongOpt(ARGUMENT.DISABLE_NEXUS)
.withDescription("Disable the Nexus Analyzer.").create();
options.addOption(updateOnly)
.addOption(cve12Base)
.addOption(cve20Base)
.addOption(cve12Modified)
.addOption(cve20Modified)
.addOption(proxyPort)
.addOption(proxyServer)
.addOption(proxyUsername)
@@ -378,7 +414,10 @@ public final class CliParser {
.addOption(disableArchiveAnalyzer)
.addOption(disableAssemblyAnalyzer)
.addOption(disablePythonDistributionAnalyzer)
.addOption(disableCmakeAnalyzer)
.addOption(disablePythonPackageAnalyzer)
.addOption(disableAutoconfAnalyzer)
.addOption(disableOpenSSLAnalyzer)
.addOption(disableNuspecAnalyzer)
.addOption(disableCentralAnalyzer)
.addOption(disableNexusAnalyzer)
@@ -395,7 +434,7 @@ public final class CliParser {
* @param options a collection of command line arguments
* @throws IllegalArgumentException thrown if there is an exception
*/
@SuppressWarnings("static-access")
@SuppressWarnings({"static-access", "deprecation"})
private void addDeprecatedOptions(final Options options) throws IllegalArgumentException {
final Option proxyServer = OptionBuilder.withArgName("url").hasArg().withLongOpt(ARGUMENT.PROXY_URL)
@@ -432,6 +471,24 @@ public final class CliParser {
return (line != null) && isValid && line.hasOption(ARGUMENT.SCAN);
}
/**
* Returns the symbolic link depth (how deeply symbolic links will be followed).
*
* @return the symbolic link depth
*/
public int getSymLinkDepth() {
int value = 0;
try {
value = Integer.parseInt(line.getOptionValue(ARGUMENT.SYM_LINK_DEPTH, "0"));
if (value < 0) {
value = 0;
}
} catch (NumberFormatException ex) {
LOGGER.debug("Symbolic link was not a number");
}
return value;
}
/**
* Returns true if the disableJar command line argument was specified.
*
@@ -486,6 +543,24 @@ public final class CliParser {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_PY_PKG);
}
/**
* Returns true if the disableCmake command line argument was specified.
*
* @return true if the disableCmake command line argument was specified; otherwise false
*/
public boolean isCmakeDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_CMAKE);
}
/**
* Returns true if the disableAutoconf command line argument was specified.
*
* @return true if the disableAutoconf command line argument was specified; otherwise false
*/
public boolean isAutoconfDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_AUTOCONF);
}
/**
* Returns true if the disableNexus command line argument was specified.
*
@@ -495,6 +570,15 @@ public final class CliParser {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_NEXUS);
}
/**
* Returns true if the disableOpenSSL command line argument was specified.
*
* @return true if the disableOpenSSL command line argument was specified; otherwise false
*/
public boolean isOpenSSLDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_OPENSSL);
}
/**
* Returns true if the disableCentral command line argument was specified.
*
@@ -613,6 +697,42 @@ public final class CliParser {
return line.getOptionValue(ARGUMENT.APP_NAME);
}
/**
* Returns the base URL for the CVE 1.2 XML file.
*
* @return the URL to the CVE 1.2 XML file.
*/
public String getBaseCve12Url() {
return line.getOptionValue(ARGUMENT.CVE_BASE_12);
}
/**
* Returns the base URL for the CVE 2.0 XML file.
*
* @return the URL to the CVE 2.0 XML file.
*/
public String getBaseCve20Url() {
return line.getOptionValue(ARGUMENT.CVE_BASE_20);
}
/**
* Returns the URL for the modified CVE 1.2 XML file.
*
* @return the URL to the modified CVE 1.2 XML file.
*/
public String getModifiedCve12Url() {
return line.getOptionValue(ARGUMENT.CVE_MOD_12);
}
/**
* Returns the URL for the modified CVE 2.0 XML file.
*
* @return the URL to the modified CVE 2.0 XML file.
*/
public String getModifiedCve20Url() {
return line.getOptionValue(ARGUMENT.CVE_MOD_20);
}
/**
* Returns the connection timeout.
*
@@ -627,13 +747,14 @@ public final class CliParser {
*
* @return the proxy server
*/
@SuppressWarnings("deprecation")
public String getProxyServer() {
String server = line.getOptionValue(ARGUMENT.PROXY_SERVER);
if (server == null) {
server = line.getOptionValue(ARGUMENT.PROXY_URL);
if (server != null) {
LOGGER.warning("An old command line argument 'proxyurl' was detected; use proxyserver instead");
LOGGER.warn("An old command line argument 'proxyurl' was detected; use proxyserver instead");
}
}
return server;
@@ -871,7 +992,7 @@ public final class CliParser {
/**
* The CLI argument name indicating the proxy url.
*
* @deprecated use {@link org.owasp.dependencycheck.cli.CliParser.ArgumentName#PROXY_SERVER} instead
* @deprecated use {@link #PROXY_SERVER} instead
*/
@Deprecated
public static final String PROXY_URL = "proxyurl";
@@ -903,6 +1024,22 @@ public final class CliParser {
* The CLI argument name for setting the location of the data directory.
*/
public static final String DATA_DIRECTORY = "data";
/**
* The CLI argument name for setting the URL for the CVE Data Files.
*/
public static final String CVE_MOD_12 = "cveUrl12Modified";
/**
* The CLI argument name for setting the URL for the CVE Data Files.
*/
public static final String CVE_MOD_20 = "cveUrl20Modified";
/**
* The CLI argument name for setting the URL for the CVE Data Files.
*/
public static final String CVE_BASE_12 = "cveUrl12Base";
/**
* The CLI argument name for setting the URL for the CVE Data Files.
*/
public static final String CVE_BASE_20 = "cveUrl20Base";
/**
* The short CLI argument name for setting the location of the data directory.
*/
@@ -915,6 +1052,11 @@ public final class CliParser {
* The short CLI argument name for setting the file path to write verbose logging information.
*/
public static final String VERBOSE_LOG_SHORT = "l";
/**
* The CLI argument name for setting the depth of symbolic links that will be followed.
*/
public static final String SYM_LINK_DEPTH = "symLink";
/**
* The CLI argument name for setting the location of the suppression file.
*/
@@ -935,6 +1077,14 @@ public final class CliParser {
* Disables the Python Package Analyzer.
*/
public static final String DISABLE_PY_PKG = "disablePyPkg";
/**
* Disables the Autoconf Analyzer.
*/
public static final String DISABLE_AUTOCONF = "disableAutoconf";
/**
* Disables the Cmake Analyzer.
*/
public static final String DISABLE_CMAKE = "disableCmake";
/**
* Disables the Assembly Analyzer.
*/
@@ -951,6 +1101,10 @@ public final class CliParser {
* Disables the Nexus Analyzer.
*/
public static final String DISABLE_NEXUS = "disableNexus";
/**
* Disables the OpenSSL Analyzer.
*/
public static final String DISABLE_OPENSSL = "disableOpenSSL";
/**
* The URL of the nexus server.
*/


@@ -1,22 +0,0 @@
handlers=java.util.logging.ConsoleHandler
#, java.util.logging.FileHandler
# logging levels
# FINEST, FINER, FINE, CONFIG, INFO, WARNING and SEVERE.
# Configure the ConsoleHandler.
java.util.logging.ConsoleHandler.level=INFO
# Configure the FileHandler.
java.util.logging.FileHandler.formatter=java.util.logging.SimpleFormatter
java.util.logging.FileHandler.level=FINE
# The following special tokens can be used in the pattern property
# which specifies the location and name of the log file.
# / - standard path separator
# %t - system temporary directory
# %h - value of the user.home system property
# %g - generation number for rotating logs
# %u - unique number to avoid conflicts
# FileHandler writes to %h/demo0.log by default.
java.util.logging.FileHandler.pattern=./dependency-check.log


@@ -0,0 +1,16 @@
<configuration>
<contextName>dependency-check</contextName>
<!-- Logging configuration -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<Target>System.out</Target>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>INFO</level>
</filter>
<encoder>
<pattern>[%level] %msg%n</pattern>
</encoder>
</appender>
<root level="DEBUG">
<appender-ref ref="console"/>
</root>
</configuration>


@@ -7,7 +7,8 @@ Short | Argument&nbsp;Name&nbsp;&nbsp; | Parameter | Description | Requir
-------|-----------------------|-----------------|-------------|------------
\-a | \-\-app | \<name\> | The name of the application being scanned. This is a required argument. | Required
\-s | \-\-scan | \<path\> | The path to scan \- this option can be specified multiple times. It is also possible to specify Ant style paths (e.g. directory/**/*.jar). | Required
| \-\-exclude | \<pattern\> | The path patterns to exclude from the scan \- this option can be specified multiple times. This accepts Ant style path patterns (e.g. **/exclude/**) . | Optional
| \-\-exclude | \<pattern\> | The path patterns to exclude from the scan \- this option can be specified multiple times. This accepts Ant style path patterns (e.g. **/exclude/**). | Optional
| \-\-symLink | \<depth\> | The depth that symbolic links will be followed; the default is 0 meaning symbolic links will not be followed. | Optional
\-o | \-\-out | \<path\> | The folder to write reports to. This defaults to the current directory. If the format is not set to ALL one could specify a specific file name. | Optional
\-f | \-\-format | \<format\> | The output format to write to (XML, HTML, VULN, ALL). The default is HTML. | Required
\-l | \-\-log | \<file\> | The file path to write verbose logging information. | Optional
@@ -21,10 +22,17 @@ Advanced Options
================
Short | Argument&nbsp;Name&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; | Parameter | Description | Default&nbsp;Value
-------|-----------------------|-----------------|----------------------------------------------------------------------------------|-------------------
| \-\-cveUrl12Modified | \<url\> | URL for the modified CVE 1.2 | http://nvd.nist.gov/download/nvdcve-modified.xml
| \-\-cveUrl20Modified | \<url\> | URL for the modified CVE 2.0 | http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-modified.xml
| \-\-cveUrl12Base | \<url\> | Base URL for each year's CVE 1.2; the %d will be replaced with the year (see the example after this table) | http://nvd.nist.gov/download/nvdcve-%d.xml
| \-\-cveUrl20Base | \<url\> | Base URL for each year's CVE 2.0; the %d will be replaced with the year | http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml
\-P | \-\-propertyfile | \<file\> | Specifies a file that contains properties to use instead of application defaults. | &nbsp;
| \-\-updateonly | | If set, only the update phase of dependency-check will be executed; no scan will be executed and no report will be generated. | &nbsp;
| \-\-disablePyDist | | Sets whether the Python Distribution Analyzer will be used. | false
| \-\-disablePyPkg | | Sets whether the Python Package Analyzer will be used. | false
| \-\-disableAutoconf | | Sets whether the Autoconf Analyzer will be used. | false
| \-\-disableOpenSSL | | Sets whether the OpenSSL Analyzer will be used. | false
| \-\-disableCmake | | Sets whether the Cmake Analyzer will be used. | false
| \-\-disableArchive | | Sets whether the Archive Analyzer will be used. | false
| \-\-zipExtensions | \<strings\> | A comma-separated list of additional file extensions to be treated like a ZIP file, the contents will be extracted and analyzed. | &nbsp;
| \-\-disableJar | | Sets whether the Jar Analyzer will be used. | false
@@ -34,7 +42,7 @@ Short | Argument&nbsp;Name&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; | Paramete
| \-\-nexusUsesProxy | \<true\|false\> | Whether or not the defined proxy should be used when connecting to Nexus. | true
| \-\-disableNuspec | | Sets whether or not the .NET Nuget Nuspec Analyzer will be used. | false
| \-\-disableAssembly | | Sets whether or not the .NET Assembly Analyzer should be used. | false
| \-\-pathToMono | \<path\> | The path to Mono for .NET Assembly analysis on non-windows systems. | &nbsp;
| \-\-mono | \<path\> | The path to Mono for .NET Assembly analysis on non-windows systems. | &nbsp;
| \-\-proxyserver | \<server\> | The proxy server to use when downloading resources. | &nbsp;
| \-\-proxyport | \<port\> | The proxy port to use when downloading resources. | &nbsp;
| \-\-connectiontimeout | \<timeout\> | The connection timeout (in milliseconds) to use when downloading resources. | &nbsp;
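To illustrate the cveUrl12Base/cveUrl20Base rows above: the %d token is an ordinary java.util.Formatter placeholder substituted with each year being downloaded. A minimal sketch of the substitution (the year 2014 and the class name are illustrative only; this does not claim to show dependency-check's internal download code):

public class CveUrlExample {
    public static void main(String[] args) {
        // Substituting a year into the default base-URL templates from the table above.
        final String cve12 = String.format("http://nvd.nist.gov/download/nvdcve-%d.xml", 2014);
        final String cve20 = String.format("http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml", 2014);
        System.out.println(cve12); // http://nvd.nist.gov/download/nvdcve-2014.xml
        System.out.println(cve20); // http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-2014.xml
    }
}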

View File

@@ -0,0 +1,73 @@
/*
* Copyright 2015 OWASP.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.owasp.dependencycheck;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author jeremy
*/
public class AppTest {
public AppTest() {
}
@BeforeClass
public static void setUpClass() {
}
@AfterClass
public static void tearDownClass() {
}
@Before
public void setUp() {
}
@After
public void tearDown() {
}
/**
* Test of ensureCanonicalPath method, of class App.
*/
@Test
public void testEnsureCanonicalPath() {
String file = "../*.jar";
App instance = new App();
String result = instance.ensureCanonicalPath(file);
assertFalse(result.contains(".."));
assertTrue(result.endsWith("*.jar"));
}
/**
* Test of ensureCanonicalPath method, of class App.
*/
@Test
public void testEnsureCanonicalPath2() {
String file = "../some/skip/../path/file.txt";
App instance = new App();
String expResult = "/some/path/file.txt";
String result = instance.ensureCanonicalPath(file);
assertTrue("result=" + result, result.endsWith(expResult));
}
}

View File

@@ -20,7 +20,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>1.2.11</version>
<version>1.3.0</version>
</parent>
<artifactId>dependency-check-core</artifactId>
@@ -101,7 +101,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
</goals>
<configuration>
<outputDirectory>${project.build.directory}/test-classes</outputDirectory>
<includeScope>provided</includeScope>
<includeScope>test</includeScope>
</configuration>
</execution>
</executions>
@@ -205,6 +205,10 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<name>data.directory</name>
<value>${project.build.directory}/data</value>
</property>
<property>
<name>temp.directory</name>
<value>${project.build.directory}/temp</value>
</property>
</systemProperties>
</configuration>
</plugin>
@@ -222,7 +226,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-project-info-reports-plugin</artifactId>
<version>2.7</version>
<version>${reporting.project-info-reports-plugin.version}</version>
<reportSets>
<reportSet>
<reports>
@@ -236,7 +240,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>2.9.1</version>
<version>${reporting.javadoc-plugin.version}</version>
<configuration>
<failOnError>false</failOnError>
<bottom>Copyright© 2012-15 Jeremy Long. All Rights Reserved.</bottom>
@@ -253,7 +257,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>versions-maven-plugin</artifactId>
<version>2.1</version>
<version>${reporting.versions-plugin.version}</version>
<reportSets>
<reportSet>
<reports>
@@ -266,17 +270,17 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jxr-plugin</artifactId>
<version>2.4</version>
<version>${reporting.jxr-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>cobertura-maven-plugin</artifactId>
<version>2.6</version>
<version>${reporting.cobertura-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-report-plugin</artifactId>
<version>2.16</version>
<version>${reporting.surefire-report-plugin.version}</version>
<reportSets>
<reportSet>
<reports>
@@ -295,7 +299,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>taglist-maven-plugin</artifactId>
<version>2.4</version>
<version>${reporting.taglist-plugin.version}</version>
<configuration>
<tagListOptions>
<tagClasses>
@@ -319,7 +323,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>2.11</version>
<version>${reporting.checkstyle-plugin.version}</version>
<configuration>
<enableRulesSummary>false</enableRulesSummary>
<enableFilesSummary>false</enableFilesSummary>
@@ -332,7 +336,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-pmd-plugin</artifactId>
<version>3.1</version>
<version>${reporting.pmd-plugin.version}</version>
<configuration>
<targetJdk>1.6</targetJdk>
<linkXref>true</linkXref>
@@ -351,12 +355,38 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
<version>2.5.3</version>
<version>${reporting.findbugs-plugin.version}</version>
</plugin>
</plugins>
</reporting>
<dependencies>
<!-- Note, to stay compatible with Jenkins installations, only JARs compiled to 1.6 can be used -->
<dependency>
<groupId>com.google.code.findbugs</groupId>
<artifactId>annotations</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<!-- Set this to test so that each project that uses this has to have its own implementation of SLF4J -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<scope>test</scope>
</dependency>
<!-- For the CAL10N support -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-ext</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-utils</artifactId>
@@ -365,7 +395,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-test-framework</artifactId>
<version>${apache.lucene.version}</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -373,120 +402,109 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<artifactId>jmockit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.google.code.findbugs</groupId>
<artifactId>annotations</artifactId>
<version>3.0.0</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
<version>1.9</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.4</version>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>2.6</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
<version>${apache.lucene.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-common</artifactId>
<version>${apache.lucene.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queryparser</artifactId>
<version>${apache.lucene.version}</version>
</dependency>
<dependency>
<groupId>org.apache.velocity</groupId>
<artifactId>velocity</artifactId>
<version>1.7</version>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>1.3.176</version>
</dependency>
<dependency>
<groupId>org.jsoup</groupId>
<artifactId>jsoup</artifactId>
<version>1.7.2</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>com.sun.mail</groupId>
<artifactId>mailapi</artifactId>
</dependency>
<!-- The following dependencies are only used during testing -->
<dependency>
<groupId>org.apache.maven.scm</groupId>
<artifactId>maven-scm-provider-cvsexe</artifactId>
<version>1.8.1</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-webmvc</artifactId>
<version>2.5.5</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.security</groupId>
<artifactId>spring-security-web</artifactId>
<version>3.0.0.RELEASE</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.hazelcast</groupId>
<artifactId>hazelcast</artifactId>
<version>2.5</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>net.sf.ehcache</groupId>
<artifactId>ehcache-core</artifactId>
<version>2.2.0</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.struts</groupId>
<artifactId>struts2-core</artifactId>
<version>2.1.2</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jetty</artifactId>
<version>6.1.0</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.axis2</groupId>
<artifactId>axis2-spring</artifactId>
<version>1.4.1</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.axis2</groupId>
<artifactId>axis2-adb</artifactId>
<version>1.4.1</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
@@ -494,7 +512,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<artifactId>daytrader-ear</artifactId>
<version>2.1.7</version>
<type>ear</type>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
@@ -502,7 +520,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<artifactId>war</artifactId>
<version>4.0</version>
<type>war</type>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
@@ -510,42 +528,37 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<artifactId>dojo-war</artifactId>
<version>1.3.0</version>
<type>war</type>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.openjpa</groupId>
<artifactId>openjpa</artifactId>
<version>2.0.1</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.google.inject</groupId>
<artifactId>guice</artifactId>
<version>3.0</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.retry</groupId>
<artifactId>spring-retry</artifactId>
<version>1.1.0.RELEASE</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>uk.ltd.getahead</groupId>
<artifactId>dwr</artifactId>
<version>1.1.1</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.sun.mail</groupId>
<artifactId>mailapi</artifactId>
<version>1.5.2</version>
</dependency>
</dependencies>
<profiles>
<profile>
@@ -553,7 +566,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<activation>
<property>
<name>mysql</name>
<!--value>test</value-->
</property>
</activation>
<build>
@@ -617,158 +629,150 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<groupId>org.apache.xmlgraphics</groupId>
<artifactId>batik-util</artifactId>
<version>1.7</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.thoughtworks.xstream</groupId>
<artifactId>xstream</artifactId>
<version>1.4.2</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.ws.security</groupId>
<artifactId>wss4j</artifactId>
<version>1.5.7</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.ganyo</groupId>
<artifactId>gcm-server</artifactId>
<version>1.0.2</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.python</groupId>
<artifactId>jython-standalone</artifactId>
<version>2.7-b1</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.jruby</groupId>
<artifactId>jruby-complete</artifactId>
<version>1.7.4</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.jruby</groupId>
<artifactId>jruby</artifactId>
<version>1.6.3</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-client</artifactId>
<version>2.12</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
<version>1.11.1</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.sun.faces</groupId>
<artifactId>jsf-impl</artifactId>
<version>2.2.8-02</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.google.inject</groupId>
<artifactId>guice</artifactId>
<version>3.0</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.opensaml</groupId>
<artifactId>xmltooling</artifactId>
<version>1.4.1</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-webmvc</artifactId>
<version>3.2.12.RELEASE</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.3.1</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.google.gerrit</groupId>
<artifactId>gerrit-extension-api</artifactId>
<version>2.11</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.google.apis</groupId>
<artifactId>google-api-services-sqladmin</artifactId>
<version>v1beta4-rev5-1.20.0</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.google.gwt.google-apis</groupId>
<artifactId>gwt-gears</artifactId>
<version>1.2.1</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.mozilla</groupId>
<artifactId>rhino</artifactId>
<version>1.7.6</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.microsoft.windowsazure</groupId>
<artifactId>microsoft-azure-api-media</artifactId>
<version>0.5.0</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.microsoft.windowsazure</groupId>
<artifactId>microsoft-azure-api-management-sql</artifactId>
<version>0.5.0</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.microsoft.bingads</groupId>
<artifactId>microsoft.bingads</artifactId>
<version>9.3.4</version>
<scope>provided</scope>
<scope>test</scope>
<optional>true</optional>
</dependency>
</dependencies>
</profile>
</profiles>
<properties>
<!-- new versions of lucene are compiled with JDK 1.7 and cannot be used ubiquitously in Jenkins; because of
this, we cannot upgrade beyond 4.7.2 -->
<apache.lucene.version>4.7.2</apache.lucene.version>
</properties>
</project>

View File

@@ -17,15 +17,6 @@
*/
package org.owasp.dependencycheck;
import java.io.File;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.owasp.dependencycheck.analyzer.AnalysisPhase;
import org.owasp.dependencycheck.analyzer.Analyzer;
import org.owasp.dependencycheck.analyzer.AnalyzerService;
@@ -39,9 +30,19 @@ import org.owasp.dependencycheck.data.update.UpdateService;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.NoDataException;
import org.owasp.dependencycheck.utils.FileUtils;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileFilter;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
/**
* Scans files, directories, etc. for Dependencies. Analyzers are loaded and used to process the files found by the scan, if a
@@ -49,7 +50,7 @@ import org.owasp.dependencycheck.utils.Settings;
*
* @author Jeremy Long
*/
public class Engine {
public class Engine implements FileFilter {
/**
* The list of dependencies.
@@ -72,7 +73,7 @@ public class Engine {
/**
* The Logger for use throughout the class.
*/
private static final Logger LOGGER = Logger.getLogger(Engine.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(Engine.class);
/**
* Creates a new Engine.
@@ -167,7 +168,6 @@ public class Engine {
*
* @param paths an array of paths to files or directories to be analyzed
* @return the list of dependencies scanned
*
* @since v0.3.2.5
*/
public List<Dependency> scan(String[] paths) {
@@ -200,7 +200,6 @@ public class Engine {
*
* @param files an array of paths to files or directories to be analyzed.
* @return the list of dependencies
*
* @since v0.3.2.5
*/
public List<Dependency> scan(File[] files) {
@@ -220,7 +219,6 @@ public class Engine {
*
* @param files a set of paths to files or directories to be analyzed
* @return the list of dependencies scanned
*
* @since v0.3.2.5
*/
public List<Dependency> scan(Set<File> files) {
@@ -240,7 +238,6 @@ public class Engine {
*
* @param files a set of paths to files or directories to be analyzed
* @return the list of dependencies scanned
*
* @since v0.3.2.5
*/
public List<Dependency> scan(List<File> files) {
@@ -260,9 +257,7 @@ public class Engine {
*
* @param file the path to a file or directory to be analyzed
* @return the list of dependencies scanned
*
* @since v0.3.2.4
*
*/
public List<Dependency> scan(File file) {
if (file.exists()) {
@@ -312,36 +307,30 @@ public class Engine {
* @return the scanned dependency
*/
protected Dependency scanFile(File file) {
if (!file.isFile()) {
final String msg = String.format("Path passed to scanFile(File) is not a file: %s. Skipping the file.", file.toString());
LOGGER.log(Level.FINE, msg);
return null;
}
final String fileName = file.getName();
String extension = FileUtils.getFileExtension(fileName);
if (null == extension) {
extension = fileName;
}
Dependency dependency = null;
if (supportsExtension(extension)) {
dependency = new Dependency(file);
if (extension == null ? fileName == null : extension.equals(fileName)) {
dependency.setFileExtension(extension);
if (file.isFile()) {
if (accept(file)) {
dependency = new Dependency(file);
dependencies.add(dependency);
}
dependencies.add(dependency);
} else {
LOGGER.debug("Path passed to scanFile(File) is not a file: {}. Skipping the file.", file);
}
return dependency;
}
/**
* Runs the analyzers against all of the dependencies.
* Runs the analyzers against all of the dependencies. Since the mutable dependencies list is exposed via
* {@link #getDependencies()}, this method iterates over a copy of the dependencies list. Thus, the potential for
* {@link java.util.ConcurrentModificationException}s is avoided, and analyzers may safely add or remove entries from the
* dependencies list.
*/
public void analyzeDependencies() {
boolean autoUpdate = true;
try {
autoUpdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
} catch (InvalidSettingException ex) {
LOGGER.log(Level.FINE, "Invalid setting for auto-update; using true.");
LOGGER.debug("Invalid setting for auto-update; using true.");
}
if (autoUpdate) {
doUpdates();
@@ -351,24 +340,18 @@ public class Engine {
try {
ensureDataExists();
} catch (NoDataException ex) {
final String msg = String.format("%s%n%nUnable to continue dependency-check analysis.", ex.getMessage());
LOGGER.log(Level.SEVERE, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.error("{}\n\nUnable to continue dependency-check analysis.", ex.getMessage());
LOGGER.debug("", ex);
return;
} catch (DatabaseException ex) {
final String msg = String.format("%s%n%nUnable to continue dependency-check analysis.", ex.getMessage());
LOGGER.log(Level.SEVERE, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.error("{}\n\nUnable to continue dependency-check analysis.", ex.getMessage());
LOGGER.debug("", ex);
return;
}
final String logHeader = String.format("%n"
+ "----------------------------------------------------%n"
+ "BEGIN ANALYSIS%n"
+ "----------------------------------------------------");
LOGGER.log(Level.FINE, logHeader);
LOGGER.log(Level.INFO, "Analysis Starting");
LOGGER.debug("\n----------------------------------------------------\nBEGIN ANALYSIS\n----------------------------------------------------");
LOGGER.info("Analysis Starting");
// analysis phases
for (AnalysisPhase phase : AnalysisPhase.values()) {
@@ -381,30 +364,26 @@ public class Engine {
* analyzers may modify it. This prevents ConcurrentModificationExceptions.
* This is okay for adds/deletes because it happens per analyzer.
*/
final String msg = String.format("Begin Analyzer '%s'", a.getName());
LOGGER.log(Level.FINE, msg);
LOGGER.debug("Begin Analyzer '{}'", a.getName());
final Set<Dependency> dependencySet = new HashSet<Dependency>();
dependencySet.addAll(dependencies);
for (Dependency d : dependencySet) {
boolean shouldAnalyze = true;
if (a instanceof FileTypeAnalyzer) {
final FileTypeAnalyzer fAnalyzer = (FileTypeAnalyzer) a;
shouldAnalyze = fAnalyzer.supportsExtension(d.getFileExtension());
shouldAnalyze = fAnalyzer.accept(d.getActualFile());
}
if (shouldAnalyze) {
final String msgFile = String.format("Begin Analysis of '%s'", d.getActualFilePath());
LOGGER.log(Level.FINE, msgFile);
LOGGER.debug("Begin Analysis of '{}'", d.getActualFilePath());
try {
a.analyze(d, this);
} catch (AnalysisException ex) {
final String exMsg = String.format("An error occurred while analyzing '%s'.", d.getActualFilePath());
LOGGER.log(Level.WARNING, exMsg);
LOGGER.log(Level.FINE, "", ex);
LOGGER.warn("An error occurred while analyzing '{}'.", d.getActualFilePath());
LOGGER.debug("", ex);
} catch (Throwable ex) {
final String axMsg = String.format("An unexpected error occurred during analysis of '%s'", d.getActualFilePath());
//final AnalysisException ax = new AnalysisException(axMsg, ex);
LOGGER.log(Level.WARNING, axMsg);
LOGGER.log(Level.FINE, "", ex);
LOGGER.warn("An unexpected error occurred during analysis of '{}'", d.getActualFilePath());
LOGGER.debug("", ex);
}
}
}
@@ -418,12 +397,8 @@ public class Engine {
}
}
final String logFooter = String.format("%n"
+ "----------------------------------------------------%n"
+ "END ANALYSIS%n"
+ "----------------------------------------------------");
LOGGER.log(Level.FINE, logFooter);
LOGGER.log(Level.INFO, "Analysis Complete");
LOGGER.debug("\n----------------------------------------------------\nEND ANALYSIS\n----------------------------------------------------");
LOGGER.info("Analysis Complete");
}
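A minimal, standalone sketch of the defensive-copy iteration described in the analyzeDependencies javadoc above; it uses only the JDK and hypothetical names, and simply shows why iterating a snapshot lets the underlying list be modified mid-loop without a ConcurrentModificationException:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class DefensiveCopyExample {
    public static void main(String[] args) {
        final List<String> dependencies = new ArrayList<String>();
        dependencies.add("library-a.jar");
        dependencies.add("archive-b.zip");

        // Iterate a snapshot; iterating 'dependencies' directly while adding to it
        // would throw a ConcurrentModificationException.
        final Set<String> snapshot = new HashSet<String>(dependencies);
        for (String d : snapshot) {
            if (d.endsWith(".zip")) {
                dependencies.add(d + "!extracted-entry.jar"); // safe: only the snapshot is being iterated
            }
        }
        System.out.println(dependencies);
    }
}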
/**
@@ -434,17 +409,15 @@ public class Engine {
*/
protected Analyzer initializeAnalyzer(Analyzer analyzer) {
try {
final String msg = String.format("Initializing %s", analyzer.getName());
LOGGER.log(Level.FINE, msg);
LOGGER.debug("Initializing {}", analyzer.getName());
analyzer.initialize();
} catch (Throwable ex) {
final String msg = String.format("Exception occurred initializing %s.", analyzer.getName());
LOGGER.log(Level.SEVERE, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.error("Exception occurred initializing {}.", analyzer.getName());
LOGGER.debug("", ex);
try {
analyzer.close();
} catch (Throwable ex1) {
LOGGER.log(Level.FINEST, null, ex1);
LOGGER.trace("", ex1);
}
}
return analyzer;
@@ -456,12 +429,11 @@ public class Engine {
* @param analyzer the analyzer to close
*/
protected void closeAnalyzer(Analyzer analyzer) {
final String msg = String.format("Closing Analyzer '%s'", analyzer.getName());
LOGGER.log(Level.FINE, msg);
LOGGER.debug("Closing Analyzer '{}'", analyzer.getName());
try {
analyzer.close();
} catch (Throwable ex) {
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
}
}
@@ -477,9 +449,9 @@ public class Engine {
try {
source.update();
} catch (UpdateException ex) {
LOGGER.log(Level.WARNING,
LOGGER.warn(
"Unable to update Cached Web DataSource, using local data instead. Results may not include recent vulnerabilities.");
LOGGER.log(Level.FINE, String.format("Unable to update details for %s", source.getClass().getName()), ex);
LOGGER.debug("Unable to update details for {}", source.getClass().getName(), ex);
}
}
LOGGER.info("Check for updates complete");
@@ -502,18 +474,18 @@ public class Engine {
/**
* Checks all analyzers to see if an extension is supported.
*
* @param ext a file extension
* @param file the file to check
* @return true or false depending on whether or not the file extension is supported
*/
public boolean supportsExtension(String ext) {
if (ext == null) {
public boolean accept(File file) {
if (file == null) {
return false;
}
boolean scan = false;
for (FileTypeAnalyzer a : this.fileTypeAnalyzers) {
/* note, we can't break early on this loop as the analyzers need to know if
they have files to work on prior to initialization */
scan |= a.supportsExtension(ext);
scan |= a.accept(file);
}
return scan;
}

View File

@@ -20,8 +20,6 @@ package org.owasp.dependencycheck.agent;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
@@ -32,6 +30,8 @@ import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.exception.ScanAgentException;
import org.owasp.dependencycheck.reporting.ReportGenerator;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class provides a way to easily conduct a scan solely based on existing evidence metadata rather than collecting evidence
@@ -67,7 +67,7 @@ public class DependencyCheckScanAgent {
/**
* Logger for use throughout the class.
*/
private static final Logger LOGGER = Logger.getLogger(DependencyCheckScanAgent.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(DependencyCheckScanAgent.class);
/**
* The application name for the report.
*/
@@ -861,7 +861,7 @@ public class DependencyCheckScanAgent {
cve.open();
prop = cve.getDatabaseProperties();
} catch (DatabaseException ex) {
LOGGER.log(Level.FINE, "Unable to retrieve DB Properties", ex);
LOGGER.debug("Unable to retrieve DB Properties", ex);
} finally {
if (cve != null) {
cve.close();
@@ -871,13 +871,13 @@ public class DependencyCheckScanAgent {
try {
r.generateReports(outDirectory.getCanonicalPath(), this.reportFormat.name());
} catch (IOException ex) {
LOGGER.log(Level.SEVERE,
LOGGER.error(
"Unexpected exception occurred during analysis; please see the verbose error log for more details.");
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
} catch (Throwable ex) {
LOGGER.log(Level.SEVERE,
LOGGER.error(
"Unexpected exception occurred during analysis; please see the verbose error log for more details.");
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
}
}
@@ -981,9 +981,9 @@ public class DependencyCheckScanAgent {
checkForFailure(engine.getDependencies());
}
} catch (DatabaseException ex) {
LOGGER.log(Level.SEVERE,
LOGGER.error(
"Unable to connect to the dependency-check database; analysis has stopped");
LOGGER.log(Level.FINE, "", ex);
LOGGER.debug("", ex);
} finally {
Settings.cleanup(true);
if (engine != null) {
@@ -1058,10 +1058,9 @@ public class DependencyCheckScanAgent {
}
}
if (summary.length() > 0) {
final String msg = String.format("%n%n"
+ "One or more dependencies were identified with known vulnerabilities:%n%n%s"
+ "%n%nSee the dependency-check report for more details.%n%n", summary.toString());
LOGGER.log(Level.WARNING, msg);
LOGGER.warn("\n\nOne or more dependencies were identified with known vulnerabilities:\n\n{}\n\n"
+ "See the dependency-check report for more details.\n\n",
summary.toString());
}
}

View File

@@ -17,16 +17,19 @@
*/
package org.owasp.dependencycheck.analyzer;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileFilter;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
/**
* The base FileTypeAnalyzer that all analyzers that have specific file types they analyze should extend.
@@ -37,8 +40,7 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
//<editor-fold defaultstate="collapsed" desc="Constructor">
/**
* Base constructor that all children must call. This checks the configuration to determine if the analyzer is
* enabled.
* Base constructor that all children must call. This checks the configuration to determine if the analyzer is enabled.
*/
public AbstractFileTypeAnalyzer() {
reset();
@@ -49,7 +51,7 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(AbstractFileTypeAnalyzer.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractFileTypeAnalyzer.class);
/**
* Whether the file type analyzer detected any files it needs to analyze.
*/
@@ -100,19 +102,16 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
//<editor-fold defaultstate="collapsed" desc="Abstract methods children must implement">
/**
* <p>
* Returns a list of supported file extensions. An example would be an analyzer that inspected java jar files. The
* getSupportedExtensions function would return a set with a single element "jar".</p>
* Returns the {@link java.io.FileFilter} used to determine which files are to be analyzed. An example would be an analyzer
* that inspected Java jar files. Implementors may use {@link org.owasp.dependencycheck.utils.FileFilterBuilder}.</p>
*
* @return the file filter used to determine which files are to be analyzed
* <p/>
* <p>
* <b>Note:</b> when implementing this the extensions returned MUST be lowercase.</p>
*
* @return The file extensions supported by this analyzer.
*
* <p>
* If the analyzer returns null it will not cause additional files to be analyzed but will be executed against every
* file loaded</p>
* If the analyzer returns null it will not cause additional files to be analyzed, but will be executed against every file
* loaded.</p>
*/
protected abstract Set<String> getSupportedExtensions();
protected abstract FileFilter getFileFilter();
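A minimal sketch of building such a filter with FileFilterBuilder, the same builder used by ArchiveAnalyzer later in this diff (it assumes dependency-check-utils is on the classpath; the class and file names here are hypothetical):

import java.io.File;
import java.io.FileFilter;
import org.owasp.dependencycheck.utils.FileFilterBuilder;

public class FileFilterExample {
    // An extension-based filter, mirroring the FILTER fields added to the analyzers.
    private static final FileFilter JAR_FILTER =
            FileFilterBuilder.newInstance().addExtensions("jar").build();

    public static void main(String[] args) {
        final File candidate = new File("lib/example-library.jar"); // hypothetical path
        System.out.println("accepted: " + JAR_FILTER.accept(candidate));
    }
}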
/**
* Initializes the file type analyzer.
@@ -122,8 +121,8 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
protected abstract void initializeFileTypeAnalyzer() throws Exception;
/**
* Analyzes a given dependency. If the dependency is an archive, such as a WAR or EAR, the contents are extracted,
* scanned, and added to the list of dependencies within the engine.
* Analyzes a given dependency. If the dependency is an archive, such as a WAR or EAR, the contents are extracted, scanned,
* and added to the list of dependencies within the engine.
*
* @param dependency the dependency to analyze
* @param engine the engine scanning
@@ -164,17 +163,15 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
try {
enabled = Settings.getBoolean(key, true);
} catch (InvalidSettingException ex) {
String msg = String.format("Invalid setting for property '%s'", key);
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, "", ex);
msg = String.format("%s has been disabled", getName());
LOGGER.log(Level.WARNING, msg);
LOGGER.warn("Invalid setting for property '{}'", key);
LOGGER.debug("", ex);
LOGGER.warn("{} has been disabled", getName());
}
}
/**
* Analyzes a given dependency. If the dependency is an archive, such as a WAR or EAR, the contents are extracted,
* scanned, and added to the list of dependencies within the engine.
* Analyzes a given dependency. If the dependency is an archive, such as a WAR or EAR, the contents are extracted, scanned,
* and added to the list of dependencies within the engine.
*
* @param dependency the dependency to analyze
* @param engine the engine scanning
@@ -187,39 +184,28 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
}
}
/**
* Returns whether or not this analyzer can process the given extension.
*
* @param extension the file extension to test for support.
* @return whether or not the specified file extension is supported by this analyzer.
*/
@Override
public final boolean supportsExtension(String extension) {
if (!enabled) {
return false;
}
final Set<String> ext = getSupportedExtensions();
if (ext == null) {
final String msg = String.format("The '%s' analyzer is misconfigured and does not have any file extensions;"
+ " it will be disabled", getName());
LOGGER.log(Level.SEVERE, msg);
return false;
} else {
final boolean match = ext.contains(extension);
if (match) {
filesMatched = match;
public boolean accept(File pathname) {
final FileFilter filter = getFileFilter();
boolean accepted = false;
if (null == filter) {
LOGGER.error("The '{}' analyzer is misconfigured and does not have a file filter; it will be disabled", getName());
} else if (enabled) {
accepted = filter.accept(pathname);
if (accepted) {
filesMatched = true;
}
return match;
}
return accepted;
}
//</editor-fold>
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Static utility methods">
/**
* <p>
* Utility method to help in the creation of the extensions set. This constructs a new Set that can be used in a
* final static declaration.</p>
*
* Utility method to help in the creation of the extensions set. This constructs a new Set that can be used in a final static
* declaration.</p>
* <p/>
* <p>
* This implementation was copied from
* http://stackoverflow.com/questions/2041778/initialize-java-hashset-values-by-construction</p>
@@ -229,9 +215,9 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
*/
protected static Set<String> newHashSet(String... strings) {
final Set<String> set = new HashSet<String>();
Collections.addAll(set, strings);
return set;
}
//</editor-fold>
}

View File

@@ -24,8 +24,6 @@ import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import org.owasp.dependencycheck.suppression.SuppressionParseException;
import org.owasp.dependencycheck.suppression.SuppressionParser;
@@ -34,6 +32,8 @@ import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.Downloader;
import org.owasp.dependencycheck.utils.FileUtils;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Abstract base suppression analyzer that contains methods for parsing the suppression xml file.
@@ -45,7 +45,7 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
/**
* The Logger for use throughout the class
*/
private static final Logger LOGGER = Logger.getLogger(AbstractSuppressionAnalyzer.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractSuppressionAnalyzer.class);
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/**
@@ -103,7 +103,7 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
try {
rules = parser.parseSuppressionRules(this.getClass().getClassLoader().getResourceAsStream("dependencycheck-base-suppression.xml"));
} catch (SuppressionParseException ex) {
LOGGER.log(Level.FINE, "Unable to parse the base suppression data file", ex);
LOGGER.debug("Unable to parse the base suppression data file", ex);
}
final String suppressionFilePath = Settings.getString(Settings.KEYS.SUPPRESSION_FILE);
if (suppressionFilePath == null) {
@@ -141,12 +141,11 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
try {
//rules = parser.parseSuppressionRules(file);
rules.addAll(parser.parseSuppressionRules(file));
LOGGER.log(Level.FINE, rules.size() + " suppression rules were loaded.");
LOGGER.debug("{} suppression rules were loaded.", rules.size());
} catch (SuppressionParseException ex) {
final String msg = String.format("Unable to parse suppression xml file '%s'", file.getPath());
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.WARNING, ex.getMessage());
LOGGER.log(Level.FINE, "", ex);
LOGGER.warn("Unable to parse suppression xml file '{}'", file.getPath());
LOGGER.warn(ex.getMessage());
LOGGER.debug("", ex);
throw ex;
}
}
@@ -171,8 +170,8 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
* @throws SuppressionParseException throws the generated SuppressionParseException
*/
private void throwSuppressionParseException(String message, Exception exception) throws SuppressionParseException {
LOGGER.log(Level.WARNING, message);
LOGGER.log(Level.FINE, "", exception);
LOGGER.warn(message);
LOGGER.debug("", exception);
throw new SuppressionParseException(message, exception);
}
}

View File

@@ -20,6 +20,7 @@ package org.owasp.dependencycheck.analyzer;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
@@ -31,8 +32,7 @@ import java.util.Enumeration;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
@@ -46,13 +46,16 @@ import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.analyzer.exception.ArchiveExtractionException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.FileUtils;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>
* An analyzer that extracts files from archives and ensures any supported files contained within the archive are added
* to the dependency list.</p>
* An analyzer that extracts files from archives and ensures any supported files contained within the archive are added to the
* dependency list.</p>
*
* @author Jeremy Long
*/
@@ -61,7 +64,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(ArchiveAnalyzer.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(ArchiveAnalyzer.class);
/**
* The buffer size to use when extracting files from the archive.
*/
@@ -97,15 +100,15 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
*/
private static final Set<String> ZIPPABLES = newHashSet("zip", "ear", "war", "jar", "sar", "apk", "nupkg");
/**
* The set of file extensions supported by this analyzer. Note for developers, any additions to this list will need
* to be explicitly handled in extractFiles().
* The set of file extensions supported by this analyzer. Note for developers, any additions to this list will need to be
* explicitly handled in extractFiles().
*/
private static final Set<String> EXTENSIONS = newHashSet("tar", "gz", "tgz");
/**
* The set of file extensions to remove from the engine's collection of dependencies.
* Detects files with extensions to remove from the engine's collection of dependencies.
*/
private static final Set<String> REMOVE_FROM_ANALYSIS = newHashSet("zip", "tar", "gz", "tgz"); //TODO add nupkg, apk, sar?
private static final FileFilter REMOVE_FROM_ANALYSIS = FileFilterBuilder.newInstance().addExtensions("zip", "tar", "gz", "tgz").build();
static {
final String additionalZipExt = Settings.getString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS);
@@ -117,15 +120,20 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
}
/**
* Returns a list of file EXTENSIONS supported by this analyzer.
*
* @return a list of file EXTENSIONS supported by this analyzer.
* The file filter used to filter supported files.
*/
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build();
@Override
public Set<String> getSupportedExtensions() {
return EXTENSIONS;
protected FileFilter getFileFilter() {
return FILTER;
}
/**
* Detects files with .zip extension.
*/
private static final FileFilter ZIP_FILTER = FileFilterBuilder.newInstance().addExtensions("zip").build();
/**
* Returns the name of the analyzer.
*
@@ -184,17 +192,17 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
@Override
public void close() throws Exception {
if (tempFileLocation != null && tempFileLocation.exists()) {
LOGGER.log(Level.FINE, "Attempting to delete temporary files");
LOGGER.debug("Attempting to delete temporary files");
final boolean success = FileUtils.delete(tempFileLocation);
if (!success && tempFileLocation != null && tempFileLocation.exists() && tempFileLocation.list().length > 0) {
LOGGER.log(Level.WARNING, "Failed to delete some temporary files, see the log for more details");
LOGGER.warn("Failed to delete some temporary files, see the log for more details");
}
}
}
/**
* Analyzes a given dependency. If the dependency is an archive, such as a WAR or EAR, the contents are extracted,
* scanned, and added to the list of dependencies within the engine.
* Analyzes a given dependency. If the dependency is an archive, such as a WAR or EAR, the contents are extracted, scanned,
* and added to the list of dependencies within the engine.
*
* @param dependency the dependency to analyze
* @param engine the engine scanning
@@ -229,15 +237,15 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
//TODO - can we get more evidence from the parent? EAR contains module name, etc.
//analyze the dependency (i.e. extract files) if it is a supported type.
if (this.supportsExtension(d.getFileExtension()) && scanDepth < MAX_SCAN_DEPTH) {
if (this.accept(d.getActualFile()) && scanDepth < MAX_SCAN_DEPTH) {
scanDepth += 1;
analyze(d, engine);
scanDepth -= 1;
}
}
}
if (this.REMOVE_FROM_ANALYSIS.contains(dependency.getFileExtension())) {
if ("zip".equals(dependency.getFileExtension()) && isZipFileActuallyJarFile(dependency)) {
if (REMOVE_FROM_ANALYSIS.accept(dependency.getActualFile())) {
if (ZIP_FILTER.accept(dependency.getActualFile()) && isZipFileActuallyJarFile(dependency)) {
final File tdir = getNextTempDirectory();
final String fileName = dependency.getFileName();
@@ -264,8 +272,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
}
}
} catch (IOException ex) {
final String msg = String.format("Unable to perform deep copy on '%s'", dependency.getActualFile().getPath());
LOGGER.log(Level.FINE, msg, ex);
LOGGER.debug("Unable to perform deep copy on '{}'", dependency.getActualFile().getPath(), ex);
}
}
engine.getDependencies().remove(dependency);
@@ -310,7 +317,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
try {
fis = new FileInputStream(archive);
} catch (FileNotFoundException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new AnalysisException("Archive file was not found.", ex);
}
final String archiveExt = FileUtils.getFileExtension(archive.getName()).toLowerCase();
@@ -321,24 +328,22 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
extractArchive(new TarArchiveInputStream(new BufferedInputStream(fis)), destination, engine);
} else if ("gz".equals(archiveExt) || "tgz".equals(archiveExt)) {
final String uncompressedName = GzipUtils.getUncompressedFilename(archive.getName());
final String uncompressedExt = FileUtils.getFileExtension(uncompressedName).toLowerCase();
if (engine.supportsExtension(uncompressedExt)) {
decompressFile(new GzipCompressorInputStream(new BufferedInputStream(fis)), new File(destination, uncompressedName));
final File f = new File(destination, uncompressedName);
if (engine.accept(f)) {
decompressFile(new GzipCompressorInputStream(new BufferedInputStream(fis)), f);
}
}
} catch (ArchiveExtractionException ex) {
final String msg = String.format("Exception extracting archive '%s'.", archive.getName());
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.warn("Exception extracting archive '{}'.", archive.getName());
LOGGER.debug("", ex);
} catch (IOException ex) {
final String msg = String.format("Exception reading archive '%s'.", archive.getName());
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.warn("Exception reading archive '{}'.", archive.getName());
LOGGER.debug("", ex);
} finally {
try {
fis.close();
} catch (IOException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
}
}
}
@@ -365,10 +370,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
}
} else {
final File file = new File(destination, entry.getName());
final String ext = FileUtils.getFileExtension(file.getName());
if (engine.supportsExtension(ext)) {
final String extracting = String.format("Extracting '%s'", file.getPath());
LOGGER.fine(extracting);
if (engine.accept(file)) {
LOGGER.debug("Extracting '{}'", file.getPath());
BufferedOutputStream bos = null;
FileOutputStream fos = null;
try {
@@ -388,11 +391,11 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
}
bos.flush();
} catch (FileNotFoundException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
final String msg = String.format("Unable to find file '%s'.", file.getName());
throw new AnalysisException(msg, ex);
} catch (IOException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
final String msg = String.format("IO Exception while parsing file '%s'.", file.getName());
throw new AnalysisException(msg, ex);
} finally {
@@ -400,14 +403,14 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
try {
bos.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
}
}
if (fos != null) {
try {
fos.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
}
}
}
@@ -423,7 +426,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
try {
input.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
}
}
}
@@ -437,8 +440,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
* @throws ArchiveExtractionException thrown if there is an exception decompressing the file
*/
private void decompressFile(CompressorInputStream inputStream, File outputFile) throws ArchiveExtractionException {
final String msg = String.format("Decompressing '%s'", outputFile.getPath());
LOGGER.fine(msg);
LOGGER.debug("Decompressing '{}'", outputFile.getPath());
FileOutputStream out = null;
try {
out = new FileOutputStream(outputFile);
@@ -448,17 +450,17 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
out.write(buffer, 0, n);
}
} catch (FileNotFoundException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new ArchiveExtractionException(ex);
} catch (IOException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new ArchiveExtractionException(ex);
} finally {
if (out != null) {
try {
out.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
}
}
}
@@ -490,7 +492,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
}
}
} catch (IOException ex) {
LOGGER.log(Level.FINE, String.format("Unable to unzip zip file '%s'", dependency.getFilePath()), ex);
LOGGER.debug("Unable to unzip zip file '{}'", dependency.getFilePath(), ex);
} finally {
ZipFile.closeQuietly(zip);
}

View File

@@ -17,31 +17,36 @@
*/
package org.owasp.dependencycheck.analyzer;
import ch.qos.cal10n.IMessageConveyor;
import ch.qos.cal10n.MessageConveyor;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileFilter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Evidence;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
/**
* Analyzer for getting company, product, and version information from a .NET assembly.
*
@@ -61,7 +66,7 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The list of supported extensions
*/
private static final Set<String> SUPPORTED_EXTENSIONS = newHashSet("dll", "exe");
private static final String[] SUPPORTED_EXTENSIONS = {"dll", "exe"};
/**
* The temp value for GrokAssembly.exe
*/
@@ -70,10 +75,14 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
* The DocumentBuilder for parsing the XML
*/
private DocumentBuilder builder;
/**
* Message Conveyor
*/
private static final IMessageConveyor MESSAGE_CONVERYOR = new MessageConveyor(Locale.getDefault());
/**
* Logger
*/
private static final Logger LOGGER = Logger.getLogger(AssemblyAnalyzer.class.getName(), "dependencycheck-resources");
private static final Logger LOGGER = LoggerFactory.getLogger(AssemblyAnalyzer.class);
/**
* Builds the beginnings of a List for ProcessBuilder
@@ -106,7 +115,7 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
public void analyzeFileType(Dependency dependency, Engine engine)
throws AnalysisException {
if (grokAssemblyExe == null) {
LOGGER.warning("analyzer.AssemblyAnalyzer.notdeployed");
LOGGER.warn("GrokAssembly didn't get deployed");
return;
}
@@ -122,7 +131,7 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
String line = null;
// CHECKSTYLE:OFF
while (rdr.ready() && (line = rdr.readLine()) != null) {
LOGGER.log(Level.WARNING, "analyzer.AssemblyAnalyzer.grokassembly.stderr", line);
LOGGER.warn("Error from GrokAssembly: {}", line);
}
// CHECKSTYLE:ON
int rc = 0;
@@ -134,10 +143,11 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
return;
}
if (rc == 3) {
LOGGER.log(Level.FINE, "analyzer.AssemblyAnalyzer.notassembly", dependency.getActualFilePath());
LOGGER.debug("{} is not a .NET assembly or executable and as such cannot be analyzed by dependency-check",
dependency.getActualFilePath());
return;
} else if (rc != 0) {
LOGGER.log(Level.WARNING, "analyzer.AssemblyAnalyzer.grokassembly.rc", rc);
LOGGER.warn("Return code {} from GrokAssembly", rc);
}
final XPath xpath = XPathFactory.newInstance().newXPath();
@@ -178,7 +188,7 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
try {
rdr.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, "ignore", ex);
LOGGER.debug("ignore", ex);
}
}
}
@@ -205,24 +215,24 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
grokAssemblyExe = tempFile;
// Set the temp file to get deleted when we're done
grokAssemblyExe.deleteOnExit();
LOGGER.log(Level.FINE, "analyzer.AssemblyAnalyzer.grokassembly.deployed", grokAssemblyExe.getPath());
LOGGER.debug("Extracted GrokAssembly.exe to {}", grokAssemblyExe.getPath());
} catch (IOException ioe) {
this.setEnabled(false);
LOGGER.log(Level.WARNING, "analyzer.AssemblyAnalyzer.grokassembly.notdeployed", ioe.getMessage());
LOGGER.warn("Could not extract GrokAssembly.exe: {}", ioe.getMessage());
throw new AnalysisException("Could not extract GrokAssembly.exe", ioe);
} finally {
if (fos != null) {
try {
fos.close();
} catch (Throwable e) {
LOGGER.fine("Error closing output stream");
LOGGER.debug("Error closing output stream");
}
}
if (is != null) {
try {
is.close();
} catch (Throwable e) {
LOGGER.fine("Error closing input stream");
LOGGER.debug("Error closing input stream");
}
}
}
@@ -244,8 +254,8 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
final XPath xpath = XPathFactory.newInstance().newXPath();
final String error = xpath.evaluate("/assembly/error", doc);
if (p.waitFor() != 1 || error == null || "".equals(error)) {
LOGGER.warning("An error occurred with the .NET AssemblyAnalyzer, please see the log for more details.");
LOGGER.fine("GrokAssembly.exe is not working properly");
LOGGER.warn("An error occurred with the .NET AssemblyAnalyzer, please see the log for more details.");
LOGGER.debug("GrokAssembly.exe is not working properly");
grokAssemblyExe = null;
this.setEnabled(false);
throw new AnalysisException("Could not execute .NET AssemblyAnalyzer");
@@ -254,8 +264,9 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
if (e instanceof AnalysisException) {
throw (AnalysisException) e;
} else {
LOGGER.warning("analyzer.AssemblyAnalyzer.grokassembly.initialization.failed");
LOGGER.log(Level.FINE, "analyzer.AssemblyAnalyzer.grokassembly.initialization.message", e.getMessage());
LOGGER.warn("An error occurred with the .NET AssemblyAnalyzer;\n"
+ "this can be ignored unless you are scanning .NET DLLs. Please see the log for more details.");
LOGGER.debug("Could not execute GrokAssembly {}", e.getMessage());
this.setEnabled(false);
throw new AnalysisException("An error occured with the .NET AssemblyAnalyzer", e);
}
@@ -264,13 +275,18 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
try {
rdr.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, "ignore", ex);
LOGGER.trace("ignore", ex);
}
}
}
builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
}
/**
* Removes resources used from the local file system.
*
* @throws Exception thrown if there is a problem closing the analyzer
*/
@Override
public void close() throws Exception {
super.close();
@@ -279,18 +295,19 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
grokAssemblyExe.deleteOnExit();
}
} catch (SecurityException se) {
LOGGER.fine("analyzer.AssemblyAnalyzer.grokassembly.notdeleted");
LOGGER.debug("Can't delete temporary GrokAssembly.exe");
}
}
/**
* Gets the set of extensions supported by this analyzer.
*
* @return the list of supported extensions
* The File Filter used to filter supported extensions.
*/
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(
SUPPORTED_EXTENSIONS).build();
@Override
public Set<String> getSupportedExtensions() {
return SUPPORTED_EXTENSIONS;
protected FileFilter getFileFilter() {
return FILTER;
}
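The hunks above are part of the broader change in this compare from java.util.logging with cal10n resource keys to SLF4J parameterized logging. Below is a minimal, standalone sketch of that logging style; it is not part of the diff, and the class name and sample values are hypothetical.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
// Sketch of the SLF4J style the diff migrates to: {} placeholders are only
// rendered when the corresponding level is enabled, so no String.format or
// resource-bundle keys are needed at the call site.
public class Slf4jLoggingSketch {
    private static final Logger LOGGER = LoggerFactory.getLogger(Slf4jLoggingSketch.class);
    public static void main(String[] args) {
        final String path = "/tmp/GrokAssembly.exe"; // hypothetical value
        final int rc = 3;                            // hypothetical return code
        LOGGER.debug("Extracted GrokAssembly.exe to {}", path);
        LOGGER.warn("Return code {} from GrokAssembly", rc);
    }
}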
/**

View File

@@ -0,0 +1,279 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2015 Institute for Defense Analyses. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import org.apache.commons.io.FileUtils;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.UrlStringUtils;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Used to analyze Autoconf input files named configure.ac or configure.in. Files simply named "configure" are also analyzed,
* assuming they are generated by Autoconf, and contain certain special package descriptor variables.
*
* @author Dale Visser <dvisser@ida.org>
* @see <a href="https://www.gnu.org/software/autoconf/">Autoconf - GNU Project - Free Software Foundation (FSF)</a>
*/
public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
/**
* Autoconf output filename.
*/
private static final String CONFIGURE = "configure";
/**
* Autoconf input filename.
*/
private static final String CONFIGURE_IN = "configure.in";
/**
* Autoconf input filename.
*/
private static final String CONFIGURE_AC = "configure.ac";
/**
* The name of the analyzer.
*/
private static final String ANALYZER_NAME = "Autoconf Analyzer";
/**
* The phase that this analyzer is intended to run in.
*/
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION;
/**
* The set of file extensions supported by this analyzer.
*/
private static final String[] EXTENSIONS = {"ac", "in"};
/**
* Matches AC_INIT variables in the output configure script.
*/
private static final Pattern PACKAGE_VAR = Pattern.compile(
"PACKAGE_(.+?)='(.*?)'", Pattern.DOTALL | Pattern.CASE_INSENSITIVE);
/**
* Matches AC_INIT statement in configure.ac file.
*/
private static final Pattern AC_INIT_PATTERN;
static {
// each instance of param or sep_param has a capture group
final String param = "\\[{0,2}(.+?)\\]{0,2}";
final String sepParam = "\\s*,\\s*" + param;
// Group 1: Package
// Group 2: Version
// Group 3: optional
// Group 4: Bug report address (if it exists)
// Group 5: optional
// Group 6: Tarname (if it exists)
// Group 7: optional
// Group 8: URL (if it exists)
AC_INIT_PATTERN = Pattern.compile(String.format(
"AC_INIT\\(%s%s(%s)?(%s)?(%s)?\\s*\\)", param, sepParam,
sepParam, sepParam, sepParam), Pattern.DOTALL
| Pattern.CASE_INSENSITIVE);
}
/**
* The file filter used to determine which files this analyzer supports.
*/
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addFilenames(CONFIGURE).addExtensions(
EXTENSIONS).build();
/**
* Returns the FileFilter
*
* @return the FileFilter
*/
@Override
protected FileFilter getFileFilter() {
return FILTER;
}
/**
* Returns the name of the analyzer.
*
* @return the name of the analyzer.
*/
@Override
public String getName() {
return ANALYZER_NAME;
}
/**
* Returns the phase that the analyzer is intended to run in.
*
* @return the phase that the analyzer is intended to run in.
*/
@Override
public AnalysisPhase getAnalysisPhase() {
return ANALYSIS_PHASE;
}
/**
* Returns the key used in the properties file to reference the analyzer's enabled property.
*
* @return the analyzer's enabled property setting key
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED;
}
@Override
protected void analyzeFileType(Dependency dependency, Engine engine)
throws AnalysisException {
final File actualFile = dependency.getActualFile();
final String name = actualFile.getName();
if (name.startsWith(CONFIGURE)) {
final File parent = actualFile.getParentFile();
final String parentName = parent.getName();
dependency.setDisplayFileName(parentName + "/" + name);
final boolean isOutputScript = CONFIGURE.equals(name);
if (isOutputScript || CONFIGURE_AC.equals(name)
|| CONFIGURE_IN.equals(name)) {
final String contents = getFileContents(actualFile);
if (!contents.isEmpty()) {
if (isOutputScript) {
extractConfigureScriptEvidence(dependency, name,
contents);
} else {
gatherEvidence(dependency, name, contents);
}
}
}
} else {
// copy, alter and set in case some other thread is iterating over
final List<Dependency> deps = new ArrayList<Dependency>(
engine.getDependencies());
deps.remove(dependency);
engine.setDependencies(deps);
}
}
/**
* Extracts evidence from the configuration.
*
* @param dependency the dependency being analyzed
* @param name the name of the source of evidence
* @param contents the contents to analyze for evidence
*/
private void extractConfigureScriptEvidence(Dependency dependency,
final String name, final String contents) {
final Matcher matcher = PACKAGE_VAR.matcher(contents);
while (matcher.find()) {
final String variable = matcher.group(1);
final String value = matcher.group(2);
if (!value.isEmpty()) {
if (variable.endsWith("NAME")) {
dependency.getProductEvidence().addEvidence(name, variable,
value, Confidence.HIGHEST);
} else if ("VERSION".equals(variable)) {
dependency.getVersionEvidence().addEvidence(name, variable,
value, Confidence.HIGHEST);
} else if ("BUGREPORT".equals(variable)) {
dependency.getVendorEvidence().addEvidence(name, variable,
value, Confidence.HIGH);
} else if ("URL".equals(variable)) {
dependency.getVendorEvidence().addEvidence(name, variable,
value, Confidence.HIGH);
}
}
}
}
/**
* Retrieves the contents of a given file.
*
* @param actualFile the file to read
* @return the contents of the file
* @throws AnalysisException thrown if there is an IO Exception
*/
private String getFileContents(final File actualFile)
throws AnalysisException {
String contents = "";
try {
contents = FileUtils.readFileToString(actualFile).trim();
} catch (IOException e) {
throw new AnalysisException(
"Problem occured while reading dependency file.", e);
}
return contents;
}
/**
* Gathers evidence from a given file
*
* @param dependency the dependency to add evidence to
* @param name the source of the evidence
* @param contents the evidence to analyze
*/
private void gatherEvidence(Dependency dependency, final String name,
String contents) {
final Matcher matcher = AC_INIT_PATTERN.matcher(contents);
if (matcher.find()) {
final EvidenceCollection productEvidence = dependency
.getProductEvidence();
productEvidence.addEvidence(name, "Package", matcher.group(1),
Confidence.HIGHEST);
dependency.getVersionEvidence().addEvidence(name,
"Package Version", matcher.group(2), Confidence.HIGHEST);
final EvidenceCollection vendorEvidence = dependency
.getVendorEvidence();
if (null != matcher.group(3)) {
vendorEvidence.addEvidence(name, "Bug report address",
matcher.group(4), Confidence.HIGH);
}
if (null != matcher.group(5)) {
productEvidence.addEvidence(name, "Tarname", matcher.group(6),
Confidence.HIGH);
}
if (null != matcher.group(7)) {
final String url = matcher.group(8);
if (UrlStringUtils.isUrl(url)) {
vendorEvidence.addEvidence(name, "URL", url,
Confidence.HIGH);
}
}
}
}
/**
* Initializes the file type analyzer.
*
* @throws Exception thrown if there is an exception during initialization
*/
@Override
protected void initializeFileTypeAnalyzer() throws Exception {
// No initialization needed.
}
}
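As an illustration of the two regular expressions defined above, the following standalone sketch (not part of the diff) runs them against hypothetical configure and configure.ac content; the class name and sample inputs are assumptions for illustration only.
import java.util.regex.Matcher;
import java.util.regex.Pattern;
// Mirrors PACKAGE_VAR and AC_INIT_PATTERN from the analyzer above.
public class AutoconfPatternSketch {
    public static void main(String[] args) {
        // PACKAGE_* variables as they appear in a generated configure script.
        final Pattern packageVar = Pattern.compile(
                "PACKAGE_(.+?)='(.*?)'", Pattern.DOTALL | Pattern.CASE_INSENSITIVE);
        // Hypothetical excerpt from a generated configure script.
        final String configure = "PACKAGE_NAME='foo'\nPACKAGE_VERSION='1.2.3'\nPACKAGE_BUGREPORT='bugs@example.org'\n";
        final Matcher vars = packageVar.matcher(configure);
        while (vars.find()) {
            System.out.println(vars.group(1) + " -> " + vars.group(2));
        }
        // AC_INIT, rebuilt exactly as in the static initializer above.
        final String param = "\\[{0,2}(.+?)\\]{0,2}";
        final String sepParam = "\\s*,\\s*" + param;
        final Pattern acInit = Pattern.compile(String.format(
                "AC_INIT\\(%s%s(%s)?(%s)?(%s)?\\s*\\)", param, sepParam,
                sepParam, sepParam, sepParam), Pattern.DOTALL | Pattern.CASE_INSENSITIVE);
        // Hypothetical configure.ac line.
        final Matcher init = acInit.matcher("AC_INIT([my-package], [1.4.2], [bugs@example.org])");
        if (init.find()) {
            System.out.println("Package: " + init.group(1));            // my-package
            System.out.println("Version: " + init.group(2));            // 1.4.2
            System.out.println("Bug report address: " + init.group(4)); // bugs@example.org
        }
    }
}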

View File

@@ -0,0 +1,216 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2015 Institute for Defense Analyses. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Checksum;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* <p>
* Used to analyze CMake build files, and collect information that can be used to determine the associated CPE.</p>
* <p/>
* <p>
* Note: This analyzer catches straightforward invocations of the project command, plus some other observed patterns of version
* inclusion in real CMake projects. Many projects make use of older versions of CMake and/or use custom "homebrew" ways to insert
* version information. Hopefully, as the newer CMake call pattern grows in usage, this analyzer will allow more CPEs to be
* identified.</p>
*
* @author Dale Visser <dvisser@ida.org>
*/
public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(CMakeAnalyzer.class);
/**
* Used when compiling file scanning regex patterns.
*/
private static final int REGEX_OPTIONS = Pattern.DOTALL
| Pattern.CASE_INSENSITIVE | Pattern.MULTILINE;
private static final Pattern PROJECT = Pattern.compile(
"^ *project *\\([ \\n]*(\\w+)[ \\n]*.*?\\)", REGEX_OPTIONS);
// Group 1: Product
// Group 2: Version
private static final Pattern SET_VERSION = Pattern
.compile(
"^ *set\\s*\\(\\s*(\\w+)_version\\s+\"?(\\d+(?:\\.\\d+)+)[\\s\"]?\\)",
REGEX_OPTIONS);
/**
* Detects files that can be analyzed.
*/
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(".cmake")
.addFilenames("CMakeLists.txt").build();
/**
* A reference to SHA1 message digest.
*/
private static MessageDigest sha1 = null;
static {
try {
sha1 = MessageDigest.getInstance("SHA1");
} catch (NoSuchAlgorithmException e) {
LOGGER.error(e.getMessage());
}
}
/**
* Returns the name of the CMake analyzer.
*
* @return the name of the analyzer
*
*/
@Override
public String getName() {
return "CMake Analyzer";
}
/**
* Indicates that this analyzer runs in the information collection phase.
*
* @return INFORMATION_COLLECTION
*/
@Override
public AnalysisPhase getAnalysisPhase() {
return AnalysisPhase.INFORMATION_COLLECTION;
}
/**
* Returns the set of supported file extensions.
*
* @return the set of supported file extensions
*/
@Override
protected FileFilter getFileFilter() {
return FILTER;
}
/**
* No-op initializer implementation.
*
* @throws Exception never thrown
*/
@Override
protected void initializeFileTypeAnalyzer() throws Exception {
// Nothing to do here.
}
/**
* Analyzes CMake build files and adds evidence to the dependency.
*
* @param dependency the dependency being analyzed
* @param engine the engine being used to perform the scan
* @throws AnalysisException thrown if there is an unrecoverable error analyzing the dependency
*/
@Override
protected void analyzeFileType(Dependency dependency, Engine engine)
throws AnalysisException {
final File file = dependency.getActualFile();
final String parentName = file.getParentFile().getName();
final String name = file.getName();
dependency.setDisplayFileName(String.format("%s%c%s", parentName, File.separatorChar, name));
String contents;
try {
contents = FileUtils.readFileToString(file).trim();
} catch (IOException e) {
throw new AnalysisException(
"Problem occurred while reading dependency file.", e);
}
if (StringUtils.isNotBlank(contents)) {
final Matcher m = PROJECT.matcher(contents);
int count = 0;
while (m.find()) {
count++;
LOGGER.debug(String.format(
"Found project command match with %d groups: %s",
m.groupCount(), m.group(0)));
final String group = m.group(1);
LOGGER.debug("Group 1: " + group);
dependency.getProductEvidence().addEvidence(name, "Project",
group, Confidence.HIGH);
}
LOGGER.debug(String.format("Found %d matches.", count));
analyzeSetVersionCommand(dependency, engine, contents);
}
}
private void analyzeSetVersionCommand(Dependency dependency, Engine engine, String contents) {
final Dependency orig = dependency;
final Matcher m = SET_VERSION.matcher(contents);
int count = 0;
while (m.find()) {
count++;
LOGGER.debug(String.format(
"Found project command match with %d groups: %s",
m.groupCount(), m.group(0)));
String product = m.group(1);
final String version = m.group(2);
LOGGER.debug("Group 1: " + product);
LOGGER.debug("Group 2: " + version);
final String aliasPrefix = "ALIASOF_";
if (product.startsWith(aliasPrefix)) {
product = product.replaceFirst(aliasPrefix, "");
}
if (count > 1) {
//TODO - refactor so we do not assign to the parameter (checkstyle)
dependency = new Dependency(orig.getActualFile());
dependency.setDisplayFileName(String.format("%s:%s", orig.getDisplayFileName(), product));
final String filePath = String.format("%s:%s", orig.getFilePath(), product);
dependency.setFilePath(filePath);
// prevents coalescing into the dependency provided by engine
dependency.setSha1sum(Checksum.getHex(sha1.digest(filePath.getBytes())));
engine.getDependencies().add(dependency);
}
final String source = dependency.getDisplayFileName();
dependency.getProductEvidence().addEvidence(source, "Product",
product, Confidence.MEDIUM);
dependency.getVersionEvidence().addEvidence(source, "Version",
version, Confidence.MEDIUM);
}
LOGGER.debug(String.format("Found %d matches.", count));
}
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_CMAKE_ENABLED;
}
}
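A standalone sketch (not part of the diff) showing what the PROJECT and SET_VERSION patterns above would capture from a hypothetical CMakeLists.txt fragment; the class name and sample contents are assumptions for illustration only.
import java.util.regex.Matcher;
import java.util.regex.Pattern;
// Runs the analyzer's two patterns against a small, made-up build file.
public class CMakePatternSketch {
    private static final int REGEX_OPTIONS =
            Pattern.DOTALL | Pattern.CASE_INSENSITIVE | Pattern.MULTILINE;
    private static final Pattern PROJECT = Pattern.compile(
            "^ *project *\\([ \\n]*(\\w+)[ \\n]*.*?\\)", REGEX_OPTIONS);
    private static final Pattern SET_VERSION = Pattern.compile(
            "^ *set\\s*\\(\\s*(\\w+)_version\\s+\"?(\\d+(?:\\.\\d+)+)[\\s\"]?\\)", REGEX_OPTIONS);
    public static void main(String[] args) {
        // Hypothetical build file contents.
        final String contents = "project(OpenSSL)\n"
                + "set(OPENSSL_VERSION \"1.0.1\")\n"
                + "set(ALIASOF_libpng_VERSION \"1.6.17\")\n";
        final Matcher p = PROJECT.matcher(contents);
        while (p.find()) {
            System.out.println("Project evidence: " + p.group(1)); // OpenSSL
        }
        final Matcher v = SET_VERSION.matcher(contents);
        while (v.find()) {
            String product = v.group(1);
            // Mirrors the ALIASOF_ handling in analyzeSetVersionCommand above.
            if (product.startsWith("ALIASOF_")) {
                product = product.replaceFirst("ALIASOF_", "");
            }
            System.out.println("Product: " + product + ", version: " + v.group(2));
        }
    }
}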

View File

@@ -25,8 +25,6 @@ import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.queryparser.classic.ParseException;
@@ -49,6 +47,8 @@ import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* CPEAnalyzer is a utility class that takes a project dependency and attempts to discern if there is an associated CPE. It uses
@@ -61,7 +61,7 @@ public class CPEAnalyzer implements Analyzer {
/**
* The Logger.
*/
private static final Logger LOGGER = Logger.getLogger(CPEAnalyzer.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(CPEAnalyzer.class);
/**
* The maximum number of query results to return.
*/
@@ -134,15 +134,15 @@ public class CPEAnalyzer implements Analyzer {
* process.
*/
public void open() throws IOException, DatabaseException {
LOGGER.log(Level.FINE, "Opening the CVE Database");
LOGGER.debug("Opening the CVE Database");
cve = new CveDB();
cve.open();
LOGGER.log(Level.FINE, "Creating the Lucene CPE Index");
LOGGER.debug("Creating the Lucene CPE Index");
cpe = CpeMemoryIndex.getInstance();
try {
cpe.open(cve);
} catch (IndexException ex) {
LOGGER.log(Level.FINE, "IndexException", ex);
LOGGER.debug("IndexException", ex);
throw new DatabaseException(ex);
}
}
@@ -154,9 +154,11 @@ public class CPEAnalyzer implements Analyzer {
public void close() {
if (cpe != null) {
cpe.close();
cpe = null;
}
if (cve != null) {
cve.close();
cve = null;
}
}
@@ -180,11 +182,11 @@ public class CPEAnalyzer implements Analyzer {
for (Confidence confidence : Confidence.values()) {
if (dependency.getVendorEvidence().contains(confidence)) {
vendors = addEvidenceWithoutDuplicateTerms(vendors, dependency.getVendorEvidence(), confidence);
LOGGER.fine(String.format("vendor search: %s", vendors));
LOGGER.debug("vendor search: {}", vendors);
}
if (dependency.getProductEvidence().contains(confidence)) {
products = addEvidenceWithoutDuplicateTerms(products, dependency.getProductEvidence(), confidence);
LOGGER.fine(String.format("product search: %s", products));
LOGGER.debug("product search: {}", products);
}
if (!vendors.isEmpty() && !products.isEmpty()) {
final List<IndexEntry> entries = searchCPE(vendors, products, dependency.getProductEvidence().getWeighting(),
@@ -194,11 +196,11 @@ public class CPEAnalyzer implements Analyzer {
}
boolean identifierAdded = false;
for (IndexEntry e : entries) {
LOGGER.fine(String.format("Verifying entry: %s", e.toString()));
LOGGER.debug("Verifying entry: {}", e);
if (verifyEntry(e, dependency)) {
final String vendor = e.getVendor();
final String product = e.getProduct();
LOGGER.fine(String.format("identified vendor/product: %s/%s", vendor, product));
LOGGER.debug("identified vendor/product: {}/{}", vendor, product);
identifierAdded |= determineIdentifiers(dependency, vendor, product, confidence);
}
}
@@ -281,13 +283,11 @@ public class CPEAnalyzer implements Analyzer {
}
return ret;
} catch (ParseException ex) {
final String msg = String.format("Unable to parse: %s", searchString);
LOGGER.log(Level.WARNING, "An error occured querying the CPE data. See the log for more details.");
LOGGER.log(Level.INFO, msg, ex);
LOGGER.warn("An error occured querying the CPE data. See the log for more details.");
LOGGER.info("Unable to parse: {}", searchString, ex);
} catch (IOException ex) {
final String msg = String.format("IO Error with search string: %s", searchString);
LOGGER.log(Level.WARNING, "An error occured reading CPE data. See the log for more details.");
LOGGER.log(Level.INFO, msg, ex);
LOGGER.warn("An error occured reading CPE data. See the log for more details.");
LOGGER.info("IO Error with search string: {}", searchString, ex);
}
return null;
}
@@ -406,6 +406,8 @@ public class CPEAnalyzer implements Analyzer {
private boolean verifyEntry(final IndexEntry entry, final Dependency dependency) {
boolean isValid = false;
//TODO - does this nullify some of the fuzzy matching that happens in the lucene search?
// for instance CPE some-component and in the evidence we have SomeComponent.
if (collectionContainsString(dependency.getProductEvidence(), entry.getProduct())
&& collectionContainsString(dependency.getVendorEvidence(), entry.getVendor())) {
//&& collectionContainsVersion(dependency.getVersionEvidence(), entry.getVersion())
@@ -511,8 +513,8 @@ public class CPEAnalyzer implements Analyzer {
}
for (VulnerableSoftware vs : cpes) {
DependencyVersion dbVer;
if (vs.getRevision() != null && !vs.getRevision().isEmpty()) {
dbVer = DependencyVersionUtil.parseVersion(vs.getVersion() + "." + vs.getRevision());
if (vs.getUpdate() != null && !vs.getUpdate().isEmpty()) {
dbVer = DependencyVersionUtil.parseVersion(vs.getVersion() + "." + vs.getUpdate());
} else {
dbVer = DependencyVersionUtil.parseVersion(vs.getVersion());
}
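The determineCPE loop shown above widens the vendor and product search terms one Confidence level at a time and only queries the index once both term sets are non-empty. The simplified sketch below (not part of the diff, and deliberately not using the project's own types) illustrates that escalation pattern with hypothetical evidence; the real analyzer additionally verifies each candidate entry before adding an identifier.
import java.util.Arrays;
import java.util.EnumMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
// Conceptual sketch: accumulate search terms level by level and only search
// once both vendor and product terms are available.
public class ConfidenceEscalationSketch {
    enum Confidence { HIGHEST, HIGH, MEDIUM, LOW }
    public static void main(String[] args) {
        // Hypothetical evidence gathered for a single dependency.
        final Map<Confidence, List<String>> vendorEvidence = new EnumMap<Confidence, List<String>>(Confidence.class);
        final Map<Confidence, List<String>> productEvidence = new EnumMap<Confidence, List<String>>(Confidence.class);
        vendorEvidence.put(Confidence.HIGH, Arrays.asList("apache"));
        productEvidence.put(Confidence.MEDIUM, Arrays.asList("commons-collections"));
        final Set<String> vendors = new LinkedHashSet<String>();
        final Set<String> products = new LinkedHashSet<String>();
        for (Confidence confidence : Confidence.values()) {
            final List<String> v = vendorEvidence.get(confidence);
            final List<String> p = productEvidence.get(confidence);
            if (v != null) {
                vendors.addAll(v);
            }
            if (p != null) {
                products.addAll(p);
            }
            if (!vendors.isEmpty() && !products.isEmpty()) {
                // The real analyzer queries the Lucene index here and verifies
                // each candidate entry before adding an identifier.
                System.out.println("Search at " + confidence + ": vendors=" + vendors + ", products=" + products);
            }
        }
    }
}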

View File

@@ -17,14 +17,6 @@
*/
package org.owasp.dependencycheck.analyzer;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.io.FileUtils;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
@@ -34,8 +26,18 @@ import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Evidence;
import org.owasp.dependencycheck.xml.pom.PomUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileFilter;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;
import java.util.List;
import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.Downloader;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
@@ -50,7 +52,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(CentralAnalyzer.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(CentralAnalyzer.class);
/**
* The name of the analyzer.
@@ -65,7 +67,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The types of files on which this will work.
*/
private static final Set<String> SUPPORTED_EXTENSIONS = newHashSet("jar");
private static final String SUPPORTED_EXTENSIONS = "jar";
/**
* The analyzer should be disabled if there are errors, so this is a flag to determine if such an error has occurred.
@@ -103,7 +105,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
if (Settings.getBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED)) {
if (!Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED)
|| NexusAnalyzer.DEFAULT_URL.equals(Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL))) {
LOGGER.fine("Enabling the Central analyzer");
LOGGER.debug("Enabling the Central analyzer");
retval = true;
} else {
LOGGER.info("Nexus analyzer is enabled, disabling the Central Analyzer");
@@ -112,7 +114,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.info("Central analyzer disabled");
}
} catch (InvalidSettingException ise) {
LOGGER.warning("Invalid setting. Disabling the Central analyzer");
LOGGER.warn("Invalid setting. Disabling the Central analyzer");
}
return retval;
}
@@ -124,11 +126,11 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
*/
@Override
public void initializeFileTypeAnalyzer() throws Exception {
LOGGER.fine("Initializing Central analyzer");
LOGGER.fine(String.format("Central analyzer enabled: %s", isEnabled()));
LOGGER.debug("Initializing Central analyzer");
LOGGER.debug("Central analyzer enabled: {}", isEnabled());
if (isEnabled()) {
final String searchUrl = Settings.getString(Settings.KEYS.ANALYZER_CENTRAL_URL);
LOGGER.fine(String.format("Central Analyzer URL: %s", searchUrl));
LOGGER.debug("Central Analyzer URL: {}", searchUrl);
searcher = new CentralSearch(new URL(searchUrl));
}
}
@@ -164,13 +166,13 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
}
/**
* Returns the extensions for which this Analyzer runs.
*
* @return the extensions for which this Analyzer runs
* The file filter used to determine which files this analyzer supports.
*/
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(SUPPORTED_EXTENSIONS).build();
@Override
public Set<String> getSupportedExtensions() {
return SUPPORTED_EXTENSIONS;
protected FileFilter getFileFilter() {
return FILTER;
}
/**
@@ -190,7 +192,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
final List<MavenArtifact> mas = searcher.searchSha1(dependency.getSha1sum());
final Confidence confidence = mas.size() > 1 ? Confidence.HIGH : Confidence.HIGHEST;
for (MavenArtifact ma : mas) {
LOGGER.fine(String.format("Central analyzer found artifact (%s) for dependency (%s)", ma.toString(), dependency.getFileName()));
LOGGER.debug("Central analyzer found artifact ({}) for dependency ({})", ma.toString(), dependency.getFileName());
dependency.addAsEvidence("central", ma, confidence);
boolean pomAnalyzed = false;
for (Evidence e : dependency.getVendorEvidence()) {
@@ -205,19 +207,17 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
final File baseDir = Settings.getTempDirectory();
pomFile = File.createTempFile("pom", ".xml", baseDir);
if (!pomFile.delete()) {
final String msg = String.format("Unable to fetch pom.xml for %s from Central; "
LOGGER.warn("Unable to fetch pom.xml for {} from Central; "
+ "this could result in undetected CPE/CVEs.", dependency.getFileName());
LOGGER.warning(msg);
LOGGER.fine("Unable to delete temp file");
LOGGER.debug("Unable to delete temp file");
}
LOGGER.fine(String.format("Downloading %s", ma.getPomUrl()));
LOGGER.debug("Downloading {}", ma.getPomUrl());
Downloader.fetchFile(new URL(ma.getPomUrl()), pomFile);
PomUtils.analyzePOM(dependency, pomFile);
} catch (DownloadFailedException ex) {
final String msg = String.format("Unable to download pom.xml for %s from Central; "
LOGGER.warn("Unable to download pom.xml for {} from Central; "
+ "this could result in undetected CPE/CVEs.", dependency.getFileName());
LOGGER.warning(msg);
} finally {
if (pomFile != null && !FileUtils.deleteQuietly(pomFile)) {
pomFile.deleteOnExit();
@@ -227,13 +227,12 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
}
} catch (IllegalArgumentException iae) {
LOGGER.info(String.format("invalid sha1-hash on %s", dependency.getFileName()));
LOGGER.info("invalid sha1-hash on {}", dependency.getFileName());
} catch (FileNotFoundException fnfe) {
LOGGER.fine(String.format("Artifact not found in repository: '%s", dependency.getFileName()));
LOGGER.debug("Artifact not found in repository: '{}", dependency.getFileName());
} catch (IOException ioe) {
LOGGER.log(Level.FINE, "Could not connect to Central search", ioe);
LOGGER.debug("Could not connect to Central search", ioe);
errorFlag = true;
}
}
}
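The analyzer above looks artifacts up in Central by the dependency's SHA-1 hash (searcher.searchSha1(dependency.getSha1sum())). The standalone sketch below (not part of the diff) shows how such a hex-encoded lookup key can be produced with the standard java.security API; the file path is hypothetical and this is not the project's own Checksum utility.
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
// Computes the hex-encoded SHA-1 of a file, i.e. the kind of value used as
// the Central search key.
public class Sha1KeySketch {
    public static void main(String[] args) throws IOException, NoSuchAlgorithmException {
        final byte[] bytes = Files.readAllBytes(Paths.get("example.jar")); // hypothetical file
        final byte[] digest = MessageDigest.getInstance("SHA-1").digest(bytes);
        final StringBuilder hex = new StringBuilder(digest.length * 2);
        for (byte b : digest) {
            hex.append(String.format("%02x", b));
        }
        System.out.println("SHA-1 search key: " + hex);
    }
}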

View File

@@ -22,8 +22,6 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.ListIterator;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.owasp.dependencycheck.Engine;
@@ -32,7 +30,8 @@ import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
import org.owasp.dependencycheck.utils.LogUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>
@@ -49,7 +48,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
/**
* The Logger.
*/
private static final Logger LOGGER = Logger.getLogger(DependencyBundlingAnalyzer.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(DependencyBundlingAnalyzer.class);
//<editor-fold defaultstate="collapsed" desc="Constants and Member Variables">
/**
@@ -111,7 +110,8 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
final ListIterator<Dependency> subIterator = engine.getDependencies().listIterator(mainIterator.nextIndex());
while (subIterator.hasNext()) {
final Dependency nextDependency = subIterator.next();
if (hashesMatch(dependency, nextDependency)) {
if (hashesMatch(dependency, nextDependency) && !containedInWar(dependency.getFilePath())
&& !containedInWar(nextDependency.getFilePath())) {
if (firstPathIsShortest(dependency.getFilePath(), nextDependency.getFilePath())) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
@@ -125,7 +125,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
break;
} else {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
nextDependency.getRelatedDependencies().remove(nextDependency);
dependency.getRelatedDependencies().remove(nextDependency);
}
} else if (cpeIdentifiersMatch(dependency, nextDependency)
&& hasSameBasePath(dependency, nextDependency)
@@ -262,10 +262,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
}
}
}
if (LogUtils.isVerboseLoggingEnabled()) {
final String msg = String.format("IdentifiersMatch=%s (%s, %s)", matches, dependency1.getFileName(), dependency2.getFileName());
LOGGER.log(Level.FINE, msg);
}
LOGGER.debug("IdentifiersMatch={} ({}, {})", matches, dependency1.getFileName(), dependency2.getFileName());
return matches;
}
@@ -343,10 +340,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
*/
returnVal = leftName.length() <= rightName.length();
}
if (LogUtils.isVerboseLoggingEnabled()) {
final String msg = String.format("IsCore=%s (%s, %s)", returnVal, left.getFileName(), right.getFileName());
LOGGER.log(Level.FINE, msg);
}
LOGGER.debug("IsCore={} ({}, {})", returnVal, left.getFileName(), right.getFileName());
return returnVal;
}
@@ -421,4 +415,14 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
}
return count;
}
/**
* Checks if the given file path is contained within a war or ear file.
*
* @param filePath the file path to check
* @return true if the path contains '.war\' or '.ear\'.
*/
private boolean containedInWar(String filePath) {
return filePath == null ? false : filePath.matches(".*\\.(ear|war)[\\\\/].*");
}
}
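The new containedInWar check above is a single regular expression over the dependency's file path. Here is a standalone sketch (not part of the diff) that exercises the same expression against hypothetical paths.
// Exercises the containedInWar regular expression from the analyzer above.
public class ContainedInWarSketch {
    private static boolean containedInWar(String filePath) {
        return filePath == null ? false : filePath.matches(".*\\.(ear|war)[\\\\/].*");
    }
    public static void main(String[] args) {
        System.out.println(containedInWar("target/app.war/WEB-INF/lib/foo.jar")); // true
        System.out.println(containedInWar("C:\\builds\\app.ear\\lib\\bar.jar"));  // true
        System.out.println(containedInWar("lib/foo.jar"));                        // false
        System.out.println(containedInWar(null));                                 // false
    }
}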

View File

@@ -17,6 +17,7 @@
*/
package org.owasp.dependencycheck.analyzer;
import java.io.FileFilter;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
@@ -25,8 +26,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.owasp.dependencycheck.Engine;
@@ -34,6 +33,9 @@ import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This analyzer attempts to remove some well known false positives - specifically regarding the java runtime.
@@ -45,7 +47,13 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
/**
* The Logger.
*/
private static final Logger LOGGER = Logger.getLogger(FalsePositiveAnalyzer.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(FalsePositiveAnalyzer.class);
/**
* The file filter used to find DLL and EXE.
*/
private static final FileFilter DLL_EXE_FILTER = FileFilterBuilder.newInstance().addExtensions("dll", "exe").build();
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/**
* The name of the analyzer.
@@ -171,7 +179,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
final String nextVersion = nextCpe.getVersion();
if (currentVersion == null && nextVersion == null) {
//how did we get here?
LOGGER.log(Level.FINE, "currentVersion and nextVersion are both null?");
LOGGER.debug("currentVersion and nextVersion are both null?");
} else if (currentVersion == null && nextVersion != null) {
dependency.getIdentifiers().remove(currentId);
} else if (nextVersion == null && currentVersion != null) {
@@ -248,15 +256,15 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
try {
cpe.parseName(value);
} catch (UnsupportedEncodingException ex) {
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
return null;
}
return cpe;
}
/**
* Removes bad CPE matches for a dependency. Unfortunately, right now these are hard-coded patches for specific
* problems identified when testing this on a LARGE volume of jar files.
* Removes bad CPE matches for a dependency. Unfortunately, right now these are hard-coded patches for specific problems
* identified when testing this on a LARGE volume of jar files.
*
* @param dependency the dependency to analyze
*/
@@ -273,7 +281,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
//Set<Evidence> artifactId = dependency.getVendorEvidence().getEvidence("pom", "artifactid");
while (itr.hasNext()) {
final Identifier i = itr.next();
//TODO move this startsWith expression to a configuration file?
//TODO move this startsWith expression to the base suppression file
if ("cpe".equals(i.getType())) {
if ((i.getValue().matches(".*c\\+\\+.*")
|| i.getValue().startsWith("cpe:/a:file:file")
@@ -288,7 +296,14 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|| dependency.getFileName().toLowerCase().endsWith(".dll")
|| dependency.getFileName().toLowerCase().endsWith(".exe")
|| dependency.getFileName().toLowerCase().endsWith(".nuspec")
|| dependency.getFileName().toLowerCase().endsWith(".nupkg"))) {
|| dependency.getFileName().toLowerCase().endsWith(".zip")
|| dependency.getFileName().toLowerCase().endsWith(".sar")
|| dependency.getFileName().toLowerCase().endsWith(".apk")
|| dependency.getFileName().toLowerCase().endsWith(".tar")
|| dependency.getFileName().toLowerCase().endsWith(".gz")
|| dependency.getFileName().toLowerCase().endsWith(".tgz")
|| dependency.getFileName().toLowerCase().endsWith(".ear")
|| dependency.getFileName().toLowerCase().endsWith(".war"))) {
itr.remove();
} else if ((i.getValue().startsWith("cpe:/a:jquery:jquery")
|| i.getValue().startsWith("cpe:/a:prototypejs:prototype")
@@ -302,8 +317,11 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|| i.getValue().startsWith("cpe:/a:microsoft:word")
|| i.getValue().startsWith("cpe:/a:microsoft:visio")
|| i.getValue().startsWith("cpe:/a:microsoft:powerpoint")
|| i.getValue().startsWith("cpe:/a:microsoft:office"))
|| i.getValue().startsWith("cpe:/a:microsoft:office")
|| i.getValue().startsWith("cpe:/a:core_ftp:core_ftp"))
&& (dependency.getFileName().toLowerCase().endsWith(".jar")
|| dependency.getFileName().toLowerCase().endsWith(".ear")
|| dependency.getFileName().toLowerCase().endsWith(".war")
|| dependency.getFileName().toLowerCase().endsWith("pom.xml"))) {
itr.remove();
} else if (i.getValue().startsWith("cpe:/a:apache:maven")
@@ -354,9 +372,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
}
/**
* There are some known CPE entries, specifically regarding sun and oracle products due to the acquisition and
* changes in product names, that based on given evidence we can add the related CPE entries to ensure a complete
* list of CVE entries.
* There are some known CPE entries, specifically regarding sun and oracle products due to the acquisition and changes in
* product names, that based on given evidence we can add the related CPE entries to ensure a complete list of CVE entries.
*
* @param dependency the dependency being analyzed
*/
@@ -388,23 +405,22 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
newCpe4,
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe4, "UTF-8")));
} catch (UnsupportedEncodingException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
}
}
}
}
/**
* Removes duplicate entries identified that are contained within JAR files. These occasionally crop up due to POM
* entries or other types of files (such as DLLs and EXEs) being contained within the JAR.
* Removes duplicate entries identified that are contained within JAR files. These occasionally crop up due to POM entries or
* other types of files (such as DLLs and EXEs) being contained within the JAR.
*
* @param dependency the dependency that might be a duplicate
* @param engine the engine used to scan all dependencies
*/
private void removeDuplicativeEntriesFromJar(Dependency dependency, Engine engine) {
if (dependency.getFileName().toLowerCase().endsWith("pom.xml")
|| "dll".equals(dependency.getFileExtension())
|| "exe".equals(dependency.getFileExtension())) {
|| DLL_EXE_FILTER.accept(dependency.getActualFile())) {
String parentPath = dependency.getFilePath().toLowerCase();
if (parentPath.contains(".jar")) {
parentPath = parentPath.substring(0, parentPath.indexOf(".jar") + 4);

View File

@@ -17,20 +17,14 @@
*/
package org.owasp.dependencycheck.analyzer;
import java.io.FileFilter;
/**
* An Analyzer that scans specific file types.
*
* @author Jeremy Long
*/
public interface FileTypeAnalyzer extends Analyzer {
/**
* Returns whether or not this analyzer can process the given extension.
*
* @param extension the file extension to test for support.
* @return whether or not the specified file extension is supported by this analyzer.
*/
boolean supportsExtension(String extension);
public interface FileTypeAnalyzer extends Analyzer, FileFilter {
/**
* Resets the analyzer's state.

View File

@@ -89,30 +89,43 @@ public class HintAnalyzer extends AbstractAnalyzer implements Analyzer {
"spring-core",
Confidence.HIGH);
final Evidence springTest4 = new Evidence("Manifest",
"Bundle-Vendor",
"SpringSource",
Confidence.HIGH);
final Evidence springTest5 = new Evidence("jar",
final Evidence springTest4 = new Evidence("jar",
"package name",
"springframework",
Confidence.LOW);
final Evidence springSecurityTest1 = new Evidence("Manifest",
"Bundle-Name",
"Spring Security Core",
Confidence.MEDIUM);
final Evidence springSecurityTest2 = new Evidence("pom",
"artifactid",
"spring-security-core",
Confidence.HIGH);
//springsource/vware problem
final Set<Evidence> product = dependency.getProductEvidence().getEvidence();
final Set<Evidence> vendor = dependency.getVendorEvidence().getEvidence();
if (product.contains(springTest1) || product.contains(springTest2) || product.contains(springTest3)
|| (dependency.getFileName().contains("spring") && (product.contains(springTest5) || vendor.contains(springTest5)))) {
|| (dependency.getFileName().contains("spring") && product.contains(springTest4))) {
dependency.getProductEvidence().addEvidence("hint analyzer", "product", "springsource spring framework", Confidence.HIGH);
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "SpringSource", Confidence.HIGH);
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "vmware", Confidence.HIGH);
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "pivotal", Confidence.HIGH);
}
if (vendor.contains(springTest4)) {
dependency.getProductEvidence().addEvidence("hint analyzer", "product", "springsource_spring_framework", Confidence.HIGH);
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "vmware", Confidence.HIGH);
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "pivotal", Confidence.HIGH);
}
if (product.contains(springSecurityTest1) || product.contains(springSecurityTest2)) {
dependency.getProductEvidence().addEvidence("hint analyzer", "product", "springsource_spring_security", Confidence.HIGH);
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "SpringSource", Confidence.HIGH);
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "vmware", Confidence.HIGH);
}
//sun/oracle problem

View File

@@ -19,6 +19,7 @@ package org.owasp.dependencycheck.analyzer;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileFilter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
@@ -39,8 +40,6 @@ import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
import org.jsoup.Jsoup;
@@ -49,11 +48,14 @@ import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.xml.pom.License;
import org.owasp.dependencycheck.xml.pom.PomUtils;
import org.owasp.dependencycheck.xml.pom.Model;
import org.owasp.dependencycheck.utils.FileUtils;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Used to load a JAR file and collect information that can be used to determine the associated CPE.
@@ -66,7 +68,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(JarAnalyzer.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(JarAnalyzer.class);
/**
* The buffer size to use when extracting files from the archive.
*/
@@ -116,11 +118,18 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
"tool",
"bundle-manifestversion",
"bundlemanifestversion",
"bundle-vendor",
"include-resource",
"embed-dependency",
"ipojo-components",
"ipojo-extension",
"eclipse-sourcereferences");
/**
* Deprecated Jar manifest attribute that is, nonetheless, useful for analysis.
*/
@SuppressWarnings("deprecation")
private static final String IMPLEMENTATION_VENDOR_ID = Attributes.Name.IMPLEMENTATION_VENDOR_ID
.toString();
/**
* item in some manifest, should be considered medium confidence.
*/
@@ -133,10 +142,6 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* item in some manifest, should be considered medium confidence.
*/
private static final String BUNDLE_NAME = "Bundle-Name"; //: Struts 2 Core
/**
* item in some manifest, should be considered medium confidence.
*/
private static final String BUNDLE_VENDOR = "Bundle-Vendor"; //: Apache Software Foundation
/**
* A pattern to detect HTML within text.
*/
@@ -161,16 +166,21 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The set of file extensions supported by this analyzer.
*/
private static final Set<String> EXTENSIONS = newHashSet("jar", "war");
private static final String[] EXTENSIONS = {"jar", "war"};
/**
* Returns a list of file EXTENSIONS supported by this analyzer.
* The file filter used to determine which files this analyzer supports.
*/
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build();
/**
* Returns the FileFilter.
*
* @return a list of file EXTENSIONS supported by this analyzer.
* @return the FileFilter
*/
@Override
public Set<String> getSupportedExtensions() {
return EXTENSIONS;
protected FileFilter getFileFilter() {
return FILTER;
}
/**
@@ -248,20 +258,16 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
try {
jar = new JarFile(dependency.getActualFilePath());
} catch (IOException ex) {
final String msg = String.format("Unable to read JarFile '%s'.", dependency.getActualFilePath());
//final AnalysisException ax = new AnalysisException(msg, ex);
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, "", ex);
LOGGER.warn("Unable to read JarFile '{}'.", dependency.getActualFilePath());
LOGGER.trace("", ex);
return false;
}
List<String> pomEntries;
try {
pomEntries = retrievePomListing(jar);
} catch (IOException ex) {
final String msg = String.format("Unable to read Jar file entries in '%s'.", dependency.getActualFilePath());
//final AnalysisException ax = new AnalysisException(msg, ex);
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, msg, ex);
LOGGER.warn("Unable to read Jar file entries in '{}'.", dependency.getActualFilePath());
LOGGER.trace("", ex);
return false;
}
File externalPom = null;
@@ -276,14 +282,14 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
}
}
for (String path : pomEntries) {
LOGGER.fine(String.format("Reading pom entry: %s", path));
LOGGER.debug("Reading pom entry: {}", path);
Properties pomProperties = null;
try {
if (externalPom == null) {
pomProperties = retrievePomProperties(path, jar);
}
} catch (IOException ex) {
LOGGER.log(Level.FINEST, "ignore this, failed reading a non-existent pom.properties", ex);
LOGGER.trace("ignore this, failed reading a non-existent pom.properties", ex);
}
Model pom = null;
try {
@@ -317,9 +323,8 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
foundSomething |= setPomEvidence(dependency, pom, classes);
}
} catch (AnalysisException ex) {
final String msg = String.format("An error occured while analyzing '%s'.", dependency.getActualFilePath());
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, "", ex);
LOGGER.warn("An error occured while analyzing '{}'.", dependency.getActualFilePath());
LOGGER.trace("", ex);
}
}
return foundSomething;
@@ -343,13 +348,13 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
reader = new InputStreamReader(jar.getInputStream(propEntry), "UTF-8");
pomProperties = new Properties();
pomProperties.load(reader);
LOGGER.fine(String.format("Read pom.properties: %s", propPath));
LOGGER.debug("Read pom.properties: {}", propPath);
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, "close error", ex);
LOGGER.trace("close error", ex);
}
}
}
@@ -371,7 +376,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
final JarEntry entry = entries.nextElement();
final String entryName = (new File(entry.getName())).getName().toLowerCase();
if (!entry.isDirectory() && "pom.xml".equals(entryName)) {
LOGGER.fine(String.format("POM Entry found: %s", entry.getName()));
LOGGER.trace("POM Entry found: {}", entry.getName());
pomEntries.add(entry.getName());
}
}
@@ -386,7 +391,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* @param dependency the dependency being analyzed
* @return returns the POM object
* @throws AnalysisException is thrown if there is an exception extracting or parsing the POM
* {@link org.owasp.dependencycheck.jaxb.pom.generated.Model} object
* {@link org.owasp.dependencycheck.xml.pom.Model} object
*/
private Model extractPom(String path, JarFile jar, Dependency dependency) throws AnalysisException {
InputStream input = null;
@@ -407,9 +412,8 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
bos.flush();
dependency.setActualFilePath(file.getAbsolutePath());
} catch (IOException ex) {
final String msg = String.format("An error occurred reading '%s' from '%s'.", path, dependency.getFilePath());
LOGGER.warning(msg);
LOGGER.log(Level.SEVERE, "", ex);
LOGGER.warn("An error occurred reading '{}' from '{}'.", path, dependency.getFilePath());
LOGGER.error("", ex);
} finally {
closeStream(bos);
closeStream(fos);
@@ -428,7 +432,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
try {
stream.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
}
}
}
@@ -443,7 +447,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
try {
stream.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
}
}
}
@@ -565,7 +569,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
//Description
final String description = pom.getDescription();
if (description != null && !description.isEmpty()) {
if (description != null && !description.isEmpty() && !description.startsWith("POM was created by")) {
foundSomething = true;
final String trimmedDescription = addDescription(dependency, description, "pom", "description");
addMatchingValues(classes, trimmedDescription, dependency.getVendorEvidence());
@@ -643,9 +647,8 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
&& !dependency.getFileName().toLowerCase().endsWith("-javadoc.jar")
&& !dependency.getFileName().toLowerCase().endsWith("-src.jar")
&& !dependency.getFileName().toLowerCase().endsWith("-doc.jar")) {
LOGGER.log(Level.FINE,
String.format("Jar file '%s' does not contain a manifest.",
dependency.getFileName()));
LOGGER.debug("Jar file '{}' does not contain a manifest.",
dependency.getFileName());
}
return false;
}
@@ -657,6 +660,9 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
final String source = "Manifest";
String specificationVersion = null;
boolean hasImplementationVersion = false;
for (Entry<Object, Object> entry : atts.entrySet()) {
String key = entry.getKey().toString();
String value = atts.getValue(key);
@@ -670,13 +676,16 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
productEvidence.addEvidence(source, key, value, Confidence.HIGH);
addMatchingValues(classInformation, value, productEvidence);
} else if (key.equalsIgnoreCase(Attributes.Name.IMPLEMENTATION_VERSION.toString())) {
hasImplementationVersion = true;
foundSomething = true;
versionEvidence.addEvidence(source, key, value, Confidence.HIGH);
} else if ("specification-version".equalsIgnoreCase(key)) {
specificationVersion = key;
} else if (key.equalsIgnoreCase(Attributes.Name.IMPLEMENTATION_VENDOR.toString())) {
foundSomething = true;
vendorEvidence.addEvidence(source, key, value, Confidence.HIGH);
addMatchingValues(classInformation, value, vendorEvidence);
} else if (key.equalsIgnoreCase(Attributes.Name.IMPLEMENTATION_VENDOR_ID.toString())) {
} else if (key.equalsIgnoreCase(IMPLEMENTATION_VENDOR_ID)) {
foundSomething = true;
vendorEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
addMatchingValues(classInformation, value, vendorEvidence);
@@ -689,10 +698,11 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
foundSomething = true;
productEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
addMatchingValues(classInformation, value, productEvidence);
} else if (key.equalsIgnoreCase(BUNDLE_VENDOR)) {
foundSomething = true;
vendorEvidence.addEvidence(source, key, value, Confidence.HIGH);
addMatchingValues(classInformation, value, vendorEvidence);
// //the following caused false positives.
// } else if (key.equalsIgnoreCase(BUNDLE_VENDOR)) {
// foundSomething = true;
// vendorEvidence.addEvidence(source, key, value, Confidence.HIGH);
// addMatchingValues(classInformation, value, vendorEvidence);
} else if (key.equalsIgnoreCase(BUNDLE_VERSION)) {
foundSomething = true;
versionEvidence.addEvidence(source, key, value, Confidence.HIGH);
@@ -723,9 +733,9 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
foundSomething = true;
if (key.contains("version")) {
if (key.contains("specification")) {
versionEvidence.addEvidence(source, key, value, Confidence.LOW);
} else {
if (!key.contains("specification")) {
//versionEvidence.addEvidence(source, key, value, Confidence.LOW);
//} else {
versionEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
}
} else if ("build-id".equals(key)) {
@@ -777,6 +787,10 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
}
}
}
if (specificationVersion != null && !hasImplementationVersion) {
foundSomething = true;
versionEvidence.addEvidence(source, "specificationn-version", specificationVersion, Confidence.HIGH);
}
} finally {
if (jar != null) {
jar.close();
@@ -890,11 +904,10 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
@Override
public void close() {
if (tempFileLocation != null && tempFileLocation.exists()) {
LOGGER.log(Level.FINE, "Attempting to delete temporary files");
LOGGER.debug("Attempting to delete temporary files");
final boolean success = FileUtils.delete(tempFileLocation);
if (!success) {
LOGGER.log(Level.WARNING,
"Failed to delete some temporary files, see the log for more details");
LOGGER.warn("Failed to delete some temporary files, see the log for more details");
}
}
}
@@ -924,9 +937,9 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
JarFile jar = null;
try {
jar = new JarFile(dependency.getActualFilePath());
final Enumeration entries = jar.entries();
final Enumeration<JarEntry> entries = jar.entries();
while (entries.hasMoreElements()) {
final JarEntry entry = (JarEntry) entries.nextElement();
final JarEntry entry = entries.nextElement();
final String name = entry.getName().toLowerCase();
//no longer stripping "|com\\.sun" - there are some com.sun jar files with CVEs.
if (name.endsWith(".class") && !name.matches("^javax?\\..*$")) {
@@ -935,15 +948,14 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
}
}
} catch (IOException ex) {
final String msg = String.format("Unable to open jar file '%s'.", dependency.getFileName());
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.warn("Unable to open jar file '{}'.", dependency.getFileName());
LOGGER.debug("", ex);
} finally {
if (jar != null) {
try {
jar.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
}
}
}
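The hunks above tighten the raw Enumeration to Enumeration<JarEntry> and read manifest attributes as evidence. The standalone sketch below (not part of the diff) uses the same standard java.util.jar calls against a hypothetical jar path to show the kind of entries and attributes the analyzer inspects.
import java.io.IOException;
import java.util.Enumeration;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
// Lists pom.xml entries with the generic Enumeration<JarEntry> form used in
// the diff and prints a few manifest attributes that become evidence.
public class JarInspectionSketch {
    public static void main(String[] args) throws IOException {
        final JarFile jar = new JarFile("example.jar"); // hypothetical path
        try {
            final Enumeration<JarEntry> entries = jar.entries();
            while (entries.hasMoreElements()) {
                final JarEntry entry = entries.nextElement();
                final String name = entry.getName().toLowerCase();
                if (!entry.isDirectory() && name.endsWith("pom.xml")) {
                    System.out.println("POM entry found: " + entry.getName());
                }
            }
            final Manifest manifest = jar.getManifest();
            if (manifest != null) {
                final Attributes atts = manifest.getMainAttributes();
                System.out.println("Implementation-Title: "
                        + atts.getValue(Attributes.Name.IMPLEMENTATION_TITLE));
                System.out.println("Implementation-Version: "
                        + atts.getValue(Attributes.Name.IMPLEMENTATION_VERSION));
                System.out.println("Implementation-Vendor: "
                        + atts.getValue(Attributes.Name.IMPLEMENTATION_VENDOR));
            }
        } finally {
            jar.close();
        }
    }
}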

View File

@@ -1,141 +0,0 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2014 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;
/**
*
* Used to analyze a JavaScript file to gather information to aid in identification of a CPE identifier.
*
* @author Jeremy Long
*/
public class JavaScriptAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(JavaScriptAnalyzer.class.getName());
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/**
* The name of the analyzer.
*/
private static final String ANALYZER_NAME = "JavaScript Analyzer";
/**
* The phase that this analyzer is intended to run in.
*/
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION;
/**
* The set of file extensions supported by this analyzer.
*/
private static final Set<String> EXTENSIONS = newHashSet("js");
/**
* Returns a list of file EXTENSIONS supported by this analyzer.
*
* @return a list of file EXTENSIONS supported by this analyzer.
*/
@Override
public Set<String> getSupportedExtensions() {
return EXTENSIONS;
}
/**
* Returns the name of the analyzer.
*
* @return the name of the analyzer.
*/
@Override
public String getName() {
return ANALYZER_NAME;
}
/**
* Returns the phase that the analyzer is intended to run in.
*
* @return the phase that the analyzer is intended to run in.
*/
@Override
public AnalysisPhase getAnalysisPhase() {
return ANALYSIS_PHASE;
}
//</editor-fold>
/**
* Returns the key used in the properties file to reference the analyzer's enabled property.
*
* @return the analyzer's enabled property setting key
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_JAVASCRIPT_ENABLED;
}
/**
* Loads a specified JavaScript file and collects information from the copyright information contained within.
*
* @param dependency the dependency to analyze.
* @param engine the engine that is scanning the dependencies
* @throws AnalysisException is thrown if there is an error reading the JavaScript file.
*/
@Override
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
BufferedReader fin = null;
try {
// /\*([^\*][^/]|[\r\n\f])+?\*/
final Pattern extractComments = Pattern.compile("(/\\*([^*]|[\\r\\n]|(\\*+([^*/]|[\\r\\n])))*\\*+/)|(//.*)", Pattern.MULTILINE);
File file = dependency.getActualFile();
fin = new BufferedReader(new FileReader(file));
StringBuilder sb = new StringBuilder(2000);
String text;
while ((text = fin.readLine()) != null) {
sb.append(text);
}
} catch (FileNotFoundException ex) {
final String msg = String.format("Dependency file not found: '%s'", dependency.getActualFilePath());
throw new AnalysisException(msg, ex);
} catch (IOException ex) {
LOGGER.log(Level.SEVERE, null, ex);
} finally {
if (fin != null) {
try {
fin.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, null, ex);
}
}
}
}
@Override
protected void initializeFileTypeAnalyzer() throws Exception {
}
}

View File

@@ -17,14 +17,6 @@
*/
package org.owasp.dependencycheck.analyzer;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.io.FileUtils;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
@@ -34,9 +26,19 @@ import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Evidence;
import org.owasp.dependencycheck.xml.pom.PomUtils;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileFilter;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.Downloader;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
/**
@@ -63,7 +65,7 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(NexusAnalyzer.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(NexusAnalyzer.class);
/**
* The name of the analyzer.
@@ -78,7 +80,7 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The types of files on which this will work.
*/
private static final Set<String> SUPPORTED_EXTENSIONS = newHashSet("jar");
private static final String SUPPORTED_EXTENSIONS = "jar";
/**
* The Nexus Search to be set up for this analyzer.
@@ -107,10 +109,10 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.info("Enabling Nexus analyzer");
retval = true;
} else {
LOGGER.fine("Nexus analyzer disabled, using Central instead");
LOGGER.debug("Nexus analyzer disabled, using Central instead");
}
} catch (InvalidSettingException ise) {
LOGGER.warning("Invalid setting. Disabling Nexus analyzer");
LOGGER.warn("Invalid setting. Disabling Nexus analyzer");
}
return retval;
@@ -133,21 +135,21 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
*/
@Override
public void initializeFileTypeAnalyzer() throws Exception {
LOGGER.fine("Initializing Nexus Analyzer");
LOGGER.fine(String.format("Nexus Analyzer enabled: %s", isEnabled()));
LOGGER.debug("Initializing Nexus Analyzer");
LOGGER.debug("Nexus Analyzer enabled: {}", isEnabled());
if (isEnabled()) {
final String searchUrl = Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL);
LOGGER.fine(String.format("Nexus Analyzer URL: %s", searchUrl));
LOGGER.debug("Nexus Analyzer URL: {}", searchUrl);
try {
searcher = new NexusSearch(new URL(searchUrl));
if (!searcher.preflightRequest()) {
LOGGER.warning("There was an issue getting Nexus status. Disabling analyzer.");
LOGGER.warn("There was an issue getting Nexus status. Disabling analyzer.");
setEnabled(false);
}
} catch (MalformedURLException mue) {
// I know that initialize can throw an exception, but we'll
// just disable the analyzer if the URL isn't valid
LOGGER.warning(String.format("Property %s not a valid URL. Nexus Analyzer disabled", searchUrl));
LOGGER.warn("Property {} not a valid URL. Nexus Analyzer disabled", searchUrl);
setEnabled(false);
}
}
@@ -184,13 +186,18 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
}
/**
* Returns the extensions for which this Analyzer runs.
* The file filter used to determine which files this analyzer supports.
*/
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(SUPPORTED_EXTENSIONS).build();
/**
* Returns the FileFilter
*
* @return the extensions for which this Analyzer runs
* @return the FileFilter
*/
@Override
public Set<String> getSupportedExtensions() {
return SUPPORTED_EXTENSIONS;
protected FileFilter getFileFilter() {
return FILTER;
}
/**
@@ -209,7 +216,7 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
final MavenArtifact ma = searcher.searchSha1(dependency.getSha1sum());
dependency.addAsEvidence("nexus", ma, Confidence.HIGH);
boolean pomAnalyzed = false;
LOGGER.fine("POM URL " + ma.getPomUrl());
LOGGER.debug("POM URL {}", ma.getPomUrl());
for (Evidence e : dependency.getVendorEvidence()) {
if ("pom".equals(e.getSource())) {
pomAnalyzed = true;
@@ -222,18 +229,16 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
final File baseDir = Settings.getTempDirectory();
pomFile = File.createTempFile("pom", ".xml", baseDir);
if (!pomFile.delete()) {
final String msg = String.format("Unable to fetch pom.xml for %s from Nexus repository; "
LOGGER.warn("Unable to fetch pom.xml for {} from Nexus repository; "
+ "this could result in undetected CPE/CVEs.", dependency.getFileName());
LOGGER.warning(msg);
LOGGER.fine("Unable to delete temp file");
LOGGER.debug("Unable to delete temp file");
}
LOGGER.fine(String.format("Downloading %s", ma.getPomUrl()));
LOGGER.debug("Downloading {}", ma.getPomUrl());
Downloader.fetchFile(new URL(ma.getPomUrl()), pomFile);
PomUtils.analyzePOM(dependency, pomFile);
} catch (DownloadFailedException ex) {
final String msg = String.format("Unable to download pom.xml for %s from Nexus repository; "
LOGGER.warn("Unable to download pom.xml for {} from Nexus repository; "
+ "this could result in undetected CPE/CVEs.", dependency.getFileName());
LOGGER.warning(msg);
} finally {
if (pomFile != null && !FileUtils.deleteQuietly(pomFile)) {
pomFile.deleteOnExit();
@@ -245,11 +250,11 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.info(String.format("invalid sha-1 hash on %s", dependency.getFileName()));
} catch (FileNotFoundException fnfe) {
//dependency.addAnalysisException(new AnalysisException("Artifact not found on repository"));
LOGGER.fine(String.format("Artifact not found in repository '%s'", dependency.getFileName()));
LOGGER.log(Level.FINE, fnfe.getMessage(), fnfe);
LOGGER.debug("Artifact not found in repository '{}'", dependency.getFileName());
LOGGER.debug(fnfe.getMessage(), fnfe);
} catch (IOException ioe) {
//dependency.addAnalysisException(new AnalysisException("Could not connect to repository", ioe));
LOGGER.log(Level.FINE, "Could not connect to nexus repository", ioe);
LOGGER.debug("Could not connect to nexus repository", ioe);
}
}
}
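Alongside the logging change, the analyzers in this compare drop getSupportedExtensions() in favor of a FileFilter built with FileFilterBuilder. A hedged sketch of the new wiring; addExtensions(), build(), and accept() are the calls used in these hunks, while the class name and the supports() helper are illustrative only:
import java.io.File;
import java.io.FileFilter;
import org.owasp.dependencycheck.utils.FileFilterBuilder;

public class FileFilterSketch {

    // Accept *.jar files, mirroring the NexusAnalyzer hunk above.
    private static final FileFilter FILTER =
            FileFilterBuilder.newInstance().addExtensions("jar").build();

    // The analyzer now exposes a filter instead of a Set<String> of extensions.
    protected FileFilter getFileFilter() {
        return FILTER;
    }

    // Dispatch decisions (e.g. whl vs. egg/zip in the Python analyzer) become accept() calls.
    boolean supports(File file) {
        return FILTER.accept(file);
    }
}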

View File

@@ -17,12 +17,6 @@
*/
package org.owasp.dependencycheck.analyzer;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.data.nuget.NugetPackage;
@@ -31,7 +25,15 @@ import org.owasp.dependencycheck.data.nuget.NuspecParser;
import org.owasp.dependencycheck.data.nuget.XPathNuspecParser;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
/**
* Analyzer which will parse a Nuspec file to gather module information.
@@ -43,7 +45,7 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(NuspecAnalyzer.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(NuspecAnalyzer.class);
/**
* The name of the analyzer.
@@ -58,7 +60,7 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The types of files on which this will work.
*/
private static final Set<String> SUPPORTED_EXTENSIONS = newHashSet("nuspec");
private static final String SUPPORTED_EXTENSIONS = "nuspec";
/**
* Initializes the analyzer once before any analysis is performed.
@@ -100,13 +102,19 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
}
/**
* Returns the extensions for which this Analyzer runs.
* The file filter used to determine which files this analyzer supports.
*/
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(
SUPPORTED_EXTENSIONS).build();
/**
* Returns the FileFilter
*
* @return the extensions for which this Analyzer runs
* @return the FileFilter
*/
@Override
public Set<String> getSupportedExtensions() {
return SUPPORTED_EXTENSIONS;
protected FileFilter getFileFilter() {
return FILTER;
}
/**
@@ -118,7 +126,7 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
*/
@Override
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
LOGGER.log(Level.FINE, "Checking Nuspec file {0}", dependency.toString());
LOGGER.debug("Checking Nuspec file {}", dependency.toString());
try {
final NuspecParser parser = new XPathNuspecParser();
NugetPackage np = null;
@@ -135,7 +143,7 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
try {
fis.close();
} catch (IOException e) {
LOGGER.fine("Error closing input stream");
LOGGER.debug("Error closing input stream");
}
}
}

View File

@@ -0,0 +1,175 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2015 Institute for Defense Analyses. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import org.apache.commons.io.FileUtils;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Used to analyze OpenSSL source code present in the file system.
*
* @author Dale Visser <dvisser@ida.org>
*/
public class OpenSSLAnalyzer extends AbstractFileTypeAnalyzer {
private static final int HEXADECIMAL = 16;
/**
* Filename to analyze. All other .h files get removed from consideration.
*/
private static final String OPENSSLV_H = "opensslv.h";
/**
* Filter that detects files named "opensslv.h".
*/
private static final FileFilter OPENSSLV_FILTER = FileFilterBuilder.newInstance().addFilenames(OPENSSLV_H).build();
private static final Pattern VERSION_PATTERN = Pattern.compile(
"define\\s+OPENSSL_VERSION_NUMBER\\s+0x([0-9a-zA-Z]{8})L", Pattern.DOTALL
| Pattern.CASE_INSENSITIVE);
private static final int MAJOR_OFFSET = 28;
private static final long MINOR_MASK = 0x0ff00000L;
private static final int MINOR_OFFSET = 20;
private static final long FIX_MASK = 0x000ff000L;
private static final int FIX_OFFSET = 12;
private static final long PATCH_MASK = 0x00000ff0L;
private static final int PATCH_OFFSET = 4;
private static final int NUM_LETTERS = 26;
private static final int STATUS_MASK = 0x0000000f;
/**
* Returns the OpenSSL version as a string.
*
* @param openSSLVersionConstant the OpenSSL version constant
* @return the version of OpenSSL
*/
static String getOpenSSLVersion(long openSSLVersionConstant) {
final long major = openSSLVersionConstant >>> MAJOR_OFFSET;
final long minor = (openSSLVersionConstant & MINOR_MASK) >>> MINOR_OFFSET;
final long fix = (openSSLVersionConstant & FIX_MASK) >>> FIX_OFFSET;
final long patchLevel = (openSSLVersionConstant & PATCH_MASK) >>> PATCH_OFFSET;
final String patch = 0 == patchLevel || patchLevel > NUM_LETTERS ? "" : String.valueOf((char) (patchLevel + 'a' - 1));
final int statusCode = (int) (openSSLVersionConstant & STATUS_MASK);
final String status = 0xf == statusCode ? "" : (0 == statusCode ? "-dev" : "-beta" + statusCode);
return String.format("%d.%d.%d%s%s", major, minor, fix, patch, status);
}
/**
* Returns the name of the OpenSSL Source Analyzer.
*
* @return the name of the analyzer
*/
@Override
public String getName() {
return "OpenSSL Source Analyzer";
}
/**
* Tell that we are used for information collection.
*
* @return INFORMATION_COLLECTION
*/
@Override
public AnalysisPhase getAnalysisPhase() {
return AnalysisPhase.INFORMATION_COLLECTION;
}
/**
* Returns the FileFilter used to determine which files this analyzer supports.
*
* @return the FileFilter
*/
@Override
protected FileFilter getFileFilter() {
return OPENSSLV_FILTER;
}
/**
* No-op initializer implementation.
*
* @throws Exception never thrown
*/
@Override
protected void initializeFileTypeAnalyzer() throws Exception {
// Nothing to do here.
}
/**
* Analyzes the opensslv.h header and adds OpenSSL version evidence to the dependency.
*
* @param dependency the dependency being analyzed
* @param engine the engine being used to perform the scan
* @throws AnalysisException thrown if there is an unrecoverable error analyzing the dependency
*/
@Override
protected void analyzeFileType(Dependency dependency, Engine engine)
throws AnalysisException {
final File file = dependency.getActualFile();
final String parentName = file.getParentFile().getName();
boolean found = false;
final String contents = getFileContents(file);
if (!contents.isEmpty()) {
final Matcher matcher = VERSION_PATTERN.matcher(contents);
if (matcher.find()) {
dependency.getVersionEvidence().addEvidence(OPENSSLV_H, "Version Constant",
getOpenSSLVersion(Long.parseLong(matcher.group(1), HEXADECIMAL)), Confidence.HIGH);
found = true;
}
}
if (found) {
dependency.setDisplayFileName(parentName + File.separatorChar + OPENSSLV_H);
dependency.getVendorEvidence().addEvidence(OPENSSLV_H, "Vendor", "OpenSSL", Confidence.HIGHEST);
dependency.getProductEvidence().addEvidence(OPENSSLV_H, "Product", "OpenSSL", Confidence.HIGHEST);
} else {
engine.getDependencies().remove(dependency);
}
}
/**
* Retrieves the contents of a given file.
*
* @param actualFile the file to read
* @return the contents of the file
* @throws AnalysisException thrown if there is an IO Exception
*/
private String getFileContents(final File actualFile)
throws AnalysisException {
String contents;
try {
contents = FileUtils.readFileToString(actualFile).trim();
} catch (IOException e) {
throw new AnalysisException(
"Problem occurred while reading dependency file.", e);
}
return contents;
}
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_OPENSSL_ENABLED;
}
}
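The new analyzer above decodes OpenSSL's MNNFFPPS-packed OPENSSL_VERSION_NUMBER: one hex digit of major, two of minor, two of fix, two for the patch letter, and a status nibble. A small worked example of the shifts and masks; the constant 0x1000107fL (which I believe corresponds to OpenSSL 1.0.1g) is used purely for illustration:
public class OpenSSLVersionExample {
    public static void main(String[] args) {
        final long c = 0x1000107fL;
        final long major = c >>> 28;                 // 1
        final long minor = (c & 0x0ff00000L) >>> 20; // 0
        final long fix = (c & 0x000ff000L) >>> 12;   // 1
        final long patch = (c & 0x00000ff0L) >>> 4;  // 7 -> 'a' + 7 - 1 = 'g'
        final int status = (int) (c & 0x0000000fL);  // 0xf -> release, so no -dev/-beta suffix
        System.out.printf("%d.%d.%d%c (status nibble 0x%x)%n",
                major, minor, fix, (char) ('a' + patch - 1), status);
        // prints 1.0.1g (status nibble 0xf), matching getOpenSSLVersion(0x1000107fL)
    }
}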

View File

@@ -19,17 +19,10 @@ package org.owasp.dependencycheck.analyzer;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FilenameFilter;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import javax.mail.MessagingException;
import javax.mail.internet.InternetHeaders;
import org.apache.commons.io.filefilter.NameFileFilter;
import org.apache.commons.io.filefilter.SuffixFileFilter;
import org.apache.commons.io.input.AutoCloseInputStream;
@@ -39,8 +32,14 @@ import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.mail.MessagingException;
import javax.mail.internet.InternetHeaders;
import org.owasp.dependencycheck.utils.ExtractionException;
import org.owasp.dependencycheck.utils.ExtractionUtil;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.FileUtils;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.UrlStringUtils;
@@ -66,8 +65,8 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The logger.
*/
private static final Logger LOGGER = Logger
.getLogger(PythonDistributionAnalyzer.class.getName());
private static final Logger LOGGER = LoggerFactory
.getLogger(PythonDistributionAnalyzer.class);
/**
* The count of directories created during analysis. This is used for creating temporary directories.
@@ -86,13 +85,17 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The set of file extensions supported by this analyzer.
*/
private static final Set<String> EXTENSIONS = newHashSet("whl", "egg",
"zip", METADATA, PKG_INFO);
private static final String[] EXTENSIONS = {"whl", "egg", "zip"};
/**
* Used to match on egg archive candidate extenssions.
* Used to match on egg archive candidate extensions.
*/
private static final Pattern EGG_OR_ZIP = Pattern.compile("egg|zip");
private static final FileFilter EGG_OR_ZIP = FileFilterBuilder.newInstance().addExtensions("egg", "zip").build();
/**
* Used to detect files with a .whl extension.
*/
private static final FileFilter WHL_FILTER = FileFilterBuilder.newInstance().addExtensions("whl").build();
/**
* The parent directory for the individual directories per archive.
@@ -114,23 +117,29 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
/**
* Filter that detects files named "METADATA".
*/
private static final FilenameFilter METADATA_FILTER = new NameFileFilter(
private static final NameFileFilter METADATA_FILTER = new NameFileFilter(
METADATA);
/**
* Filter that detects files named "PKG-INFO".
*/
private static final FilenameFilter PKG_INFO_FILTER = new NameFileFilter(
private static final NameFileFilter PKG_INFO_FILTER = new NameFileFilter(
PKG_INFO);
/**
* Returns a list of file EXTENSIONS supported by this analyzer.
* The file filter used to determine which files this analyzer supports.
*/
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addFileFilters(
METADATA_FILTER, PKG_INFO_FILTER).addExtensions(EXTENSIONS).build();
/**
* Returns the FileFilter
*
* @return a list of file EXTENSIONS supported by this analyzer.
* @return the FileFilter
*/
@Override
public Set<String> getSupportedExtensions() {
return EXTENSIONS;
protected FileFilter getFileFilter() {
return FILTER;
}
/**
@@ -148,6 +157,7 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
*
* @return the phase that the analyzer is intended to run in.
*/
@Override
public AnalysisPhase getAnalysisPhase() {
return ANALYSIS_PHASE;
}
@@ -165,16 +175,14 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
@Override
protected void analyzeFileType(Dependency dependency, Engine engine)
throws AnalysisException {
if ("whl".equals(dependency.getFileExtension())) {
final File actualFile = dependency.getActualFile();
if (WHL_FILTER.accept(actualFile)) {
collectMetadataFromArchiveFormat(dependency, DIST_INFO_FILTER,
METADATA_FILTER);
} else if (EGG_OR_ZIP.matcher(
StringUtils.stripToEmpty(dependency.getFileExtension()))
.matches()) {
} else if (EGG_OR_ZIP.accept(actualFile)) {
collectMetadataFromArchiveFormat(dependency, EGG_INFO_FILTER,
PKG_INFO_FILTER);
} else {
final File actualFile = dependency.getActualFile();
final String name = actualFile.getName();
final boolean metadata = METADATA.equals(name);
if (metadata || PKG_INFO.equals(name)) {
@@ -203,7 +211,7 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
FilenameFilter folderFilter, FilenameFilter metadataFilter)
throws AnalysisException {
final File temp = getNextTempDirectory();
LOGGER.fine(String.format("%s exists? %b", temp, temp.exists()));
LOGGER.debug("{} exists? {}", temp, temp.exists());
try {
ExtractionUtil.extractFilesUsingFilter(
new File(dependency.getActualFilePath()), temp,
@@ -247,10 +255,10 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
@Override
public void close() {
if (tempFileLocation != null && tempFileLocation.exists()) {
LOGGER.log(Level.FINE, "Attempting to delete temporary files");
LOGGER.debug("Attempting to delete temporary files");
final boolean success = FileUtils.delete(tempFileLocation);
if (!success) {
LOGGER.log(Level.WARNING,
LOGGER.warn(
"Failed to delete some temporary files, see the log for more details");
}
}
@@ -298,7 +306,7 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
private static void addPropertyToEvidence(InternetHeaders headers,
EvidenceCollection evidence, String property, Confidence confidence) {
final String value = headers.getHeader(property, null);
LOGGER.fine(String.format("Property: %s, Value: %s", property, value));
LOGGER.debug("Property: {}, Value: {}", property, value);
if (StringUtils.isNotBlank(value)) {
evidence.addEvidence(METADATA, property, value, confidence);
}
@@ -329,15 +337,15 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
private static InternetHeaders getManifestProperties(File manifest) {
final InternetHeaders result = new InternetHeaders();
if (null == manifest) {
LOGGER.fine("Manifest file not found.");
LOGGER.debug("Manifest file not found.");
} else {
try {
result.load(new AutoCloseInputStream(new BufferedInputStream(
new FileInputStream(manifest))));
} catch (MessagingException e) {
LOGGER.log(Level.WARNING, e.getMessage(), e);
LOGGER.warn(e.getMessage(), e);
} catch (FileNotFoundException e) {
LOGGER.log(Level.WARNING, e.getMessage(), e);
LOGGER.warn(e.getMessage(), e);
}
}
return result;

View File

@@ -17,18 +17,6 @@
*/
package org.owasp.dependencycheck.analyzer;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.NameFileFilter;
import org.apache.commons.io.filefilter.SuffixFileFilter;
@@ -37,8 +25,20 @@ import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.UrlStringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Used to analyze a Python package, and collect information that can be used to determine the associated CPE.
@@ -56,14 +56,13 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The logger.
*/
private static final Logger LOGGER = Logger
.getLogger(PythonDistributionAnalyzer.class.getName());
private static final Logger LOGGER = LoggerFactory
.getLogger(PythonPackageAnalyzer.class);
/**
* Filename extensions for files to be analyzed.
*/
private static final Set<String> EXTENSIONS = Collections
.unmodifiableSet(Collections.singleton("py"));
private static final String EXTENSIONS = "py";
/**
* Pattern for matching the module docstring in a source file.
@@ -134,13 +133,18 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
}
/**
* Returns the set of supported file extensions.
* The file filter used to determine which files this analyzer supports.
*/
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build();
/**
* Returns the FileFilter
*
* @return the set of supported file extensions
* @return the FileFilter
*/
@Override
protected Set<String> getSupportedExtensions() {
return EXTENSIONS;
protected FileFilter getFileFilter() {
return FILTER;
}
/**
@@ -208,12 +212,12 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
*/
private boolean analyzeFileContents(Dependency dependency, File file)
throws AnalysisException {
String contents = "";
String contents;
try {
contents = FileUtils.readFileToString(file).trim();
} catch (IOException e) {
throw new AnalysisException(
"Problem occured while reading dependency file.", e);
"Problem occurred while reading dependency file.", e);
}
boolean found = false;
if (!contents.isEmpty()) {
@@ -240,7 +244,7 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
found |= gatherHomePageEvidence(HOMEPAGE_PATTERN,
vendorEvidence, source, "HomePage", contents);
} catch (MalformedURLException e) {
LOGGER.warning(e.getMessage());
LOGGER.warn(e.getMessage());
}
}
return found;

View File

@@ -23,7 +23,6 @@ import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
@@ -32,6 +31,8 @@ import javax.xml.xpath.XPathFactory;
import org.owasp.dependencycheck.data.nexus.MavenArtifact;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.URLConnectionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;
@@ -55,7 +56,7 @@ public class CentralSearch {
/**
* Used for logging.
*/
private static final Logger LOGGER = Logger.getLogger(CentralSearch.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(CentralSearch.class);
/**
* Creates a NexusSearch for the given repository URL.
@@ -67,10 +68,10 @@ public class CentralSearch {
this.rootURL = rootURL;
if (null != Settings.getString(Settings.KEYS.PROXY_SERVER)) {
useProxy = true;
LOGGER.fine("Using proxy");
LOGGER.debug("Using proxy");
} else {
useProxy = false;
LOGGER.fine("Not using proxy");
LOGGER.debug("Not using proxy");
}
}
@@ -89,7 +90,7 @@ public class CentralSearch {
final URL url = new URL(rootURL + String.format("?q=1:\"%s\"&wt=xml", sha1));
LOGGER.fine(String.format("Searching Central url %s", url.toString()));
LOGGER.debug("Searching Central url {}", url.toString());
// Determine if we need to use a proxy. The rules:
// 1) If the proxy is set, AND the setting is set to true, use the proxy
@@ -119,9 +120,9 @@ public class CentralSearch {
final NodeList docs = (NodeList) xpath.evaluate("/response/result/doc", doc, XPathConstants.NODESET);
for (int i = 0; i < docs.getLength(); i++) {
final String g = xpath.evaluate("./str[@name='g']", docs.item(i));
LOGGER.finest(String.format("GroupId: %s", g));
LOGGER.trace("GroupId: {}", g);
final String a = xpath.evaluate("./str[@name='a']", docs.item(i));
LOGGER.finest(String.format("ArtifactId: %s", a));
LOGGER.trace("ArtifactId: {}", a);
final String v = xpath.evaluate("./str[@name='v']", docs.item(i));
NodeList atts = (NodeList) xpath.evaluate("./arr[@name='ec']/str", docs.item(i), XPathConstants.NODESET);
boolean pomAvailable = false;
@@ -144,7 +145,7 @@ public class CentralSearch {
}
}
LOGGER.finest(String.format("Version: %s", v));
LOGGER.trace("Version: {}", v);
result.add(new MavenArtifact(g, a, v, jarAvailable, pomAvailable, useHTTPS));
}
@@ -160,10 +161,9 @@ public class CentralSearch {
throw new FileNotFoundException("Artifact not found in Central");
}
} else {
final String msg = String.format("Could not connect to Central received response code: %d %s",
conn.getResponseCode(), conn.getResponseMessage());
LOGGER.fine(msg);
throw new IOException(msg);
LOGGER.debug("Could not connect to Central received response code: {} {}",
conn.getResponseCode(), conn.getResponseMessage());
throw new IOException("Could not connect to Central");
}
return null;
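For context, the search in this class keys off a SHA-1 query against the configured root URL, using the ?q=1:"…"&wt=xml format seen in the hunk above. A small sketch of the URL it ends up building; the endpoint value is an assumption for illustration, since CentralSearch uses whatever rootURL it was constructed with:
import java.net.URL;

public class CentralQuerySketch {
    public static void main(String[] args) throws Exception {
        // Assumed endpoint for illustration only; the real value comes from Settings.
        final String rootURL = "https://search.maven.org/solrsearch/select";
        final String sha1 = "40fb048097caeacdb11dbb33b5755854d89efdeb"; // illustrative digest
        final URL url = new URL(rootURL + String.format("?q=1:\"%s\"&wt=xml", sha1));
        System.out.println(url);
    }
}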

View File

@@ -21,8 +21,6 @@ import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.miscellaneous.PerFieldAnalyzerWrapper;
@@ -46,6 +44,8 @@ import org.owasp.dependencycheck.data.lucene.SearchFieldAnalyzer;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.utils.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An in memory lucene index that contains the vendor/product combinations from the CPE (application) identifiers within the NVD
@@ -58,7 +58,7 @@ public final class CpeMemoryIndex {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(CpeMemoryIndex.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(CpeMemoryIndex.class);
/**
* singleton instance.
*/
@@ -203,7 +203,7 @@ public final class CpeMemoryIndex {
try {
indexReader.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
}
indexReader = null;
}
@@ -235,7 +235,7 @@ public final class CpeMemoryIndex {
saveEntry(pair.getLeft(), pair.getRight(), indexWriter);
}
} catch (DatabaseException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new IndexException("Error reading CPE data", ex);
}
} catch (CorruptIndexException ex) {

View File

@@ -17,12 +17,13 @@
*/
package org.owasp.dependencycheck.data.cwe;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.util.HashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
*
@@ -33,7 +34,7 @@ public final class CweDB {
/**
* The Logger.
*/
private static final Logger LOGGER = Logger.getLogger(CweDB.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(CweDB.class);
/**
* Empty private constructor as this is a utility class.
@@ -61,17 +62,17 @@ public final class CweDB {
final HashMap<String, String> ret = (HashMap<String, String>) oin.readObject();
return ret;
} catch (ClassNotFoundException ex) {
LOGGER.log(Level.WARNING, "Unable to load CWE data. This should not be an issue.");
LOGGER.log(Level.FINE, null, ex);
LOGGER.warn("Unable to load CWE data. This should not be an issue.");
LOGGER.debug("", ex);
} catch (IOException ex) {
LOGGER.log(Level.WARNING, "Unable to load CWE data due to an IO Error. This should not be an issue.");
LOGGER.log(Level.FINE, null, ex);
LOGGER.warn("Unable to load CWE data due to an IO Error. This should not be an issue.");
LOGGER.debug("", ex);
} finally {
if (oin != null) {
try {
oin.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
}
}
}

View File

@@ -21,11 +21,11 @@ import java.io.IOException;
import java.net.MalformedURLException;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.owasp.dependencycheck.utils.UrlStringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>
@@ -39,7 +39,7 @@ public final class UrlTokenizingFilter extends AbstractTokenizingFilter {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(UrlTokenizingFilter.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(UrlTokenizingFilter.class);
/**
* Constructs a new VersionTokenizingFilter.
*
@@ -70,7 +70,7 @@ public final class UrlTokenizingFilter extends AbstractTokenizingFilter {
final List<String> data = UrlStringUtils.extractImportantUrlData(part);
tokens.addAll(data);
} catch (MalformedURLException ex) {
LOGGER.log(Level.FINE, "error parsing " + part, ex);
LOGGER.debug("error parsing {}", part, ex);
tokens.add(part);
}
} else {

View File

@@ -21,8 +21,6 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
@@ -30,6 +28,8 @@ import javax.xml.xpath.XPathFactory;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.URLConnectionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
/**
@@ -48,18 +48,10 @@ public class NexusSearch {
* Whether to use the Proxy when making requests.
*/
private boolean useProxy;
/**
* The username to use if the Nexus requires authentication.
*/
private String userName = null;
/**
* The password to use if the Nexus requires authentication.
*/
private char[] password;
/**
* Used for logging.
*/
private static final Logger LOGGER = Logger.getLogger(NexusSearch.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(NexusSearch.class);
/**
* Creates a NexusSearch for the given repository URL.
@@ -73,10 +65,10 @@ public class NexusSearch {
if (null != Settings.getString(Settings.KEYS.PROXY_SERVER)
&& Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_PROXY)) {
useProxy = true;
LOGGER.fine("Using proxy");
LOGGER.debug("Using proxy");
} else {
useProxy = false;
LOGGER.fine("Not using proxy");
LOGGER.debug("Not using proxy");
}
} catch (InvalidSettingException ise) {
useProxy = false;
@@ -99,7 +91,7 @@ public class NexusSearch {
final URL url = new URL(rootURL, String.format("identify/sha1/%s",
sha1.toLowerCase()));
LOGGER.fine(String.format("Searching Nexus url %s", url.toString()));
LOGGER.debug("Searching Nexus url {}", url);
// Determine if we need to use a proxy. The rules:
// 1) If the proxy is set, AND the setting is set to true, use the proxy
@@ -155,10 +147,9 @@ public class NexusSearch {
} else if (conn.getResponseCode() == 404) {
throw new FileNotFoundException("Artifact not found in Nexus");
} else {
final String msg = String.format("Could not connect to Nexus received response code: %d %s",
LOGGER.debug("Could not connect to Nexus received response code: {} {}",
conn.getResponseCode(), conn.getResponseMessage());
LOGGER.fine(msg);
throw new IOException(msg);
throw new IOException("Could not connect to Nexus");
}
}
@@ -175,13 +166,13 @@ public class NexusSearch {
conn.addRequestProperty("Accept", "application/xml");
conn.connect();
if (conn.getResponseCode() != 200) {
LOGGER.log(Level.WARNING, "Expected 200 result from Nexus, got {0}", conn.getResponseCode());
LOGGER.warn("Expected 200 result from Nexus, got {}", conn.getResponseCode());
return false;
}
final DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
final Document doc = builder.parse(conn.getInputStream());
if (!"status".equals(doc.getDocumentElement().getNodeName())) {
LOGGER.log(Level.WARNING, "Expected root node name of status, got {0}", doc.getDocumentElement().getNodeName());
LOGGER.warn("Expected root node name of status, got {}", doc.getDocumentElement().getNodeName());
return false;
}
} catch (Throwable e) {

View File

@@ -29,15 +29,14 @@ import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.owasp.dependencycheck.utils.DBUtils;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Loads the configured database driver and returns the database connection. If the embedded H2 database is used
* obtaining a connection will ensure the database file exists and that the appropriate table structure has been
* created.
* Loads the configured database driver and returns the database connection. If the embedded H2 database is used obtaining a
* connection will ensure the database file exists and that the appropriate table structure has been created.
*
* @author Jeremy Long
*/
@@ -46,7 +45,7 @@ public final class ConnectionFactory {
/**
* The Logger.
*/
private static final Logger LOGGER = Logger.getLogger(ConnectionFactory.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(ConnectionFactory.class);
/**
* The version of the current DB Schema.
*/
@@ -55,6 +54,10 @@ public final class ConnectionFactory {
* Resource location for SQL file used to create the database schema.
*/
public static final String DB_STRUCTURE_RESOURCE = "data/initialize.sql";
/**
* Resource location for the SQL file used to upgrade the database schema from a previous version.
*/
public static final String DB_STRUCTURE_UPDATE_RESOURCE = "data/upgrade_%s.sql";
/**
* The database driver used to connect to the database.
*/
@@ -79,8 +82,8 @@ public final class ConnectionFactory {
}
/**
* Initializes the connection factory. Ensuring that the appropriate drivers are loaded and that a connection can be
* made successfully.
* Initializes the connection factory. Ensuring that the appropriate drivers are loaded and that a connection can be made
* successfully.
*
* @throws DatabaseException thrown if we are unable to connect to the database
*/
@@ -94,17 +97,17 @@ public final class ConnectionFactory {
//load the driver if necessary
final String driverName = Settings.getString(Settings.KEYS.DB_DRIVER_NAME, "");
if (!driverName.isEmpty()) { //likely need to load the correct driver
LOGGER.log(Level.FINE, "Loading driver: {0}", driverName);
LOGGER.debug("Loading driver: {}", driverName);
final String driverPath = Settings.getString(Settings.KEYS.DB_DRIVER_PATH, "");
try {
if (!driverPath.isEmpty()) {
LOGGER.log(Level.FINE, "Loading driver from: {0}", driverPath);
LOGGER.debug("Loading driver from: {}", driverPath);
driver = DriverLoader.load(driverName, driverPath);
} else {
driver = DriverLoader.load(driverName);
}
} catch (DriverLoadException ex) {
LOGGER.log(Level.FINE, "Unable to load database driver", ex);
LOGGER.debug("Unable to load database driver", ex);
throw new DatabaseException("Unable to load database driver");
}
}
@@ -114,10 +117,9 @@ public final class ConnectionFactory {
try {
connectionString = Settings.getConnectionString(
Settings.KEYS.DB_CONNECTION_STRING,
Settings.KEYS.DB_FILE_NAME,
Settings.KEYS.DB_VERSION);
Settings.KEYS.DB_FILE_NAME);
} catch (IOException ex) {
LOGGER.log(Level.FINE,
LOGGER.debug(
"Unable to retrieve the database connection string", ex);
throw new DatabaseException("Unable to retrieve the database connection string");
}
@@ -125,15 +127,15 @@ public final class ConnectionFactory {
try {
if (connectionString.startsWith("jdbc:h2:file:")) { //H2
shouldCreateSchema = !h2DataFileExists();
LOGGER.log(Level.FINE, "Need to create DB Structure: {0}", shouldCreateSchema);
LOGGER.debug("Need to create DB Structure: {}", shouldCreateSchema);
}
} catch (IOException ioex) {
LOGGER.log(Level.FINE, "Unable to verify database exists", ioex);
LOGGER.debug("Unable to verify database exists", ioex);
throw new DatabaseException("Unable to verify database exists");
}
LOGGER.log(Level.FINE, "Loading database connection");
LOGGER.log(Level.FINE, "Connection String: {0}", connectionString);
LOGGER.log(Level.FINE, "Database User: {0}", userName);
LOGGER.debug("Loading database connection");
LOGGER.debug("Connection String: {}", connectionString);
LOGGER.debug("Database User: {}", userName);
try {
conn = DriverManager.getConnection(connectionString, userName, password);
@@ -143,14 +145,14 @@ public final class ConnectionFactory {
try {
conn = DriverManager.getConnection(connectionString, userName, password);
Settings.setString(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
LOGGER.log(Level.FINE,
LOGGER.debug(
"Unable to start the database in server mode; reverting to single user mode");
} catch (SQLException sqlex) {
LOGGER.log(Level.FINE, "Unable to connect to the database", ex);
LOGGER.debug("Unable to connect to the database", ex);
throw new DatabaseException("Unable to connect to the database");
}
} else {
LOGGER.log(Level.FINE, "Unable to connect to the database", ex);
LOGGER.debug("Unable to connect to the database", ex);
throw new DatabaseException("Unable to connect to the database");
}
}
@@ -159,23 +161,22 @@ public final class ConnectionFactory {
try {
createTables(conn);
} catch (DatabaseException dex) {
LOGGER.log(Level.FINE, null, dex);
LOGGER.debug("", dex);
throw new DatabaseException("Unable to create the database structure");
}
} else {
try {
ensureSchemaVersion(conn);
} catch (DatabaseException dex) {
LOGGER.log(Level.FINE, null, dex);
throw new DatabaseException("Database schema does not match this version of dependency-check");
}
}
try {
ensureSchemaVersion(conn);
} catch (DatabaseException dex) {
LOGGER.debug("", dex);
throw new DatabaseException("Database schema does not match this version of dependency-check", dex);
}
} finally {
if (conn != null) {
try {
conn.close();
} catch (SQLException ex) {
LOGGER.log(Level.FINE, "An error occurred closing the connection", ex);
LOGGER.debug("An error occurred closing the connection", ex);
}
}
}
@@ -183,17 +184,17 @@ public final class ConnectionFactory {
/**
* Cleans up resources and unloads any registered database drivers. This needs to be called to ensure the driver is
* unregistered prior to the finalize method being called as during shutdown the class loader used to load the
* driver may be unloaded prior to the driver being de-registered.
* unregistered prior to the finalize method being called as during shutdown the class loader used to load the driver may be
* unloaded prior to the driver being de-registered.
*/
public static synchronized void cleanup() {
if (driver != null) {
try {
DriverManager.deregisterDriver(driver);
} catch (SQLException ex) {
LOGGER.log(Level.FINE, "An error occurred unloading the database driver", ex);
LOGGER.debug("An error occurred unloading the database driver", ex);
} catch (Throwable unexpected) {
LOGGER.log(Level.FINE,
LOGGER.debug(
"An unexpected throwable occurred unloading the database driver", unexpected);
}
driver = null;
@@ -215,7 +216,7 @@ public final class ConnectionFactory {
try {
conn = DriverManager.getConnection(connectionString, userName, password);
} catch (SQLException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new DatabaseException("Unable to connect to the database");
}
return conn;
@@ -229,8 +230,7 @@ public final class ConnectionFactory {
*/
private static boolean h2DataFileExists() throws IOException {
final File dir = Settings.getDataDirectory();
final String name = Settings.getString(Settings.KEYS.DB_FILE_NAME);
final String fileName = String.format(name, DB_SCHEMA_VERSION);
final String fileName = Settings.getString(Settings.KEYS.DB_FILE_NAME);
final File file = new File(dir, fileName);
return file.exists();
}
@@ -242,7 +242,7 @@ public final class ConnectionFactory {
* @throws DatabaseException thrown if there is a Database Exception
*/
private static void createTables(Connection conn) throws DatabaseException {
LOGGER.log(Level.FINE, "Creating database structure");
LOGGER.debug("Creating database structure");
InputStream is;
InputStreamReader reader;
BufferedReader in = null;
@@ -260,7 +260,7 @@ public final class ConnectionFactory {
statement = conn.createStatement();
statement.execute(sb.toString());
} catch (SQLException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new DatabaseException("Unable to create database statement", ex);
} finally {
DBUtils.closeStatement(statement);
@@ -272,7 +272,59 @@ public final class ConnectionFactory {
try {
in.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
}
}
}
}
/**
* Updates the database schema by loading the upgrade script for the version specified. The intended use is that if the
* current schema version is 2.9 then we would call updateSchema(conn, "2.9"). This would load the upgrade_2.9.sql file and
* execute it against the database. The upgrade script must update the 'version' in the properties table.
*
* @param conn the database connection object
* @param schema the current schema version that is being upgraded
* @throws DatabaseException thrown if there is an exception upgrading the database schema
*/
private static void updateSchema(Connection conn, String schema) throws DatabaseException {
LOGGER.debug("Updating database structure");
InputStream is;
InputStreamReader reader;
BufferedReader in = null;
String updateFile = null;
try {
updateFile = String.format(DB_STRUCTURE_UPDATE_RESOURCE, schema);
is = ConnectionFactory.class.getClassLoader().getResourceAsStream(updateFile);
if (is == null) {
throw new DatabaseException(String.format("Unable to load update file '%s'", updateFile));
}
reader = new InputStreamReader(is, "UTF-8");
in = new BufferedReader(reader);
final StringBuilder sb = new StringBuilder(2110);
String tmp;
while ((tmp = in.readLine()) != null) {
sb.append(tmp);
}
Statement statement = null;
try {
statement = conn.createStatement();
statement.execute(sb.toString());
} catch (SQLException ex) {
LOGGER.debug("", ex);
throw new DatabaseException("Unable to update database schema", ex);
} finally {
DBUtils.closeStatement(statement);
}
} catch (IOException ex) {
final String msg = String.format("Upgrade SQL file does not exist: %s", updateFile);
throw new DatabaseException(msg, ex);
} finally {
if (in != null) {
try {
in.close();
} catch (IOException ex) {
LOGGER.trace("", ex);
}
}
}
@@ -288,18 +340,20 @@ public final class ConnectionFactory {
ResultSet rs = null;
CallableStatement cs = null;
try {
//TODO convert this to use DatabaseProperties
cs = conn.prepareCall("SELECT value FROM properties WHERE id = 'version'");
rs = cs.executeQuery();
if (rs.next()) {
final boolean isWrongSchema = !DB_SCHEMA_VERSION.equals(rs.getString(1));
if (isWrongSchema) {
throw new DatabaseException("Incorrect database schema; unable to continue");
if (!DB_SCHEMA_VERSION.equals(rs.getString(1))) {
LOGGER.debug("Current Schema: " + DB_SCHEMA_VERSION);
LOGGER.debug("DB Schema: " + rs.getString(1));
updateSchema(conn, rs.getString(1));
}
} else {
throw new DatabaseException("Database schema is missing");
}
} catch (SQLException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new DatabaseException("Unable to check the database schema version");
} finally {
DBUtils.closeResultSet(rs);

View File

@@ -19,6 +19,7 @@ package org.owasp.dependencycheck.data.nvdcve;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
@@ -33,8 +34,6 @@ import java.util.Map.Entry;
import java.util.Properties;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.owasp.dependencycheck.data.cwe.CweDB;
import org.owasp.dependencycheck.dependency.Reference;
import org.owasp.dependencycheck.dependency.Vulnerability;
@@ -44,6 +43,8 @@ import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
import org.owasp.dependencycheck.utils.Pair;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The database holding information about the NVD CVE data.
@@ -55,7 +56,7 @@ public class CveDB {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(CveDB.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(CveDB.class);
/**
* Database connection
*/
@@ -73,7 +74,7 @@ public class CveDB {
*/
public CveDB() throws DatabaseException {
super();
statementBundle = java.util.ResourceBundle.getBundle("data/dbStatements");
statementBundle = ResourceBundle.getBundle("data/dbStatements");
try {
open();
databaseProperties = new DatabaseProperties(this);
@@ -110,13 +111,11 @@ public class CveDB {
try {
conn.close();
} catch (SQLException ex) {
final String msg = "There was an error attempting to close the CveDB, see the log for more details.";
LOGGER.log(Level.SEVERE, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.error("There was an error attempting to close the CveDB, see the log for more details.");
LOGGER.debug("", ex);
} catch (Throwable ex) {
final String msg = "There was an exception attempting to close the CveDB, see the log for more details.";
LOGGER.log(Level.SEVERE, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.error("There was an exception attempting to close the CveDB, see the log for more details.");
LOGGER.debug("", ex);
}
conn = null;
}
@@ -151,7 +150,7 @@ public class CveDB {
@Override
@SuppressWarnings("FinalizeDeclaration")
protected void finalize() throws Throwable {
LOGGER.log(Level.FINE, "Entering finalize");
LOGGER.debug("Entering finalize");
close();
super.finalize();
}
@@ -193,9 +192,8 @@ public class CveDB {
cpe.add(vs);
}
} catch (SQLException ex) {
final String msg = "An unexpected SQL Exception occurred; please see the verbose log for more details.";
LOGGER.log(Level.SEVERE, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.error("An unexpected SQL Exception occurred; please see the verbose log for more details.");
LOGGER.debug("", ex);
} finally {
DBUtils.closeResultSet(rs);
DBUtils.closeStatement(ps);
@@ -245,9 +243,8 @@ public class CveDB {
prop.setProperty(rs.getString(1), rs.getString(2));
}
} catch (SQLException ex) {
final String msg = "An unexpected SQL Exception occurred; please see the verbose log for more details.";
LOGGER.log(Level.SEVERE, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.error("An unexpected SQL Exception occurred; please see the verbose log for more details.");
LOGGER.debug("", ex);
} finally {
DBUtils.closeStatement(ps);
DBUtils.closeResultSet(rs);
@@ -268,8 +265,8 @@ public class CveDB {
updateProperty = getConnection().prepareStatement(statementBundle.getString("UPDATE_PROPERTY"));
insertProperty = getConnection().prepareStatement(statementBundle.getString("INSERT_PROPERTY"));
} catch (SQLException ex) {
LOGGER.log(Level.WARNING, "Unable to save properties to the database");
LOGGER.log(Level.FINE, "Unable to save properties to the database", ex);
LOGGER.warn("Unable to save properties to the database");
LOGGER.debug("Unable to save properties to the database", ex);
return;
}
for (Entry<Object, Object> entry : props.entrySet()) {
@@ -283,9 +280,8 @@ public class CveDB {
insertProperty.setString(2, value);
}
} catch (SQLException ex) {
final String msg = String.format("Unable to save property '%s' with a value of '%s' to the database", key, value);
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.warn("Unable to save property '{}' with a value of '{}' to the database", key, value);
LOGGER.debug("", ex);
}
}
} finally {
@@ -307,8 +303,8 @@ public class CveDB {
try {
updateProperty = getConnection().prepareStatement(statementBundle.getString("UPDATE_PROPERTY"));
} catch (SQLException ex) {
LOGGER.log(Level.WARNING, "Unable to save properties to the database");
LOGGER.log(Level.FINE, "Unable to save properties to the database", ex);
LOGGER.warn("Unable to save properties to the database");
LOGGER.debug("Unable to save properties to the database", ex);
return;
}
try {
@@ -318,8 +314,8 @@ public class CveDB {
try {
insertProperty = getConnection().prepareStatement(statementBundle.getString("INSERT_PROPERTY"));
} catch (SQLException ex) {
LOGGER.log(Level.WARNING, "Unable to save properties to the database");
LOGGER.log(Level.FINE, "Unable to save properties to the database", ex);
LOGGER.warn("Unable to save properties to the database");
LOGGER.debug("Unable to save properties to the database", ex);
return;
}
insertProperty.setString(1, key);
@@ -327,9 +323,8 @@ public class CveDB {
insertProperty.execute();
}
} catch (SQLException ex) {
final String msg = String.format("Unable to save property '%s' with a value of '%s' to the database", key, value);
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.warn("Unable to save property '{}' with a value of '{}' to the database", key, value);
LOGGER.debug("", ex);
}
} finally {
DBUtils.closeStatement(updateProperty);
@@ -350,7 +345,7 @@ public class CveDB {
try {
cpe.parseName(cpeStr);
} catch (UnsupportedEncodingException ex) {
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
}
final DependencyVersion detectedVersion = parseDependencyVersion(cpe);
final List<Vulnerability> vulnerabilities = new ArrayList<Vulnerability>();
@@ -495,12 +490,15 @@ public class CveDB {
deleteReferences = getConnection().prepareStatement(statementBundle.getString("DELETE_REFERENCE"));
deleteSoftware = getConnection().prepareStatement(statementBundle.getString("DELETE_SOFTWARE"));
updateVulnerability = getConnection().prepareStatement(statementBundle.getString("UPDATE_VULNERABILITY"));
final String[] ids = {"id"};
insertVulnerability = getConnection().prepareStatement(statementBundle.getString("INSERT_VULNERABILITY"),
Statement.RETURN_GENERATED_KEYS);
//Statement.RETURN_GENERATED_KEYS);
ids);
insertReference = getConnection().prepareStatement(statementBundle.getString("INSERT_REFERENCE"));
selectCpeId = getConnection().prepareStatement(statementBundle.getString("SELECT_CPE_ID"));
insertCpe = getConnection().prepareStatement(statementBundle.getString("INSERT_CPE"),
Statement.RETURN_GENERATED_KEYS);
//Statement.RETURN_GENERATED_KEYS);
ids);
insertSoftware = getConnection().prepareStatement(statementBundle.getString("INSERT_SOFTWARE"));
int vulnerabilityId = 0;
selectVulnerabilityId.setString(1, vuln.getName());
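The two prepareStatement calls above now pass an explicit generated-key column list ({"id"}) instead of Statement.RETURN_GENERATED_KEYS. A hedged sketch of retrieving a key with that form, assuming an auto-generated 'id' column and a driver that supports per-column key retrieval; the table and SQL are made up for illustration:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public final class GeneratedKeyExample {

    private GeneratedKeyExample() {
    }

    /** Inserts a row and returns the generated "id" value; table and columns are hypothetical. */
    public static int insertAndReturnId(Connection conn, String name) throws SQLException {
        final String[] keyColumns = {"id"};
        PreparedStatement ps = null;
        ResultSet keys = null;
        try {
            ps = conn.prepareStatement("INSERT INTO example (name) VALUES (?)", keyColumns);
            ps.setString(1, name);
            ps.executeUpdate();
            keys = ps.getGeneratedKeys();
            if (keys.next()) {
                return keys.getInt(1);
            }
            throw new SQLException("No generated key was returned");
        } finally {
            if (keys != null) {
                keys.close();
            }
            if (ps != null) {
                ps.close();
            }
        }
    }
}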
@@ -601,7 +599,7 @@ public class CveDB {
} catch (SQLException ex) {
final String msg = String.format("Error updating '%s'", vuln.getName());
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new DatabaseException(msg, ex);
} finally {
DBUtils.closeStatement(selectVulnerabilityId);
@@ -640,13 +638,12 @@ public class CveDB {
} catch (IOException ex1) {
dd = Settings.getString(Settings.KEYS.DATA_DIRECTORY);
}
final String msg = String.format("Unable to access the local database.%n%nEnsure that '%s' is a writable directory. "
+ "If the problem persist try deleting the files in '%s' and running %s again. If the problem continues, please "
LOGGER.error("Unable to access the local database.\n\nEnsure that '{}' is a writable directory. "
+ "If the problem persist try deleting the files in '{}' and running {} again. If the problem continues, please "
+ "create a log file (see documentation at http://jeremylong.github.io/DependencyCheck/) and open a ticket at "
+ "https://github.com/jeremylong/DependencyCheck/issues and include the log file.%n%n",
+ "https://github.com/jeremylong/DependencyCheck/issues and include the log file.\n\n",
dd, dd, Settings.getString(Settings.KEYS.APPLICATION_VAME));
LOGGER.log(Level.SEVERE, msg);
LOGGER.log(Level.FINE, "", ex);
LOGGER.debug("", ex);
} finally {
DBUtils.closeResultSet(rs);
DBUtils.closeStatement(cs);
@@ -666,9 +663,8 @@ public class CveDB {
ps.executeUpdate();
}
} catch (SQLException ex) {
final String msg = "An unexpected SQL Exception occurred; please see the verbose log for more details.";
LOGGER.log(Level.SEVERE, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.error("An unexpected SQL Exception occurred; please see the verbose log for more details.");
LOGGER.debug("", ex);
} finally {
DBUtils.closeStatement(ps);
}
@@ -759,7 +755,7 @@ public class CveDB {
cpe.parseName(cpeStr);
} catch (UnsupportedEncodingException ex) {
//never going to happen.
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
}
return parseDependencyVersion(cpe);
}
@@ -774,8 +770,8 @@ public class CveDB {
DependencyVersion cpeVersion;
if (cpe.getVersion() != null && !cpe.getVersion().isEmpty()) {
String versionText;
if (cpe.getRevision() != null && !cpe.getRevision().isEmpty()) {
versionText = String.format("%s.%s", cpe.getVersion(), cpe.getRevision());
if (cpe.getUpdate() != null && !cpe.getUpdate().isEmpty()) {
versionText = String.format("%s.%s", cpe.getVersion(), cpe.getUpdate());
} else {
versionText = cpe.getVersion();
}
@@ -785,4 +781,41 @@ public class CveDB {
}
return cpeVersion;
}
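The change above composes the version text from the CPE 'version' and 'update' fields (previously 'revision'). A small sketch of the resulting value using the same String.format pattern; the sample field values are made up:

public final class CpeVersionExample {

    private CpeVersionExample() {
    }

    public static void main(String[] args) {
        // hypothetical CPE fields, e.g. from something like cpe:/a:vendor:product:2.3:beta1
        final String version = "2.3";
        final String update = "beta1";

        final String versionText;
        if (update != null && !update.isEmpty()) {
            versionText = String.format("%s.%s", version, update);
        } else {
            versionText = version;
        }
        System.out.println(versionText); // prints 2.3.beta1
    }
}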
/**
* Deletes unused dictionary entries from the database.
*/
public void deleteUnusedCpe() {
CallableStatement cs = null;
try {
cs = getConnection().prepareCall(statementBundle.getString("DELETE_UNUSED_DICT_CPE"));
cs.executeUpdate();
} catch (SQLException ex) {
LOGGER.error("Unable to delete CPE dictionary entries", ex);
} finally {
DBUtils.closeStatement(cs);
}
}
/**
* Merges CPE entries into the database.
*
* @param cpe the CPE identifier
* @param vendor the CPE vendor
* @param product the CPE product
*/
public void addCpe(String cpe, String vendor, String product) {
PreparedStatement ps = null;
try {
ps = getConnection().prepareCall(statementBundle.getString("ADD_DICT_CPE"));
ps.setString(1, cpe);
ps.setString(2, vendor);
ps.setString(3, product);
ps.executeUpdate();
} catch (SQLException ex) {
LOGGER.error("Unable to add CPE dictionary entry", ex);
} finally {
DBUtils.closeStatement(ps);
}
}
}
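The new deleteUnusedCpe and addCpe methods above follow the existing close-in-finally style via DBUtils. For comparison, a sketch of the same cleanup written with try-with-resources, assuming a Java 7+ target; the SQL is illustrative and not the statement loaded from the bundle in CveDB:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public final class TryWithResourcesExample {

    private TryWithResourcesExample() {
    }

    /** Adds a CPE dictionary entry; table and column names are hypothetical. */
    public static void addCpe(Connection conn, String cpe, String vendor, String product) throws SQLException {
        final String sql = "INSERT INTO cpeEntry (cpe, vendor, product) VALUES (?, ?, ?)";
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setString(1, cpe);
            ps.setString(2, vendor);
            ps.setString(3, product);
            ps.executeUpdate();
        }
        // the statement is closed automatically, even if executeUpdate throws
    }
}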

View File

@@ -24,10 +24,10 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.owasp.dependencycheck.data.update.NvdCveInfo;
import org.owasp.dependencycheck.data.update.nvd.NvdCveInfo;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This is a wrapper around a set of properties that are stored in the database.
@@ -39,22 +39,30 @@ public class DatabaseProperties {
/**
* The Logger.
*/
private static final Logger LOGGER = Logger.getLogger(DatabaseProperties.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(DatabaseProperties.class);
/**
* Modified key word, used as a key to store information about the modified file (i.e. the containing the last 8
* days of updates)..
* Modified key word, used as a key to store information about the modified file (i.e., the file containing the last 8 days of
* updates).
*/
public static final String MODIFIED = "Modified";
/**
* The properties file key for the last updated field - used to store the last updated time of the Modified NVD CVE
* xml file.
* The properties file key for the last updated field - used to store the last updated time of the Modified NVD CVE xml file.
*/
public static final String LAST_UPDATED = "NVD CVE Modified";
/**
* Stores the last updated time for each of the NVD CVE files. These timestamps should be updated if we process the
* modified file within 7 days of the last update.
* Stores the last updated time for each of the NVD CVE files. These timestamps should be updated if we process the modified
* file within 7 days of the last update.
*/
public static final String LAST_UPDATED_BASE = "NVD CVE ";
/**
* The key for the last time the CPE data was updated.
*/
public static final String LAST_CPE_UPDATE = "LAST_CPE_UPDATE";
/**
* The key for the database schema version.
*/
public static final String VERSION = "version";
/**
* A collection of properties about the data.
*/
@@ -116,8 +124,7 @@ public class DatabaseProperties {
}
/**
* Returns the property value for the given key. If the key is not contained in the underlying properties null is
* returned.
* Returns the property value for the given key. If the key is not contained in the underlying properties null is returned.
*
* @param key the property key
* @return the value of the property
@@ -127,8 +134,8 @@ public class DatabaseProperties {
}
/**
* Returns the property value for the given key. If the key is not contained in the underlying properties the
* default value is returned.
* Returns the property value for the given key. If the key is not contained in the underlying properties the default value is
* returned.
*
* @param key the property key
* @param defaultValue the default value
@@ -148,8 +155,8 @@ public class DatabaseProperties {
}
/**
* Returns a map of the meta data from the database properties. This primarily contains timestamps of when the NVD
* CVE information was last updated.
* Returns a map of the meta data from the database properties. This primarily contains timestamps of when the NVD CVE
* information was last updated.
*
* @return a map of the database meta data
*/
@@ -166,7 +173,7 @@ public class DatabaseProperties {
final String formatted = format.format(date);
map.put(key, formatted);
} catch (Throwable ex) { //deliberately being broad in this catch clause
LOGGER.log(Level.FINE, "Unable to parse timestamp from DB", ex);
LOGGER.debug("Unable to parse timestamp from DB", ex);
map.put(key, (String) entry.getValue());
}
} else {

View File

@@ -17,6 +17,9 @@
*/
package org.owasp.dependencycheck.data.nvdcve;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
@@ -28,8 +31,6 @@ import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* DriverLoader is a utility class that is used to load database drivers.
@@ -41,7 +42,7 @@ public final class DriverLoader {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(DriverLoader.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(DriverLoader.class);
/**
* Private constructor for a utility class.
@@ -87,20 +88,18 @@ public final class DriverLoader {
try {
urls.add(f.toURI().toURL());
} catch (MalformedURLException ex) {
final String msg = String.format("Unable to load database driver '%s'; invalid path provided '%s'",
className, f.getAbsoluteFile());
LOGGER.log(Level.FINE, msg, ex);
throw new DriverLoadException(msg, ex);
LOGGER.debug("Unable to load database driver '{}'; invalid path provided '{}'",
className, f.getAbsoluteFile(), ex);
throw new DriverLoadException("Unable to load database driver. Invalid path provided", ex);
}
}
} else if (file.exists()) {
try {
urls.add(file.toURI().toURL());
} catch (MalformedURLException ex) {
final String msg = String.format("Unable to load database driver '%s'; invalid path provided '%s'",
className, file.getAbsoluteFile());
LOGGER.log(Level.FINE, msg, ex);
throw new DriverLoadException(msg, ex);
LOGGER.debug("Unable to load database driver '{}'; invalid path provided '{}'",
className, file.getAbsoluteFile(), ex);
throw new DriverLoadException("Unable to load database driver. Invalid path provided", ex);
}
}
}
@@ -133,19 +132,19 @@ public final class DriverLoader {
return shim;
} catch (ClassNotFoundException ex) {
final String msg = String.format("Unable to load database driver '%s'", className);
LOGGER.log(Level.FINE, msg, ex);
LOGGER.debug(msg, ex);
throw new DriverLoadException(msg, ex);
} catch (InstantiationException ex) {
final String msg = String.format("Unable to load database driver '%s'", className);
LOGGER.log(Level.FINE, msg, ex);
LOGGER.debug(msg, ex);
throw new DriverLoadException(msg, ex);
} catch (IllegalAccessException ex) {
final String msg = String.format("Unable to load database driver '%s'", className);
LOGGER.log(Level.FINE, msg, ex);
LOGGER.debug(msg, ex);
throw new DriverLoadException(msg, ex);
} catch (SQLException ex) {
final String msg = String.format("Unable to load database driver '%s'", className);
LOGGER.log(Level.FINE, msg, ex);
LOGGER.debug(msg, ex);
throw new DriverLoadException(msg, ex);
}
}
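DriverLoader wraps the externally loaded driver in DriverShim because DriverManager will generally not hand out connections through a driver class loaded by a foreign class loader. A self-contained sketch of that pattern with a throw-away shim; the jar path and driver class name are assumptions:

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.DriverPropertyInfo;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.util.Properties;

public final class ExternalDriverExample {

    /** Delegates every Driver call so DriverManager accepts the externally loaded driver. */
    static class Shim implements Driver {

        private final Driver delegate;

        Shim(Driver delegate) {
            this.delegate = delegate;
        }

        public Connection connect(String url, Properties info) throws SQLException {
            return delegate.connect(url, info);
        }

        public boolean acceptsURL(String url) throws SQLException {
            return delegate.acceptsURL(url);
        }

        public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException {
            return delegate.getPropertyInfo(url, info);
        }

        public int getMajorVersion() {
            return delegate.getMajorVersion();
        }

        public int getMinorVersion() {
            return delegate.getMinorVersion();
        }

        public boolean jdbcCompliant() {
            return delegate.jdbcCompliant();
        }

        public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException {
            throw new SQLFeatureNotSupportedException();
        }
    }

    public static void main(String[] args) throws Exception {
        // hypothetical external driver jar and class name
        final File jar = new File("/opt/drivers/somedb-driver.jar");
        final URLClassLoader loader = new URLClassLoader(new URL[]{jar.toURI().toURL()},
                ExternalDriverExample.class.getClassLoader());
        final Driver driver = (Driver) Class.forName("com.example.Driver", true, loader).newInstance();
        DriverManager.registerDriver(new Shim(driver));
        // connections can now be requested via DriverManager.getConnection(...)
    }
}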

View File

@@ -17,6 +17,9 @@
*/
package org.owasp.dependencycheck.data.nvdcve;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.sql.Connection;
@@ -25,8 +28,6 @@ import java.sql.DriverPropertyInfo;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* <p>
@@ -42,7 +43,7 @@ class DriverShim implements Driver {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(DriverShim.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(DriverShim.class);
/**
* The database driver being wrapped.
*/
@@ -115,7 +116,7 @@ class DriverShim implements Driver {
* @see java.sql.Driver#getParentLogger()
*/
//@Override
public Logger getParentLogger() throws SQLFeatureNotSupportedException {
public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException {
//return driver.getParentLogger();
Method m = null;
try {
@@ -125,13 +126,13 @@ class DriverShim implements Driver {
}
if (m != null) {
try {
return (Logger) m.invoke(m);
return (java.util.logging.Logger) m.invoke(m);
} catch (IllegalAccessException ex) {
LOGGER.log(Level.FINER, null, ex);
LOGGER.trace("", ex);
} catch (IllegalArgumentException ex) {
LOGGER.log(Level.FINER, null, ex);
LOGGER.trace("", ex);
} catch (InvocationTargetException ex) {
LOGGER.log(Level.FINER, null, ex);
LOGGER.trace("", ex);
}
}
throw new SQLFeatureNotSupportedException();

View File

@@ -0,0 +1,88 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2015 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.update;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
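* Base class for the data-source updaters; it owns the CveDB handle and DatabaseProperties used by its subclasses.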
*
* @author Jeremy Long
*/
public abstract class BaseUpdater {
/**
* Static logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(BaseUpdater.class);
/**
* Information about the timestamps and URLs for data that needs to be updated.
*/
private DatabaseProperties properties;
/**
* Reference to the Cve Database.
*/
private CveDB cveDB = null;
protected CveDB getCveDB() {
return cveDB;
}
protected DatabaseProperties getProperties() {
return properties;
}
/**
* Closes the CVE and CPE data stores.
*/
protected void closeDataStores() {
if (cveDB != null) {
try {
cveDB.close();
cveDB = null;
properties = null;
} catch (Throwable ignore) {
LOGGER.trace("Error closing the database", ignore);
}
}
}
/**
* Opens the data store.
*
* @throws UpdateException thrown if a data store cannot be opened
*/
protected final void openDataStores() throws UpdateException {
if (cveDB != null) {
return;
}
try {
cveDB = new CveDB();
cveDB.open();
properties = cveDB.getDatabaseProperties();
} catch (DatabaseException ex) {
closeDataStores();
LOGGER.debug("Database Exception opening databases", ex);
throw new UpdateException("Error updating the database, please see the log file for more details.");
}
}
}
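A sketch of how a subclass is expected to use the open/close contract above, following the shape of CpeUpdater below; the class itself and the update body are hypothetical, and it assumes CachedWebDataSource declares a single update() method as the implementations in this diff suggest:

import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.data.update.BaseUpdater;
import org.owasp.dependencycheck.data.update.CachedWebDataSource;
import org.owasp.dependencycheck.data.update.exception.UpdateException;

/** Hypothetical updater illustrating the BaseUpdater open/close contract. */
public class ExampleUpdater extends BaseUpdater implements CachedWebDataSource {

    @Override
    public void update() throws UpdateException {
        try {
            openDataStores();                                 // opens CveDB and loads DatabaseProperties
            final CveDB db = getCveDB();                      // database handle for inserts and queries
            final DatabaseProperties props = getProperties(); // timestamps describing the current data
            // ... fetch remote data, store it via db, then record a timestamp in props ...
        } finally {
            closeDataStores();                                // always release the database
        }
    }
}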

View File

@@ -0,0 +1,198 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2015 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.update;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Date;
import java.util.List;
import java.util.zip.GZIPInputStream;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.apache.commons.io.FileUtils;
import static org.owasp.dependencycheck.data.nvdcve.DatabaseProperties.LAST_CPE_UPDATE;
import org.owasp.dependencycheck.data.update.cpe.CPEHandler;
import org.owasp.dependencycheck.data.update.cpe.Cpe;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.utils.DateUtil;
import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.Downloader;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
/**
* The CpeUpdater is designed to download the CPE data file from NIST and import the data into the database. However, because it
* currently adds no beneficial data beyond the CPE data already contained in the CVE data files, this class is not currently
* used. The code is being kept because a future update may utilize more data from the CPE XML files.
*
* @author Jeremy Long
*/
public class CpeUpdater extends BaseUpdater implements CachedWebDataSource {
/**
* Static logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(CpeUpdater.class);
@Override
public void update() throws UpdateException {
try {
openDataStores();
if (updateNeeded()) {
LOGGER.info("Updating the Common Platform Enumeration (CPE)");
final File xml = downloadCpe();
final List<Cpe> cpes = processXML(xml);
getCveDB().deleteUnusedCpe();
for (Cpe cpe : cpes) {
getCveDB().addCpe(cpe.getValue(), cpe.getVendor(), cpe.getProduct());
}
final Date now = new Date();
getProperties().save(LAST_CPE_UPDATE, Long.toString(now.getTime()));
LOGGER.info("CPE update complete");
}
} finally {
closeDataStores();
}
}
/**
* Downloads the CPE XML file.
*
* @return the file reference to the CPE.xml file
* @throws UpdateException thrown if there is an issue downloading the XML file
*/
private File downloadCpe() throws UpdateException {
File xml;
final URL url;
try {
url = new URL(Settings.getString(Settings.KEYS.CPE_URL));
xml = File.createTempFile("cpe", ".xml", Settings.getTempDirectory());
Downloader.fetchFile(url, xml);
if (url.toExternalForm().endsWith(".xml.gz")) {
extractGzip(xml);
}
} catch (MalformedURLException ex) {
throw new UpdateException("Invalid CPE URL", ex);
} catch (DownloadFailedException ex) {
throw new UpdateException("Unable to download CPE XML file", ex);
} catch (IOException ex) {
throw new UpdateException("Unable to create temporary file to download CPE", ex);
}
return xml;
}
/**
* Parses the CPE XML file to return a list of CPE entries.
*
* @param xml the CPE data file
* @return the list of CPE entries
* @throws UpdateException thrown if there is an issue with parsing the XML file
*/
private List<Cpe> processXML(final File xml) throws UpdateException {
try {
final SAXParserFactory factory = SAXParserFactory.newInstance();
final SAXParser saxParser = factory.newSAXParser();
final CPEHandler handler = new CPEHandler();
saxParser.parse(xml, handler);
return handler.getData();
} catch (ParserConfigurationException ex) {
throw new UpdateException("Unable to parse CPE XML file due to SAX Parser Issue", ex);
} catch (SAXException ex) {
throw new UpdateException("Unable to parse CPE XML file due to SAX Parser Exception", ex);
} catch (IOException ex) {
throw new UpdateException("Unable to parse CPE XML file due to IO Failure", ex);
}
}
/**
* Checks to find the last time the CPE data was refreshed and if it needs to be updated.
*
* @return true if the CPE data should be refreshed
*/
private boolean updateNeeded() {
final Date now = new Date();
final int days = Settings.getInt(Settings.KEYS.CVE_MODIFIED_VALID_FOR_DAYS, 30);
long timestamp = 0;
final String ts = getProperties().getProperty(LAST_CPE_UPDATE);
if (ts != null && ts.matches("^[0-9]+$")) {
timestamp = Long.parseLong(ts);
}
return !DateUtil.withinDateRange(timestamp, now.getTime(), days);
}
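DateUtil.withinDateRange is not part of this diff; assuming it answers whether the stored timestamp falls within the last N days of 'now', an equivalent check in plain Java would look like the following sketch (it mirrors the assumed behaviour, it is not the project's implementation):

import java.util.concurrent.TimeUnit;

public final class DateRangeExample {

    private DateRangeExample() {
    }

    /** Returns true if lastUpdatedMillis is within the last validForDays days of nowMillis. */
    public static boolean withinDateRange(long lastUpdatedMillis, long nowMillis, int validForDays) {
        final long window = TimeUnit.DAYS.toMillis(validForDays);
        return nowMillis - lastUpdatedMillis <= window;
    }

    public static void main(String[] args) {
        final long now = System.currentTimeMillis();
        final long tenDaysAgo = now - TimeUnit.DAYS.toMillis(10);
        System.out.println(withinDateRange(tenDaysAgo, now, 30)); // true - the data is still fresh
        System.out.println(withinDateRange(tenDaysAgo, now, 7));  // false - a refresh is needed
    }
}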
/**
* Extracts the file contained in a gzip archive. The extracted file is placed in the exact same path as the file specified.
*
* @param file the archive file
* @throws FileNotFoundException thrown if the file does not exist
* @throws IOException thrown if there is an error extracting the file.
*/
private void extractGzip(File file) throws FileNotFoundException, IOException {
//TODO - move this to a util class as it is duplicative of (copy of) code in the DownloadTask
final String originalPath = file.getPath();
final File gzip = new File(originalPath + ".gz");
if (gzip.isFile() && !gzip.delete()) {
gzip.deleteOnExit();
}
if (!file.renameTo(gzip)) {
throw new IOException("Unable to rename '" + file.getPath() + "'");
}
final File newfile = new File(originalPath);
final byte[] buffer = new byte[4096];
GZIPInputStream cin = null;
FileOutputStream out = null;
try {
cin = new GZIPInputStream(new FileInputStream(gzip));
out = new FileOutputStream(newfile);
int len;
while ((len = cin.read(buffer)) > 0) {
out.write(buffer, 0, len);
}
} finally {
if (cin != null) {
try {
cin.close();
} catch (IOException ex) {
LOGGER.trace("ignore", ex);
}
}
if (out != null) {
try {
out.close();
} catch (IOException ex) {
LOGGER.trace("ignore", ex);
}
}
if (gzip.isFile()) {
FileUtils.deleteQuietly(gzip);
}
}
}
}
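extractGzip above renames the downloaded file, streams it through GZIPInputStream, and quietly deletes the temporary .gz. A shorter sketch of just the decompression step using NIO, assuming a Java 7+ runtime; error handling and the rename dance are omitted, and the paths are hypothetical:

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.zip.GZIPInputStream;

public final class GunzipExample {

    private GunzipExample() {
    }

    /** Decompresses the source .gz file into target, overwriting any existing file. */
    public static void gunzip(Path source, Path target) throws IOException {
        try (InputStream in = new GZIPInputStream(Files.newInputStream(source))) {
            Files.copy(in, target, StandardCopyOption.REPLACE_EXISTING);
        }
    }

    public static void main(String[] args) throws IOException {
        gunzip(Paths.get("cpe.xml.gz"), Paths.get("cpe.xml"));
    }
}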

View File

@@ -22,8 +22,6 @@ import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Date;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.io.IOUtils;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
@@ -34,6 +32,8 @@ import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.URLConnectionFactory;
import org.owasp.dependencycheck.utils.URLConnectionFailureException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
@@ -44,7 +44,7 @@ public class EngineVersionCheck implements CachedWebDataSource {
/**
* Static logger.
*/
private static final Logger LOGGER = Logger.getLogger(EngineVersionCheck.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(EngineVersionCheck.class);
/**
* The property key indicating when the last version check occurred.
*/
@@ -85,23 +85,22 @@ public class EngineVersionCheck implements CachedWebDataSource {
public void update() throws UpdateException {
try {
openDatabase();
LOGGER.fine("Begin Engine Version Check");
LOGGER.debug("Begin Engine Version Check");
final DatabaseProperties properties = cveDB.getDatabaseProperties();
final long lastChecked = Long.parseLong(properties.getProperty(ENGINE_VERSION_CHECKED_ON, "0"));
final long now = (new Date()).getTime();
updateToVersion = properties.getProperty(CURRENT_ENGINE_RELEASE, "");
final String currentVersion = Settings.getString(Settings.KEYS.APPLICATION_VERSION, "0.0.0");
LOGGER.fine("Last checked: " + lastChecked);
LOGGER.fine("Now: " + now);
LOGGER.fine("Current version: " + currentVersion);
LOGGER.debug("Last checked: {}", lastChecked);
LOGGER.debug("Now: {}", now);
LOGGER.debug("Current version: {}", currentVersion);
final boolean updateNeeded = shouldUpdate(lastChecked, now, properties, currentVersion);
if (updateNeeded) {
final String msg = String.format("A new version of dependency-check is available. Consider updating to version %s.",
LOGGER.warn("A new version of dependency-check is available. Consider updating to version {}.",
updateToVersion);
LOGGER.warning(msg);
}
} catch (DatabaseException ex) {
LOGGER.log(Level.FINE, "Database Exception opening databases to retrieve properties", ex);
LOGGER.debug("Database Exception opening databases to retrieve properties", ex);
throw new UpdateException("Error occured updating database properties.");
} finally {
closeDatabase();
@@ -116,8 +115,8 @@ public class EngineVersionCheck implements CachedWebDataSource {
* @param properties the database properties object
* @param currentVersion the current version of dependency-check
* @return <code>true</code> if a newer version of the database has been released; otherwise <code>false</code>
* @throws UpdateException thrown if there is an error connecting to the github documentation site or accessing the
* local database.
* @throws UpdateException thrown if there is an error connecting to the github documentation site or accessing the local
* database.
*/
protected boolean shouldUpdate(final long lastChecked, final long now, final DatabaseProperties properties,
String currentVersion) throws UpdateException {
@@ -127,7 +126,7 @@ public class EngineVersionCheck implements CachedWebDataSource {
checkRange = 7;
}
if (!DateUtil.withinDateRange(lastChecked, now, checkRange)) {
LOGGER.fine("Checking web for new version.");
LOGGER.debug("Checking web for new version.");
final String currentRelease = getCurrentReleaseVersion();
if (currentRelease != null) {
final DependencyVersion v = new DependencyVersion(currentRelease);
@@ -141,15 +140,15 @@ public class EngineVersionCheck implements CachedWebDataSource {
properties.save(ENGINE_VERSION_CHECKED_ON, Long.toString(now));
}
}
LOGGER.log(Level.FINE, "Current Release: {0}", updateToVersion);
LOGGER.debug("Current Release: {}", updateToVersion);
}
final DependencyVersion running = new DependencyVersion(currentVersion);
final DependencyVersion released = new DependencyVersion(updateToVersion);
if (running.compareTo(released) < 0) {
LOGGER.fine("Upgrade recommended");
LOGGER.debug("Upgrade recommended");
return true;
}
LOGGER.fine("Upgrade not needed");
LOGGER.debug("Upgrade not needed");
return false;
}
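shouldUpdate above decides on an upgrade recommendation by comparing the running version against the published release via DependencyVersion.compareTo. A small usage sketch with made-up version numbers; at runtime these values come from Settings and the database properties:

import org.owasp.dependencycheck.utils.DependencyVersion;

public final class VersionCompareExample {

    private VersionCompareExample() {
    }

    public static void main(String[] args) {
        final DependencyVersion running = new DependencyVersion("1.2.11");
        final DependencyVersion released = new DependencyVersion("1.3.0");
        if (running.compareTo(released) < 0) {
            System.out.println("Upgrade recommended to " + released);
        } else {
            System.out.println("Upgrade not needed");
        }
    }
}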
@@ -173,8 +172,9 @@ public class EngineVersionCheck implements CachedWebDataSource {
if (cveDB != null) {
try {
cveDB.close();
cveDB = null;
} catch (Throwable ignore) {
LOGGER.log(Level.FINEST, "Error closing the cveDB", ignore);
LOGGER.trace("Error closing the cveDB", ignore);
}
}
}
@@ -199,11 +199,11 @@ public class EngineVersionCheck implements CachedWebDataSource {
return releaseVersion.trim();
}
} catch (MalformedURLException ex) {
LOGGER.log(Level.FINE, "unable to retrieve current release version of dependency-check", ex);
LOGGER.debug("unable to retrieve current release version of dependency-check", ex);
} catch (URLConnectionFailureException ex) {
LOGGER.log(Level.FINE, "unable to retrieve current release version of dependency-check", ex);
LOGGER.debug("unable to retrieve current release version of dependency-check", ex);
} catch (IOException ex) {
LOGGER.log(Level.FINE, "unable to retrieve current release version of dependency-check", ex);
LOGGER.debug("unable to retrieve current release version of dependency-check", ex);
} finally {
if (conn != null) {
conn.disconnect();

View File

@@ -18,23 +18,44 @@
package org.owasp.dependencycheck.data.update;
import java.net.MalformedURLException;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import static org.owasp.dependencycheck.data.nvdcve.DatabaseProperties.MODIFIED;
import org.owasp.dependencycheck.data.update.exception.InvalidDataException;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.data.update.nvd.DownloadTask;
import org.owasp.dependencycheck.data.update.nvd.NvdCveInfo;
import org.owasp.dependencycheck.data.update.nvd.ProcessTask;
import org.owasp.dependencycheck.data.update.nvd.UpdateableNvdCve;
import org.owasp.dependencycheck.utils.DateUtil;
import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Class responsible for updating the NVD CVE and CPE data stores.
* Class responsible for updating the NVD CVE data.
*
* @author Jeremy Long
*/
public class NvdCveUpdater implements CachedWebDataSource {
public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
/**
* The logger
*/
private static final Logger LOGGER = Logger.getLogger(NvdCveUpdater.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(NvdCveUpdater.class);
/**
* The max thread pool size to use when downloading files.
*/
public static final int MAX_THREAD_POOL_SIZE = Settings.getInt(Settings.KEYS.MAX_DOWNLOAD_THREAD_POOL_SIZE, 3);
/**
* <p>
@@ -45,22 +66,220 @@ public class NvdCveUpdater implements CachedWebDataSource {
@Override
public void update() throws UpdateException {
try {
final StandardUpdate task = new StandardUpdate();
if (task.isUpdateNeeded()) {
task.update();
openDataStores();
final UpdateableNvdCve updateable = getUpdatesNeeded();
if (updateable.isUpdateNeeded()) {
performUpdate(updateable);
}
} catch (MalformedURLException ex) {
LOGGER.log(Level.WARNING,
LOGGER.warn(
"NVD CVE properties files contain an invalid URL, unable to update the data to use the most current data.");
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
} catch (DownloadFailedException ex) {
LOGGER.log(Level.WARNING,
LOGGER.warn(
"Unable to download the NVD CVE data; the results may not include the most recent CPE/CVEs from the NVD.");
if (Settings.getString(Settings.KEYS.PROXY_SERVER) == null) {
LOGGER.log(Level.INFO,
LOGGER.info(
"If you are behind a proxy you may need to configure dependency-check to use the proxy.");
}
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
} finally {
closeDataStores();
}
}
/**
* Downloads the latest NVD CVE XML file from the web and imports it into the current CVE Database.
*
* @param updateable a collection of NVD CVE data file references that need to be downloaded and processed to update the
* database
* @throws UpdateException is thrown if there is an error updating the database
*/
public void performUpdate(UpdateableNvdCve updateable) throws UpdateException {
int maxUpdates = 0;
try {
for (NvdCveInfo cve : updateable) {
if (cve.getNeedsUpdate()) {
maxUpdates += 1;
}
}
if (maxUpdates <= 0) {
return;
}
if (maxUpdates > 3) {
LOGGER.info(
"NVD CVE requires several updates; this could take a couple of minutes.");
}
if (maxUpdates > 0) {
openDataStores();
}
final int poolSize = (MAX_THREAD_POOL_SIZE < maxUpdates) ? MAX_THREAD_POOL_SIZE : maxUpdates;
final ExecutorService downloadExecutors = Executors.newFixedThreadPool(poolSize);
final ExecutorService processExecutor = Executors.newSingleThreadExecutor();
final Set<Future<Future<ProcessTask>>> downloadFutures = new HashSet<Future<Future<ProcessTask>>>(maxUpdates);
for (NvdCveInfo cve : updateable) {
if (cve.getNeedsUpdate()) {
final DownloadTask call = new DownloadTask(cve, processExecutor, getCveDB(), Settings.getInstance());
downloadFutures.add(downloadExecutors.submit(call));
}
}
downloadExecutors.shutdown();
//next, move the future future processTasks to just future processTasks
final Set<Future<ProcessTask>> processFutures = new HashSet<Future<ProcessTask>>(maxUpdates);
for (Future<Future<ProcessTask>> future : downloadFutures) {
Future<ProcessTask> task = null;
try {
task = future.get();
} catch (InterruptedException ex) {
downloadExecutors.shutdownNow();
processExecutor.shutdownNow();
LOGGER.debug("Thread was interrupted during download", ex);
throw new UpdateException("The download was interrupted", ex);
} catch (ExecutionException ex) {
downloadExecutors.shutdownNow();
processExecutor.shutdownNow();
LOGGER.debug("Thread was interrupted during download execution", ex);
throw new UpdateException("The execution of the download was interrupted", ex);
}
if (task == null) {
downloadExecutors.shutdownNow();
processExecutor.shutdownNow();
LOGGER.debug("Thread was interrupted during download");
throw new UpdateException("The download was interrupted; unable to complete the update");
} else {
processFutures.add(task);
}
}
for (Future<ProcessTask> future : processFutures) {
try {
final ProcessTask task = future.get();
if (task.getException() != null) {
throw task.getException();
}
} catch (InterruptedException ex) {
processExecutor.shutdownNow();
LOGGER.debug("Thread was interrupted during processing", ex);
throw new UpdateException(ex);
} catch (ExecutionException ex) {
processExecutor.shutdownNow();
LOGGER.debug("Execution Exception during process", ex);
throw new UpdateException(ex);
} finally {
processExecutor.shutdown();
}
}
if (maxUpdates >= 1) { //ensure the modified file date gets written (we may not have actually updated it)
getProperties().save(updateable.get(MODIFIED));
LOGGER.info("Begin database maintenance.");
getCveDB().cleanupDatabase();
LOGGER.info("End database maintenance.");
}
} finally {
closeDataStores();
}
}
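performUpdate above fans the downloads out over a small fixed pool and funnels processing through a single-threaded executor, which is why each download future yields another future (the queued process task). A stripped-down sketch of that two-stage hand-off with placeholder tasks in place of DownloadTask and ProcessTask:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public final class TwoStagePipelineExample {

    private TwoStagePipelineExample() {
    }

    public static void main(String[] args) throws Exception {
        final ExecutorService downloadPool = Executors.newFixedThreadPool(3);
        final ExecutorService processPool = Executors.newSingleThreadExecutor();
        final List<Future<Future<String>>> downloads = new ArrayList<Future<Future<String>>>();

        for (int i = 0; i < 5; i++) {
            final int id = i;
            // the "download" task hands its result to the single-threaded "process" pool
            downloads.add(downloadPool.submit(new Callable<Future<String>>() {
                public Future<String> call() {
                    return processPool.submit(new Callable<String>() {
                        public String call() {
                            return "processed file " + id;
                        }
                    });
                }
            }));
        }
        downloadPool.shutdown();

        // unwrap the nested futures: wait for each download, then for its processing
        for (Future<Future<String>> download : downloads) {
            final Future<String> processing = download.get();
            System.out.println(processing.get());
        }
        processPool.shutdown();
    }
}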
/**
* Determines if the index needs to be updated. This is done by fetching the NVD CVE meta data and checking the last update
* date. If the data needs to be refreshed this method will return the collection of NVD CVE files that need to be updated.
*
* @return the collection of files that need to be updated
* @throws MalformedURLException is thrown if the URL for the NVD CVE Meta data is incorrect
* @throws DownloadFailedException is thrown if there is an error downloading the NVD CVE data file
* @throws UpdateException Is thrown if there is an issue with the last updated properties file
*/
protected final UpdateableNvdCve getUpdatesNeeded() throws MalformedURLException, DownloadFailedException, UpdateException {
UpdateableNvdCve updates = null;
try {
updates = retrieveCurrentTimestampsFromWeb();
} catch (InvalidDataException ex) {
final String msg = "Unable to retrieve valid timestamp from nvd cve downloads page";
LOGGER.debug(msg, ex);
throw new DownloadFailedException(msg, ex);
} catch (InvalidSettingException ex) {
LOGGER.debug("Invalid setting found when retrieving timestamps", ex);
throw new DownloadFailedException("Invalid settings", ex);
}
if (updates == null) {
throw new DownloadFailedException("Unable to retrieve the timestamps of the currently published NVD CVE data");
}
if (!getProperties().isEmpty()) {
try {
final long lastUpdated = Long.parseLong(getProperties().getProperty(DatabaseProperties.LAST_UPDATED, "0"));
final Date now = new Date();
final int days = Settings.getInt(Settings.KEYS.CVE_MODIFIED_VALID_FOR_DAYS, 7);
if (lastUpdated == updates.getTimeStamp(MODIFIED)) {
updates.clear(); //we don't need to update anything.
} else if (DateUtil.withinDateRange(lastUpdated, now.getTime(), days)) {
for (NvdCveInfo entry : updates) {
if (MODIFIED.equals(entry.getId())) {
entry.setNeedsUpdate(true);
} else {
entry.setNeedsUpdate(false);
}
}
} else { //we figure out which of the several XML files need to be downloaded.
for (NvdCveInfo entry : updates) {
if (MODIFIED.equals(entry.getId())) {
entry.setNeedsUpdate(true);
} else {
long currentTimestamp = 0;
try {
currentTimestamp = Long.parseLong(getProperties().getProperty(DatabaseProperties.LAST_UPDATED_BASE
+ entry.getId(), "0"));
} catch (NumberFormatException ex) {
LOGGER.debug("Error parsing '{}' '{}' from nvdcve.lastupdated",
DatabaseProperties.LAST_UPDATED_BASE, entry.getId(), ex);
}
if (currentTimestamp == entry.getTimestamp()) {
entry.setNeedsUpdate(false);
}
}
}
}
} catch (NumberFormatException ex) {
LOGGER.warn("An invalid schema version or timestamp exists in the data.properties file.");
LOGGER.debug("", ex);
}
}
return updates;
}
/**
* Retrieves the timestamps from the NVD CVE meta data file.
*
* @return the timestamp from the currently published nvdcve downloads page
* @throws MalformedURLException thrown if the URL for the NVD CVE meta data is incorrect.
* @throws DownloadFailedException thrown if there is an error downloading the nvd cve meta data file
* @throws InvalidDataException thrown if there is an exception parsing the timestamps
* @throws InvalidSettingException thrown if the settings are invalid
*/
private UpdateableNvdCve retrieveCurrentTimestampsFromWeb()
throws MalformedURLException, DownloadFailedException, InvalidDataException, InvalidSettingException {
final UpdateableNvdCve updates = new UpdateableNvdCve();
updates.add(MODIFIED, Settings.getString(Settings.KEYS.CVE_MODIFIED_20_URL),
Settings.getString(Settings.KEYS.CVE_MODIFIED_12_URL),
false);
final int start = Settings.getInt(Settings.KEYS.CVE_START_YEAR);
final int end = Calendar.getInstance().get(Calendar.YEAR);
final String baseUrl20 = Settings.getString(Settings.KEYS.CVE_SCHEMA_2_0);
final String baseUrl12 = Settings.getString(Settings.KEYS.CVE_SCHEMA_1_2);
for (int i = start; i <= end; i++) {
updates.add(Integer.toString(i), String.format(baseUrl20, i),
String.format(baseUrl12, i),
true);
}
return updates;
}
}
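retrieveCurrentTimestampsFromWeb expands the configured schema 2.0 and 1.2 URL templates once per year, from the configured start year through the current year. A sketch of that expansion with made-up template strings; the real templates come from Settings.KEYS.CVE_SCHEMA_2_0 and CVE_SCHEMA_1_2:

import java.util.Calendar;

public final class NvdUrlExample {

    private NvdUrlExample() {
    }

    public static void main(String[] args) {
        // hypothetical templates with a %d placeholder for the year
        final String baseUrl20 = "https://example.invalid/nvdcve-2.0-%d.xml.gz";
        final String baseUrl12 = "https://example.invalid/nvdcve-%d.xml.gz";
        final int start = 2002;
        final int end = Calendar.getInstance().get(Calendar.YEAR);

        for (int year = start; year <= end; year++) {
            System.out.println(String.format(baseUrl20, year));
            System.out.println(String.format(baseUrl12, year));
        }
    }
}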

View File

@@ -1,321 +0,0 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2012 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.update;
import java.net.MalformedURLException;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import static org.owasp.dependencycheck.data.nvdcve.DatabaseProperties.MODIFIED;
import org.owasp.dependencycheck.data.update.exception.InvalidDataException;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.data.update.task.DownloadTask;
import org.owasp.dependencycheck.data.update.task.ProcessTask;
import org.owasp.dependencycheck.utils.DateUtil;
import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
/**
* Class responsible for updating the NVDCVE data store.
*
* @author Jeremy Long
*/
public class StandardUpdate {
/**
* Static logger.
*/
private static final Logger LOGGER = Logger.getLogger(StandardUpdate.class.getName());
/**
* The max thread pool size to use when downloading files.
*/
public static final int MAX_THREAD_POOL_SIZE = Settings.getInt(Settings.KEYS.MAX_DOWNLOAD_THREAD_POOL_SIZE, 3);
/**
* Information about the timestamps and URLs for data that needs to be updated.
*/
private DatabaseProperties properties;
/**
* A collection of updateable NVD CVE items.
*/
private UpdateableNvdCve updateable;
/**
* Reference to the Cve Database.
*/
private CveDB cveDB = null;
/**
* Gets whether or not an update is needed.
*
* @return true or false depending on whether an update is needed
*/
public boolean isUpdateNeeded() {
return updateable.isUpdateNeeded();
}
/**
* Constructs a new Standard Update Task.
*
* @throws MalformedURLException thrown if a configured URL is malformed
* @throws DownloadFailedException thrown if a timestamp cannot be checked on a configured URL
* @throws UpdateException thrown if there is an exception generating the update task
*/
public StandardUpdate() throws MalformedURLException, DownloadFailedException, UpdateException {
openDataStores();
properties = cveDB.getDatabaseProperties();
updateable = updatesNeeded();
}
/**
* <p>
* Downloads the latest NVD CVE XML file from the web and imports it into the current CVE Database.</p>
*
* @throws UpdateException is thrown if there is an error updating the database
*/
public void update() throws UpdateException {
int maxUpdates = 0;
try {
for (NvdCveInfo cve : updateable) {
if (cve.getNeedsUpdate()) {
maxUpdates += 1;
}
}
if (maxUpdates <= 0) {
return;
}
if (maxUpdates > 3) {
LOGGER.log(Level.INFO,
"NVD CVE requires several updates; this could take a couple of minutes.");
}
if (maxUpdates > 0) {
openDataStores();
}
final int poolSize = (MAX_THREAD_POOL_SIZE < maxUpdates) ? MAX_THREAD_POOL_SIZE : maxUpdates;
final ExecutorService downloadExecutors = Executors.newFixedThreadPool(poolSize);
final ExecutorService processExecutor = Executors.newSingleThreadExecutor();
final Set<Future<Future<ProcessTask>>> downloadFutures = new HashSet<Future<Future<ProcessTask>>>(maxUpdates);
for (NvdCveInfo cve : updateable) {
if (cve.getNeedsUpdate()) {
final DownloadTask call = new DownloadTask(cve, processExecutor, cveDB, Settings.getInstance());
downloadFutures.add(downloadExecutors.submit(call));
}
}
downloadExecutors.shutdown();
//next, move the future future processTasks to just future processTasks
final Set<Future<ProcessTask>> processFutures = new HashSet<Future<ProcessTask>>(maxUpdates);
for (Future<Future<ProcessTask>> future : downloadFutures) {
Future<ProcessTask> task = null;
try {
task = future.get();
} catch (InterruptedException ex) {
downloadExecutors.shutdownNow();
processExecutor.shutdownNow();
LOGGER.log(Level.FINE, "Thread was interrupted during download", ex);
throw new UpdateException("The download was interrupted", ex);
} catch (ExecutionException ex) {
downloadExecutors.shutdownNow();
processExecutor.shutdownNow();
LOGGER.log(Level.FINE, "Thread was interrupted during download execution", ex);
throw new UpdateException("The execution of the download was interrupted", ex);
}
if (task == null) {
downloadExecutors.shutdownNow();
processExecutor.shutdownNow();
LOGGER.log(Level.FINE, "Thread was interrupted during download");
throw new UpdateException("The download was interrupted; unable to complete the update");
} else {
processFutures.add(task);
}
}
for (Future<ProcessTask> future : processFutures) {
try {
final ProcessTask task = future.get();
if (task.getException() != null) {
throw task.getException();
}
} catch (InterruptedException ex) {
processExecutor.shutdownNow();
LOGGER.log(Level.FINE, "Thread was interrupted during processing", ex);
throw new UpdateException(ex);
} catch (ExecutionException ex) {
processExecutor.shutdownNow();
LOGGER.log(Level.FINE, "Execution Exception during process", ex);
throw new UpdateException(ex);
} finally {
processExecutor.shutdown();
}
}
if (maxUpdates >= 1) { //ensure the modified file date gets written (we may not have actually updated it)
properties.save(updateable.get(MODIFIED));
LOGGER.log(Level.INFO, "Begin database maintenance.");
cveDB.cleanupDatabase();
LOGGER.log(Level.INFO, "End database maintenance.");
}
} finally {
closeDataStores();
}
}
/**
* Determines if the index needs to be updated. This is done by fetching the NVD CVE meta data and checking the last
* update date. If the data needs to be refreshed this method will return the NvdCveUrl for the files that need to
* be updated.
*
* @return the collection of files that need to be updated
* @throws MalformedURLException is thrown if the URL for the NVD CVE Meta data is incorrect
* @throws DownloadFailedException is thrown if there is an error. downloading the NVD CVE download data file
* @throws UpdateException Is thrown if there is an issue with the last updated properties file
*/
protected final UpdateableNvdCve updatesNeeded() throws MalformedURLException, DownloadFailedException, UpdateException {
UpdateableNvdCve updates = null;
try {
updates = retrieveCurrentTimestampsFromWeb();
} catch (InvalidDataException ex) {
final String msg = "Unable to retrieve valid timestamp from nvd cve downloads page";
LOGGER.log(Level.FINE, msg, ex);
throw new DownloadFailedException(msg, ex);
} catch (InvalidSettingException ex) {
LOGGER.log(Level.FINE, "Invalid setting found when retrieving timestamps", ex);
throw new DownloadFailedException("Invalid settings", ex);
}
if (updates == null) {
throw new DownloadFailedException("Unable to retrieve the timestamps of the currently published NVD CVE data");
}
if (!properties.isEmpty()) {
try {
final long lastUpdated = Long.parseLong(properties.getProperty(DatabaseProperties.LAST_UPDATED, "0"));
final Date now = new Date();
final int days = Settings.getInt(Settings.KEYS.CVE_MODIFIED_VALID_FOR_DAYS, 7);
if (lastUpdated == updates.getTimeStamp(MODIFIED)) {
updates.clear(); //we don't need to update anything.
} else if (DateUtil.withinDateRange(lastUpdated, now.getTime(), days)) {
for (NvdCveInfo entry : updates) {
if (MODIFIED.equals(entry.getId())) {
entry.setNeedsUpdate(true);
} else {
entry.setNeedsUpdate(false);
}
}
} else { //we figure out which of the several XML files need to be downloaded.
for (NvdCveInfo entry : updates) {
if (MODIFIED.equals(entry.getId())) {
entry.setNeedsUpdate(true);
} else {
long currentTimestamp = 0;
try {
currentTimestamp = Long.parseLong(properties.getProperty(DatabaseProperties.LAST_UPDATED_BASE + entry.getId(), "0"));
} catch (NumberFormatException ex) {
final String msg = String.format("Error parsing '%s' '%s' from nvdcve.lastupdated",
DatabaseProperties.LAST_UPDATED_BASE, entry.getId());
LOGGER.log(Level.FINE, msg, ex);
}
if (currentTimestamp == entry.getTimestamp()) {
entry.setNeedsUpdate(false);
}
}
}
}
} catch (NumberFormatException ex) {
final String msg = "An invalid schema version or timestamp exists in the data.properties file.";
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, "", ex);
}
}
return updates;
}
/**
* Retrieves the timestamps from the NVD CVE meta data file.
*
* @return the timestamp from the currently published nvdcve downloads page
* @throws MalformedURLException thrown if the URL for the NVD CCE Meta data is incorrect.
* @throws DownloadFailedException thrown if there is an error downloading the nvd cve meta data file
* @throws InvalidDataException thrown if there is an exception parsing the timestamps
* @throws InvalidSettingException thrown if the settings are invalid
*/
private UpdateableNvdCve retrieveCurrentTimestampsFromWeb()
throws MalformedURLException, DownloadFailedException, InvalidDataException, InvalidSettingException {
final UpdateableNvdCve updates = new UpdateableNvdCve();
updates.add(MODIFIED, Settings.getString(Settings.KEYS.CVE_MODIFIED_20_URL),
Settings.getString(Settings.KEYS.CVE_MODIFIED_12_URL),
false);
final int start = Settings.getInt(Settings.KEYS.CVE_START_YEAR);
final int end = Calendar.getInstance().get(Calendar.YEAR);
final String baseUrl20 = Settings.getString(Settings.KEYS.CVE_SCHEMA_2_0);
final String baseUrl12 = Settings.getString(Settings.KEYS.CVE_SCHEMA_1_2);
for (int i = start; i <= end; i++) {
updates.add(Integer.toString(i), String.format(baseUrl20, i),
String.format(baseUrl12, i),
true);
}
return updates;
}
/**
* Closes the CVE and CPE data stores.
*/
protected void closeDataStores() {
if (cveDB != null) {
try {
cveDB.close();
} catch (Throwable ignore) {
LOGGER.log(Level.FINEST, "Error closing the cveDB", ignore);
}
}
}
/**
* Opens the CVE and CPE data stores.
*
* @throws UpdateException thrown if a data store cannot be opened
*/
protected final void openDataStores() throws UpdateException {
if (cveDB != null) {
return;
}
try {
cveDB = new CveDB();
cveDB.open();
} catch (DatabaseException ex) {
closeDataStores();
LOGGER.log(Level.FINE, "Database Exception opening databases", ex);
throw new UpdateException("Error updating the CPE/CVE data, please see the log file for more details.");
}
}
}

View File

@@ -0,0 +1,364 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2015 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.update.cpe;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
import org.owasp.dependencycheck.data.update.NvdCveUpdater;
import org.owasp.dependencycheck.data.update.exception.InvalidDataException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
/**
* A SAX Handler that will parse the CPE XML and load it into the database.
*
* @author Jeremy Long
*/
public class CPEHandler extends DefaultHandler {
/**
* The current CPE schema.
*/
private static final String CURRENT_SCHEMA_VERSION = "2.3";
/**
* The text content of the node being processed. This can be used during the end element event.
*/
private StringBuilder nodeText = null;
/**
* A reference to the current element.
*/
private Element current = new Element();
/**
* The logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(NvdCveUpdater.class);
/**
* The list of CPE values.
*/
private List<Cpe> data = new ArrayList<Cpe>();
/**
* Returns the list of CPE values.
*
* @return the list of CPE values
*/
public List<Cpe> getData() {
return data;
}
/**
* Handles the start element event.
*
* @param uri the elements uri
* @param localName the local name
* @param qName the qualified name
* @param attributes the attributes
* @throws SAXException thrown if there is an exception processing the element
*/
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
nodeText = null;
current.setNode(qName);
if (current.isCpeItemNode()) {
final String temp = attributes.getValue("deprecated");
final String value = attributes.getValue("name");
final boolean delete = "true".equalsIgnoreCase(temp);
if (!delete && value.startsWith("cpe:/a:") && value.length() > 7) {
try {
final Cpe cpe = new Cpe(value);
data.add(cpe);
} catch (UnsupportedEncodingException ex) {
LOGGER.debug("Unable to parse the CPE", ex);
} catch (InvalidDataException ex) {
LOGGER.debug("CPE is not the correct format", ex);
}
}
} else if (current.isSchemaVersionNode()) {
nodeText = new StringBuilder(3);
}
// } else if (current.isTitleNode()) {
// //do nothing
// } else if (current.isMetaNode()) {
// //do nothing
// } else if (current.isTimestampNode()) {
// //do nothing
// } else if (current.isCpeListNode()) {
// //do nothing
// } else if (current.isNotesNode()) {
// //do nothing
// } else if (current.isNoteNode()) {
// //do nothing
// } else if (current.isCheckNode()) {
// //do nothing
// } else if (current.isGeneratorNode()) {
// //do nothing
// } else if (current.isProductNameNode()) {
// //do nothing
// } else if (current.isProductVersionNode()) {
// //do nothing
}
/**
* Reads the characters in the current node.
*
* @param ch the char array
* @param start the start position of the data read
* @param length the length of the data read
* @throws SAXException thrown if there is an exception processing the characters
*/
@Override
public void characters(char[] ch, int start, int length) throws SAXException {
if (nodeText != null) {
nodeText.append(ch, start, length);
}
}
/**
* Handles the end element event. Stores the CPE data in the Cve Database if the cpe item node is ending.
*
* @param uri the element's uri
* @param localName the local name
* @param qName the qualified name
* @throws SAXException thrown if there is an exception processing the element
*/
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
current.setNode(qName);
if (current.isSchemaVersionNode() && !CURRENT_SCHEMA_VERSION.equals(nodeText.toString())) {
throw new SAXException("ERROR: Unexpecgted CPE Schema Version, expected: "
+ CURRENT_SCHEMA_VERSION + ", file is: " + nodeText);
}
// } else if (current.isCpeItemNode()) {
// //do nothing
// } else if (current.isTitleNode()) {
// //do nothing
// } else if (current.isCpeListNode()) {
// //do nothing
// } else if (current.isMetaNode()) {
// //do nothing
// } else if (current.isNotesNode()) {
// //do nothing
// } else if (current.isNoteNode()) {
// //do nothing
// } else if (current.isCheckNode()) {
// //do nothing
// } else if (current.isGeneratorNode()) {
// //do nothing
// } else if (current.isProductNameNode()) {
// //do nothing
// } else if (current.isProductVersionNode()) {
// //do nothing
// else if (current.isTimestampNode()) {
// //do nothing
// } else {
// throw new SAXException("ERROR STATE: Unexpected qName '" + qName + "'");
// }
}
// <editor-fold defaultstate="collapsed" desc="The Element Class that maintains state information about the current node">
/**
* A simple class to maintain information about the current element while parsing the CPE XML.
*/
protected class Element {
/**
* A node type in the CPE Schema 2.2
*/
public static final String CPE_LIST = "cpe-list";
/**
* A node type in the CPE Schema 2.2
*/
public static final String CPE_ITEM = "cpe-item";
/**
* A node type in the CPE Schema 2.2
*/
public static final String TITLE = "title";
/**
* A node type in the CPE Schema 2.2
*/
public static final String NOTES = "notes";
/**
* A node type in the CPE Schema 2.2
*/
public static final String NOTE = "note";
/**
* A node type in the CPE Schema 2.2
*/
public static final String CHECK = "check";
/**
* A node type in the CPE Schema 2.2
*/
public static final String META = "meta:item-metadata";
/**
* A node type in the CPE Schema 2.2
*/
public static final String GENERATOR = "generator";
/**
* A node type in the CPE Schema 2.2
*/
public static final String PRODUCT_NAME = "product_name";
/**
* A node type in the CPE Schema 2.2
*/
public static final String PRODUCT_VERSION = "product_version";
/**
* A node type in the CPE Schema 2.2
*/
public static final String SCHEMA_VERSION = "schema_version";
/**
* A node type in the CPE Schema 2.2
*/
public static final String TIMESTAMP = "timestamp";
/**
* A reference to the current node.
*/
private String node = null;
/**
* Gets the value of node
*
* @return the value of node
*/
public String getNode() {
return this.node;
}
/**
* Sets the value of node
*
* @param node new value of node
*/
public void setNode(String node) {
this.node = node;
}
/**
* Checks if the handler is at the CPE_LIST node
*
* @return true or false
*/
public boolean isCpeListNode() {
return CPE_LIST.equals(node);
}
/**
* Checks if the handler is at the CPE_ITEM node
*
* @return true or false
*/
public boolean isCpeItemNode() {
return CPE_ITEM.equals(node);
}
/**
* Checks if the handler is at the TITLE node
*
* @return true or false
*/
public boolean isTitleNode() {
return TITLE.equals(node);
}
/**
* Checks if the handler is at the NOTES node
*
* @return true or false
*/
public boolean isNotesNode() {
return NOTES.equals(node);
}
/**
* Checks if the handler is at the NOTE node
*
* @return true or false
*/
public boolean isNoteNode() {
return NOTE.equals(node);
}
/**
* Checks if the handler is at the CHECK node
*
* @return true or false
*/
public boolean isCheckNode() {
return CHECK.equals(node);
}
/**
* Checks if the handler is at the META node
*
* @return true or false
*/
public boolean isMetaNode() {
return META.equals(node);
}
/**
* Checks if the handler is at the GENERATOR node
*
* @return true or false
*/
public boolean isGeneratorNode() {
return GENERATOR.equals(node);
}
/**
* Checks if the handler is at the PRODUCT_NAME node
*
* @return true or false
*/
public boolean isProductNameNode() {
return PRODUCT_NAME.equals(node);
}
/**
* Checks if the handler is at the PRODUCT_VERSION node
*
* @return true or false
*/
public boolean isProductVersionNode() {
return PRODUCT_VERSION.equals(node);
}
/**
* Checks if the handler is at the SCHEMA_VERSION node
*
* @return true or false
*/
public boolean isSchemaVersionNode() {
return SCHEMA_VERSION.equals(node);
}
/**
* Checks if the handler is at the TIMESTAMP node
*
* @return true or false
*/
public boolean isTimestampNode() {
return TIMESTAMP.equals(node);
}
}
// </editor-fold>
}
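CPEHandler above only collects cpe-item entries whose names start with "cpe:/a:"; the parse itself is driven by CpeUpdater.processXML earlier in this diff. A compact sketch of wiring the handler to a SAX parser; the dictionary file path is an assumption:

import java.io.File;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.owasp.dependencycheck.data.update.cpe.CPEHandler;
import org.owasp.dependencycheck.data.update.cpe.Cpe;

public final class CpeParseExample {

    private CpeParseExample() {
    }

    public static void main(String[] args) throws Exception {
        final File xml = new File("official-cpe-dictionary_v2.3.xml"); // hypothetical local copy
        final SAXParserFactory factory = SAXParserFactory.newInstance();
        final SAXParser parser = factory.newSAXParser();
        final CPEHandler handler = new CPEHandler();
        parser.parse(xml, handler);
        for (Cpe cpe : handler.getData()) {
            System.out.println(cpe.getVendor() + " / " + cpe.getProduct());
        }
    }
}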

View File

@@ -0,0 +1,125 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2015 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.update.cpe;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import org.owasp.dependencycheck.data.update.exception.InvalidDataException;
/**
*
* @author Jeremy Long
*/
public class Cpe {
/**
* Constructs a new Cpe Object by parsing the vendor and product from the CPE identifier value.
*
* @param value the cpe identifier (cpe:/a:vendor:product:version:....)
* @throws UnsupportedEncodingException thrown if UTF-8 is not supported
* @throws InvalidDataException thrown if the CPE provided is not the correct format
*/
public Cpe(String value) throws UnsupportedEncodingException, InvalidDataException {
this.value = value;
final String[] data = value.substring(7).split(":");
if (data.length >= 2) {
vendor = URLDecoder.decode(data[0].replace("+", "%2B"), "UTF-8");
product = URLDecoder.decode(data[1].replace("+", "%2B"), "UTF-8");
} else {
throw new InvalidDataException(String.format("CPE has an invalid format: %s", value));
}
}
/**
* The CPE identifier string (cpe:/a:vendor:product:version).
*/
private String value;
/**
* Get the value of value.
*
* @return the value of value
*/
public String getValue() {
return value;
}
/**
* Set the value of value.
*
* @param value new value of value
*/
public void setValue(String value) {
this.value = value;
}
/**
* The vendor portion of the identifier.
*/
private String vendor;
/**
* Get the value of vendor.
*
* @return the value of vendor
*/
public String getVendor() {
return vendor;
}
/**
* Set the value of vendor.
*
* @param vendor new value of vendor
*/
public void setVendor(String vendor) {
this.vendor = vendor;
}
/**
* The product portion of the identifier.
*/
private String product;
/**
* Get the value of product.
*
* @return the value of product
*/
public String getProduct() {
return product;
}
/**
* Set the value of product.
*
* @param product new value of product
*/
public void setProduct(String product) {
this.product = product;
}
/**
* Returns the full CPE identifier.
*
* @return the full CPE identifier
*/
@Override
public String toString() {
return value;
}
}
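A minimal usage sketch for the class above (not part of the changeset). Note the "+"-to-"%2B" pre-replacement in the constructor: URLDecoder would otherwise decode a literal plus sign as a space.

import java.io.UnsupportedEncodingException;
import org.owasp.dependencycheck.data.update.cpe.Cpe;
import org.owasp.dependencycheck.data.update.exception.InvalidDataException;

public class CpeDemo {
    public static void main(String[] args) throws UnsupportedEncodingException, InvalidDataException {
        // vendor and product are the second and third segments of the identifier
        final Cpe cpe = new Cpe("cpe:/a:gnu:c%2B%2B:4.9");
        System.out.println(cpe.getVendor());   // gnu
        System.out.println(cpe.getProduct());  // c++
    }
}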

View File

@@ -0,0 +1,7 @@
/**
* Contains classes used to parse the CPE XML file from NIST.<br/><br/>
*
* These classes are not used as they add no value over the existing CPE data contained within the CVE data from the NVD. However,
* we may consider pulling the more descriptive data from the CPE data in the future.
*/
package org.owasp.dependencycheck.data.update.cpe;

View File

@@ -15,7 +15,7 @@
*
* Copyright (c) 2013 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.update.task;
package org.owasp.dependencycheck.data.update.nvd;
import java.io.File;
import java.io.FileInputStream;
@@ -26,16 +26,15 @@ import java.net.URL;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.GZIPInputStream;
import org.apache.commons.io.FileUtils;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.update.NvdCveInfo;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.Downloader;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A callable object to download two files.
@@ -47,7 +46,7 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
/**
* The Logger.
*/
private static final Logger LOGGER = Logger.getLogger(DownloadTask.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(DownloadTask.class);
/**
* Simple constructor for the callable download task.
@@ -55,8 +54,8 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
* @param nvdCveInfo the NVD CVE info
* @param processor the processor service to submit the downloaded files to
* @param cveDB the CVE DB to use to store the vulnerability data
* @param settings a reference to the global settings object; this is necessary so that when the thread is started
* the dependencies have a correct reference to the global settings.
* @param settings a reference to the global settings object; this is necessary so that when the thread is started the
* dependencies have a correct reference to the global settings.
* @throws UpdateException thrown if temporary files could not be created
*/
public DownloadTask(NvdCveInfo nvdCveInfo, ExecutorService processor, CveDB cveDB, Settings settings) throws UpdateException {
@@ -185,19 +184,17 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
Settings.setInstance(settings);
final URL url1 = new URL(nvdCveInfo.getUrl());
final URL url2 = new URL(nvdCveInfo.getOldSchemaVersionUrl());
String msg = String.format("Download Started for NVD CVE - %s", nvdCveInfo.getId());
LOGGER.log(Level.INFO, msg);
LOGGER.info("Download Started for NVD CVE - {}", nvdCveInfo.getId());
try {
Downloader.fetchFile(url1, first);
Downloader.fetchFile(url2, second);
} catch (DownloadFailedException ex) {
msg = String.format("Download Failed for NVD CVE - %s%nSome CVEs may not be reported.", nvdCveInfo.getId());
LOGGER.log(Level.WARNING, msg);
LOGGER.warn("Download Failed for NVD CVE - {}\nSome CVEs may not be reported.", nvdCveInfo.getId());
if (Settings.getString(Settings.KEYS.PROXY_SERVER) == null) {
LOGGER.log(Level.INFO,
LOGGER.info(
"If you are behind a proxy you may need to configure dependency-check to use the proxy.");
}
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
return null;
}
if (url1.toExternalForm().endsWith(".xml.gz")) {
@@ -207,8 +204,7 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
extractGzip(second);
}
msg = String.format("Download Complete for NVD CVE - %s", nvdCveInfo.getId());
LOGGER.log(Level.INFO, msg);
LOGGER.info("Download Complete for NVD CVE - {}", nvdCveInfo.getId());
if (this.processorService == null) {
return null;
}
@@ -216,9 +212,8 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
return this.processorService.submit(task);
} catch (Throwable ex) {
final String msg = String.format("An exception occurred downloading NVD CVE - %s%nSome CVEs may not be reported.", nvdCveInfo.getId());
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, "Download Task Failed", ex);
LOGGER.warn("An exception occurred downloading NVD CVE - {}\nSome CVEs may not be reported.", nvdCveInfo.getId());
LOGGER.debug("Download Task Failed", ex);
} finally {
Settings.cleanup(false);
}
@@ -252,8 +247,7 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
}
/**
* Extracts the file contained in a gzip archive. The extracted file is placed in the exact same path as the file
* specified.
* Extracts the file contained in a gzip archive. The extracted file is placed in the exact same path as the file specified.
*
* @param file the archive file
* @throws FileNotFoundException thrown if the file does not exist
@@ -287,14 +281,14 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
try {
cin.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, "ignore", ex);
LOGGER.trace("ignore", ex);
}
}
if (out != null) {
try {
out.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, "ignore", ex);
LOGGER.trace("ignore", ex);
}
}
if (gzip.isFile()) {

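The hunks above replace java.util.logging calls with SLF4J's parameterized logging. A standalone sketch of the two idioms relied on, placeholder substitution and a trailing Throwable being logged as the exception; the class name and message text are illustrative only.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Slf4jPatternDemo {
    private static final Logger LOGGER = LoggerFactory.getLogger(Slf4jPatternDemo.class);

    public static void main(String[] args) {
        final String id = "CVE-Modified";
        // {} placeholders are substituted lazily; no String.format is needed
        LOGGER.info("Download Started for NVD CVE - {}", id);
        try {
            throw new IllegalStateException("simulated failure");
        } catch (IllegalStateException ex) {
            // when the last argument is a Throwable, SLF4J logs its stack trace
            // at the chosen level in addition to the formatted message
            LOGGER.warn("Download Failed for NVD CVE - {}", id, ex);
        }
    }
}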
View File

@@ -15,7 +15,7 @@
*
* Copyright (c) 2012 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.update.xml;
package org.owasp.dependencycheck.data.update.nvd;
import java.util.ArrayList;
import java.util.HashMap;

View File

@@ -15,19 +15,19 @@
*
* Copyright (c) 2012 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.update.xml;
package org.owasp.dependencycheck.data.update.nvd;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.lucene.index.CorruptIndexException;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.dependency.Reference;
import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.SAXNotSupportedException;
@@ -43,7 +43,7 @@ public class NvdCve20Handler extends DefaultHandler {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(NvdCve20Handler.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(NvdCve20Handler.class);
/**
* the current supported schema version.
*/
@@ -172,8 +172,8 @@ public class NvdCve20Handler extends DefaultHandler {
final float score = Float.parseFloat(nodeText.toString());
vulnerability.setCvssScore(score);
} catch (NumberFormatException ex) {
LOGGER.log(Level.SEVERE, "Error parsing CVSS Score.");
LOGGER.log(Level.FINE, null, ex);
LOGGER.error("Error parsing CVSS Score.");
LOGGER.debug("", ex);
}
nodeText = null;
} else if (current.isCVSSAccessVectorNode()) {

View File

@@ -15,7 +15,7 @@
*
* Copyright (c) 2013 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.update;
package org.owasp.dependencycheck.data.update.nvd;
/**
* A pojo that contains the Url and timestamp of the current NvdCve XML files.

View File

@@ -15,7 +15,7 @@
*
* Copyright (c) 2013 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.update.task;
package org.owasp.dependencycheck.data.update.nvd;
import java.io.File;
import java.io.FileNotFoundException;
@@ -24,8 +24,6 @@ import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
@@ -33,10 +31,10 @@ import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.data.update.xml.NvdCve12Handler;
import org.owasp.dependencycheck.data.update.xml.NvdCve20Handler;
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
/**
@@ -49,7 +47,7 @@ public class ProcessTask implements Callable<ProcessTask> {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(ProcessTask.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(ProcessTask.class);
/**
* A field to store any update exceptions that occur during the "call".
*/
@@ -94,8 +92,8 @@ public class ProcessTask implements Callable<ProcessTask> {
*
* @param cveDB the data store object
* @param filePair the download task that contains the URL references to download
* @param settings a reference to the global settings object; this is necessary so that when the thread is started
* the dependencies have a correct reference to the global settings.
* @param settings a reference to the global settings object; this is necessary so that when the thread is started the
* dependencies have a correct reference to the global settings.
*/
public ProcessTask(final CveDB cveDB, final DownloadTask filePair, Settings settings) {
this.cveDB = cveDB;
@@ -108,8 +106,8 @@ public class ProcessTask implements Callable<ProcessTask> {
* Implements the callable interface.
*
* @return this object
* @throws Exception thrown if there is an exception; note that any UpdateExceptions are simply added to the tasks
* exception collection
* @throws Exception thrown if there is an exception; note that any UpdateExceptions are simply added to the tasks exception
* collection
*/
@Override
public ProcessTask call() throws Exception {
@@ -158,8 +156,7 @@ public class ProcessTask implements Callable<ProcessTask> {
* @throws UpdateException thrown if there is an error loading the data into the database
*/
private void processFiles() throws UpdateException {
String msg = String.format("Processing Started for NVD CVE - %s", filePair.getNvdCveInfo().getId());
LOGGER.log(Level.INFO, msg);
LOGGER.info("Processing Started for NVD CVE - {}", filePair.getNvdCveInfo().getId());
try {
importXML(filePair.getFirst(), filePair.getSecond());
cveDB.commit();
@@ -181,7 +178,6 @@ public class ProcessTask implements Callable<ProcessTask> {
} finally {
filePair.cleanup();
}
msg = String.format("Processing Complete for NVD CVE - %s", filePair.getNvdCveInfo().getId());
LOGGER.log(Level.INFO, msg);
LOGGER.info("Processing Complete for NVD CVE - {}", filePair.getNvdCveInfo().getId());
}
}

View File

@@ -15,7 +15,7 @@
*
* Copyright (c) 2012 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.update;
package org.owasp.dependencycheck.data.update.nvd;
import java.net.MalformedURLException;
import java.net.URL;
@@ -27,8 +27,8 @@ import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.Downloader;
/**
* Contains a collection of updateable NvdCveInfo objects. This is used to determine which files need to be downloaded
* and processed.
* Contains a collection of updateable NvdCveInfo objects. This is used to determine which files need to be downloaded and
* processed.
*
* @author Jeremy Long
*/
@@ -67,8 +67,7 @@ public class UpdateableNvdCve implements java.lang.Iterable<NvdCveInfo>, Iterato
*
* @param id the key for the item to be added
* @param url the URL to download the item
* @param oldUrl the URL for the old version of the item (the NVD CVE old schema still contains useful data we
* need).
* @param oldUrl the URL for the old version of the item (the NVD CVE old schema still contains useful data we need).
* @throws MalformedURLException thrown if the URL provided is invalid
* @throws DownloadFailedException thrown if the download fails.
*/
@@ -81,8 +80,7 @@ public class UpdateableNvdCve implements java.lang.Iterable<NvdCveInfo>, Iterato
*
* @param id the key for the item to be added
* @param url the URL to download the item
* @param oldUrl the URL for the old version of the item (the NVD CVE old schema still contains useful data we
* need).
* @param oldUrl the URL for the old version of the item (the NVD CVE old schema still contains useful data we need).
* @param needsUpdate whether or not the data needs to be updated
* @throws MalformedURLException thrown if the URL provided is invalid
* @throws DownloadFailedException thrown if the download fails.
@@ -175,7 +173,7 @@ public class UpdateableNvdCve implements java.lang.Iterable<NvdCveInfo>, Iterato
* @param key the key to lookup the return value
* @return the NvdCveInfo object stored using the specified key
*/
NvdCveInfo get(String key) {
public NvdCveInfo get(String key) {
return collection.get(key);
}

View File

@@ -0,0 +1,4 @@
/**
* Contains classes used to download, parse, and load the NVD CVE data from NIST into the local database.<br/><br/>
*/
package org.owasp.dependencycheck.data.update.nvd;

View File

@@ -1,4 +0,0 @@
/**
* A collection of callable/runnable tasks used to speed up the update process.
*/
package org.owasp.dependencycheck.data.update.task;

View File

@@ -1,8 +0,0 @@
/**
* Contains classes used to parse the NVD CVE XML file.<br/><br/>
*
* The basic use is that the Importer is called to import an NVD CVE file. The Importer instantiates an Indexer object (which
* extends Index). The Indexer creates a partial-unmarshalling SAX parser (implemented in the NvdCveXmlFilter) that extracts
* VulnerabilityTypes (aka Entry) from the NVD CVE data file and stores these into a Lucene Index.
*/
package org.owasp.dependencycheck.data.update.xml;

View File

@@ -27,11 +27,12 @@ import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.lang.ObjectUtils;
import org.owasp.dependencycheck.data.nexus.MavenArtifact;
import org.owasp.dependencycheck.utils.Checksum;
import org.owasp.dependencycheck.utils.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A program dependency. This object is one of the core components within DependencyCheck. It is used to collect information about
@@ -45,7 +46,15 @@ public class Dependency implements Serializable, Comparable<Dependency> {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(Dependency.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(Dependency.class);
/**
* Used as starting point for generating the value in {@link #hashCode()}.
*/
private static final int MAGIC_HASH_INIT_VALUE = 3;
/**
* Used as a multiplier for generating the value in {@link #hashCode()}.
*/
private static final int MAGIC_HASH_MULTIPLIER = 47;
/**
* The actual file path of the dependency on disk.
*/
@@ -58,10 +67,6 @@ public class Dependency implements Serializable, Comparable<Dependency> {
* The file name of the dependency.
*/
private String fileName;
/**
* The file extension of the dependency.
*/
private String fileExtension;
/**
* The md5 hash of the dependency.
*/
@@ -107,10 +112,9 @@ public class Dependency implements Serializable, Comparable<Dependency> {
*/
public Dependency(File file) {
this();
this.actualFilePath = file.getPath();
this.actualFilePath = file.getAbsolutePath();
this.filePath = this.actualFilePath;
this.fileName = file.getName();
this.fileExtension = FileUtils.getFileExtension(fileName);
determineHashes(file);
}
@@ -221,24 +225,6 @@ public class Dependency implements Serializable, Comparable<Dependency> {
return this.filePath;
}
/**
* Sets the file extension of the dependency.
*
* @param fileExtension the file name of the dependency
*/
public void setFileExtension(String fileExtension) {
this.fileExtension = fileExtension;
}
/**
* Gets the file extension of the dependency.
*
* @return the file extension of the dependency
*/
public String getFileExtension() {
return this.fileExtension;
}
/**
* Returns the MD5 Checksum of the dependency file.
*
@@ -345,12 +331,12 @@ public class Dependency implements Serializable, Comparable<Dependency> {
final String url = "http://search.maven.org/#search|ga|1|1%3A%22" + this.getSha1sum() + "%22";
i.setUrl(url);
//i.setUrl(mavenArtifact.getArtifactUrl());
LOGGER.fine(String.format("Already found identifier %s. Confidence set to highest", i.getValue()));
LOGGER.debug("Already found identifier {}. Confidence set to highest", i.getValue());
break;
}
}
if (!found) {
LOGGER.fine(String.format("Adding new maven identifier %s", mavenArtifact.toString()));
LOGGER.debug("Adding new maven identifier {}", mavenArtifact.toString());
this.addIdentifier("maven", mavenArtifact.toString(), mavenArtifact.getArtifactUrl(), Confidence.HIGHEST);
}
}
@@ -564,13 +550,11 @@ public class Dependency implements Serializable, Comparable<Dependency> {
md5 = Checksum.getMD5Checksum(file);
sha1 = Checksum.getSHA1Checksum(file);
} catch (IOException ex) {
final String msg = String.format("Unable to read '%s' to determine hashes.", file.getName());
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.warn("Unable to read '{}' to determine hashes.", file.getName());
LOGGER.debug("", ex);
} catch (NoSuchAlgorithmException ex) {
final String msg = "Unable to use MD5 of SHA1 checksums.";
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.warn("Unable to use MD5 of SHA1 checksums.");
LOGGER.debug("", ex);
}
this.setMd5sum(md5);
this.setSha1sum(sha1);
@@ -591,7 +575,8 @@ public class Dependency implements Serializable, Comparable<Dependency> {
private Set<Dependency> relatedDependencies = new TreeSet<Dependency>();
/**
* Get the value of relatedDependencies.
* Get the value of {@link #relatedDependencies}. This field is used to collect other dependencies which really represent the
* same dependency, and may be presented as one item in reports.
*
* @return the value of relatedDependencies
*/
@@ -650,18 +635,22 @@ public class Dependency implements Serializable, Comparable<Dependency> {
}
/**
* Adds a related dependency.
* Adds a related dependency. The internal collection is normally a {@link java.util.TreeSet}, which relies on
* {@link #compareTo(Dependency)}. A consequence of this is that if you attempt to add a dependency with the same file path
* (modulo character case) as one that is already in the collection, it won't get added.
*
* @param dependency a reference to the related dependency
*/
public void addRelatedDependency(Dependency dependency) {
if (this == dependency) {
LOGGER.warning("Attempted to add a circular reference - please post the log file to issue #172 here "
+ "https://github.com/jeremylong/DependencyCheck/issues/172 ");
LOGGER.log(Level.FINE, "this: {0}", this.toString());
LOGGER.log(Level.FINE, "dependency: {0}", dependency.toString());
} else {
relatedDependencies.add(dependency);
LOGGER.warn("Attempted to add a circular reference - please post the log file to issue #172 here "
+ "https://github.com/jeremylong/DependencyCheck/issues/172");
LOGGER.debug("this: {}", this);
LOGGER.debug("dependency: {}", dependency);
} else if (!relatedDependencies.add(dependency)) {
LOGGER.debug("Failed to add dependency, likely due to referencing the same file as another dependency in the set.");
LOGGER.debug("this: {}", this);
LOGGER.debug("dependency: {}", dependency);
}
}
@@ -698,7 +687,7 @@ public class Dependency implements Serializable, Comparable<Dependency> {
}
/**
* Implementation of the Comparable<Dependency> interface. The comparison is solely based on the file name.
* Implementation of the Comparable<Dependency> interface. The comparison is solely based on the file path.
*
* @param o a dependency to compare
* @return an integer representing the natural ordering
@@ -715,66 +704,25 @@ public class Dependency implements Serializable, Comparable<Dependency> {
*/
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
if (obj == null || getClass() != obj.getClass()) {
return false;
}
final Dependency other = (Dependency) obj;
if ((this.actualFilePath == null) ? (other.actualFilePath != null) : !this.actualFilePath.equals(other.actualFilePath)) {
return false;
}
if ((this.filePath == null) ? (other.filePath != null) : !this.filePath.equals(other.filePath)) {
return false;
}
if ((this.fileName == null) ? (other.fileName != null) : !this.fileName.equals(other.fileName)) {
return false;
}
if ((this.fileExtension == null) ? (other.fileExtension != null) : !this.fileExtension.equals(other.fileExtension)) {
return false;
}
if ((this.md5sum == null) ? (other.md5sum != null) : !this.md5sum.equals(other.md5sum)) {
return false;
}
if ((this.sha1sum == null) ? (other.sha1sum != null) : !this.sha1sum.equals(other.sha1sum)) {
return false;
}
if (this.identifiers != other.identifiers && (this.identifiers == null || !this.identifiers.equals(other.identifiers))) {
return false;
}
if (this.vendorEvidence != other.vendorEvidence && (this.vendorEvidence == null || !this.vendorEvidence.equals(other.vendorEvidence))) {
return false;
}
if (this.productEvidence != other.productEvidence && (this.productEvidence == null || !this.productEvidence.equals(other.productEvidence))) {
return false;
}
if (this.versionEvidence != other.versionEvidence && (this.versionEvidence == null || !this.versionEvidence.equals(other.versionEvidence))) {
return false;
}
if ((this.description == null) ? (other.description != null) : !this.description.equals(other.description)) {
return false;
}
if ((this.license == null) ? (other.license != null) : !this.license.equals(other.license)) {
return false;
}
if (this.vulnerabilities != other.vulnerabilities && (this.vulnerabilities == null || !this.vulnerabilities.equals(other.vulnerabilities))) {
return false;
}
if (this.relatedDependencies != other.relatedDependencies
&& (this.relatedDependencies == null || !this.relatedDependencies.equals(other.relatedDependencies))) {
return false;
}
if (this.projectReferences != other.projectReferences
&& (this.projectReferences == null || !this.projectReferences.equals(other.projectReferences))) {
return false;
}
if (this.availableVersions != other.availableVersions
&& (this.availableVersions == null || !this.availableVersions.equals(other.availableVersions))) {
return false;
}
return true;
return ObjectUtils.equals(this.actualFilePath, other.actualFilePath)
&& ObjectUtils.equals(this.filePath, other.filePath)
&& ObjectUtils.equals(this.fileName, other.fileName)
&& ObjectUtils.equals(this.md5sum, other.md5sum)
&& ObjectUtils.equals(this.sha1sum, other.sha1sum)
&& ObjectUtils.equals(this.identifiers, other.identifiers)
&& ObjectUtils.equals(this.vendorEvidence, other.vendorEvidence)
&& ObjectUtils.equals(this.productEvidence, other.productEvidence)
&& ObjectUtils.equals(this.versionEvidence, other.versionEvidence)
&& ObjectUtils.equals(this.description, other.description)
&& ObjectUtils.equals(this.license, other.license)
&& ObjectUtils.equals(this.vulnerabilities, other.vulnerabilities)
&& ObjectUtils.equals(this.relatedDependencies, other.relatedDependencies)
&& ObjectUtils.equals(this.projectReferences, other.projectReferences)
&& ObjectUtils.equals(this.availableVersions, other.availableVersions);
}
/**
@@ -784,23 +732,13 @@ public class Dependency implements Serializable, Comparable<Dependency> {
*/
@Override
public int hashCode() {
int hash = 3;
hash = 47 * hash + (this.actualFilePath != null ? this.actualFilePath.hashCode() : 0);
hash = 47 * hash + (this.filePath != null ? this.filePath.hashCode() : 0);
hash = 47 * hash + (this.fileName != null ? this.fileName.hashCode() : 0);
hash = 47 * hash + (this.fileExtension != null ? this.fileExtension.hashCode() : 0);
hash = 47 * hash + (this.md5sum != null ? this.md5sum.hashCode() : 0);
hash = 47 * hash + (this.sha1sum != null ? this.sha1sum.hashCode() : 0);
hash = 47 * hash + (this.identifiers != null ? this.identifiers.hashCode() : 0);
hash = 47 * hash + (this.vendorEvidence != null ? this.vendorEvidence.hashCode() : 0);
hash = 47 * hash + (this.productEvidence != null ? this.productEvidence.hashCode() : 0);
hash = 47 * hash + (this.versionEvidence != null ? this.versionEvidence.hashCode() : 0);
hash = 47 * hash + (this.description != null ? this.description.hashCode() : 0);
hash = 47 * hash + (this.license != null ? this.license.hashCode() : 0);
hash = 47 * hash + (this.vulnerabilities != null ? this.vulnerabilities.hashCode() : 0);
hash = 47 * hash + (this.relatedDependencies != null ? this.relatedDependencies.hashCode() : 0);
hash = 47 * hash + (this.projectReferences != null ? this.projectReferences.hashCode() : 0);
hash = 47 * hash + (this.availableVersions != null ? this.availableVersions.hashCode() : 0);
int hash = MAGIC_HASH_INIT_VALUE;
for (Object field : new Object[]{this.actualFilePath, this.filePath, this.fileName, this.md5sum,
this.sha1sum, this.identifiers, this.vendorEvidence, this.productEvidence, this.versionEvidence,
this.description, this.license, this.vulnerabilities, this.relatedDependencies, this.projectReferences,
this.availableVersions}) {
hash = MAGIC_HASH_MULTIPLIER * hash + ObjectUtils.hashCode(field);
}
return hash;
}
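A hypothetical sketch (not part of the changeset) of the addRelatedDependency behaviour documented above: a self-reference is refused with a warning, and a second dependency whose file path differs only in case compares equal in the backing TreeSet and is therefore not added. The getRelatedDependencies() accessor is assumed here.

import java.io.File;
import org.owasp.dependencycheck.dependency.Dependency;

public class RelatedDependencyDemo {
    public static void main(String[] args) {
        final Dependency war = new Dependency(new File("app.war"));
        final Dependency first = new Dependency(new File("lib/commons-lang-2.6.jar"));
        final Dependency sameFileDifferentCase = new Dependency(new File("LIB/COMMONS-LANG-2.6.JAR"));

        war.addRelatedDependency(war);                    // circular reference: warned and skipped
        war.addRelatedDependency(first);                  // added
        war.addRelatedDependency(sameFileDifferentCase);  // compares equal to 'first': debug-logged, not added
        System.out.println(war.getRelatedDependencies().size()); // expected: 1
    }
}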

View File

@@ -17,6 +17,9 @@
*/
package org.owasp.dependencycheck.dependency;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
import java.io.Serializable;
/**
@@ -26,6 +29,16 @@ import java.io.Serializable;
*/
public class Evidence implements Serializable, Comparable<Evidence> {
/**
* Used as starting point for generating the value in {@link #hashCode()}.
*/
private static final int MAGIC_HASH_INIT_VALUE = 3;
/**
* Used as a multiplier for generating the value in {@link #hashCode()}.
*/
private static final int MAGIC_HASH_MULTIPLIER = 67;
/**
* Creates a new Evidence object.
*/
@@ -35,9 +48,9 @@ public class Evidence implements Serializable, Comparable<Evidence> {
/**
* Creates a new Evidence object.
*
* @param source the source of the evidence.
* @param name the name of the evidence.
* @param value the value of the evidence.
* @param source the source of the evidence.
* @param name the name of the evidence.
* @param value the value of the evidence.
* @param confidence the confidence of the evidence.
*/
public Evidence(String source, String name, String value, Confidence confidence) {
@@ -46,6 +59,7 @@ public class Evidence implements Serializable, Comparable<Evidence> {
this.value = value;
this.confidence = confidence;
}
/**
* The name of the evidence.
*/
@@ -68,6 +82,7 @@ public class Evidence implements Serializable, Comparable<Evidence> {
public void setName(String name) {
this.name = name;
}
/**
* The source of the evidence.
*/
@@ -90,6 +105,7 @@ public class Evidence implements Serializable, Comparable<Evidence> {
public void setSource(String source) {
this.source = source;
}
/**
* The value of the evidence.
*/
@@ -124,6 +140,7 @@ public class Evidence implements Serializable, Comparable<Evidence> {
public void setValue(String value) {
this.value = value;
}
/**
* A value indicating if the Evidence has been "used" (aka read).
*/
@@ -146,6 +163,7 @@ public class Evidence implements Serializable, Comparable<Evidence> {
public void setUsed(boolean used) {
this.used = used;
}
/**
* The confidence level for the evidence.
*/
@@ -176,11 +194,11 @@ public class Evidence implements Serializable, Comparable<Evidence> {
*/
@Override
public int hashCode() {
int hash = 3;
hash = 67 * hash + (this.name != null ? this.name.hashCode() : 0);
hash = 67 * hash + (this.source != null ? this.source.hashCode() : 0);
hash = 67 * hash + (this.value != null ? this.value.hashCode() : 0);
hash = 67 * hash + (this.confidence != null ? this.confidence.hashCode() : 0);
int hash = MAGIC_HASH_INIT_VALUE;
hash = MAGIC_HASH_MULTIPLIER * hash + ObjectUtils.hashCode(StringUtils.lowerCase(this.name));
hash = MAGIC_HASH_MULTIPLIER * hash + ObjectUtils.hashCode(StringUtils.lowerCase(this.source));
hash = MAGIC_HASH_MULTIPLIER * hash + ObjectUtils.hashCode(StringUtils.lowerCase(this.value));
hash = MAGIC_HASH_MULTIPLIER * hash + ObjectUtils.hashCode(this.confidence);
return hash;
}
@@ -200,19 +218,10 @@ public class Evidence implements Serializable, Comparable<Evidence> {
}
final Evidence e = (Evidence) that;
return testEquality(name, e.name) && testEquality(source, e.source) && testEquality(value, e.value)
&& (confidence == null ? e.confidence == null : confidence == e.confidence);
}
/**
* Simple equality test for use within the equals method. This does a case insensitive compare.
*
* @param l a string to compare.
* @param r another string to compare.
* @return whether the two strings are the same.
*/
private boolean testEquality(String l, String r) {
return l == null ? r == null : l.equalsIgnoreCase(r);
return StringUtils.equalsIgnoreCase(name, e.name)
&& StringUtils.equalsIgnoreCase(source, e.source)
&& StringUtils.equalsIgnoreCase(value, e.value)
&& ObjectUtils.equals(confidence, e.confidence);
}
/**
@@ -225,13 +234,13 @@ public class Evidence implements Serializable, Comparable<Evidence> {
if (o == null) {
return 1;
}
if (equalsWithNullCheck(source, o.source)) {
if (equalsWithNullCheck(name, o.name)) {
if (equalsWithNullCheck(value, o.value)) {
if (equalsWithNullCheck(confidence, o.confidence)) {
if (StringUtils.equalsIgnoreCase(source, o.source)) {
if (StringUtils.equalsIgnoreCase(name, o.name)) {
if (StringUtils.equalsIgnoreCase(value, o.value)) {
if (ObjectUtils.equals(confidence, o.confidence)) {
return 0; //they are equal
} else {
return compareToWithNullCheck(confidence, o.confidence);
return ObjectUtils.compare(confidence, o.confidence);
}
} else {
return compareToIgnoreCaseWithNullCheck(value, o.value);
@@ -244,43 +253,11 @@ public class Evidence implements Serializable, Comparable<Evidence> {
}
}
/**
* Equality check with an exhaustive, possibly duplicative, check against nulls.
*
* @param me the value to be compared
* @param other the other value to be compared
* @return true if the values are equal; otherwise false
*/
private boolean equalsWithNullCheck(String me, String other) {
if (me == null && other == null) {
return true;
} else if (me == null || other == null) {
return false;
}
return me.equalsIgnoreCase(other);
}
/**
* Equality check with an exhaustive, possibly duplicative, check against nulls.
*
* @param me the value to be compared
* @param other the other value to be compared
* @return true if the values are equal; otherwise false
*/
private boolean equalsWithNullCheck(Confidence me, Confidence other) {
if (me == null && other == null) {
return true;
} else if (me == null || other == null) {
return false;
}
return me.equals(other);
}
/**
* Wrapper around {@link java.lang.String#compareToIgnoreCase(java.lang.String) String.compareToIgnoreCase} with an
* exhaustive, possibly duplicative, check against nulls.
*
* @param me the value to be compared
* @param me the value to be compared
* @param other the other value to be compared
* @return true if the values are equal; otherwise false
*/
@@ -295,25 +272,6 @@ public class Evidence implements Serializable, Comparable<Evidence> {
return me.compareToIgnoreCase(other);
}
/**
* Wrapper around {@link java.lang.Enum#compareTo(java.lang.Enum) Enum.compareTo} with an exhaustive, possibly duplicative,
* check against nulls.
*
* @param me the value to be compared
* @param other the other value to be compared
* @return true if the values are equal; otherwise false
*/
private int compareToWithNullCheck(Confidence me, Confidence other) {
if (me == null && other == null) {
return 0;
} else if (me == null) {
return -1; //the other string is greater then me
} else if (other == null) {
return 1; //me is greater then the other string
}
return me.compareTo(other);
}
/**
* Standard toString() implementation.
*

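The rewritten hashCode() lower-cases each string field so it stays consistent with the case-insensitive equals(). A hypothetical sketch of why that matters for hash-based collections; Confidence is assumed to be the enum from the same package.

import java.util.HashSet;
import java.util.Set;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Evidence;

public class EvidenceHashDemo {
    public static void main(String[] args) {
        final Evidence a = new Evidence("Manifest", "vendor", "Apache", Confidence.HIGHEST);
        final Evidence b = new Evidence("manifest", "VENDOR", "apache", Confidence.HIGHEST);

        // equals() ignores case, so hashCode() must ignore case as well or a HashSet
        // could hold both entries in different buckets
        final Set<Evidence> evidence = new HashSet<Evidence>();
        evidence.add(a);
        evidence.add(b);
        System.out.println(evidence.size()); // 1 once hashCode and equals agree
    }
}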
View File

@@ -24,13 +24,13 @@ import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.lang.StringUtils;
import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
import org.owasp.dependencycheck.utils.Filter;
import org.owasp.dependencycheck.utils.UrlStringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Used to maintain a collection of Evidence.
@@ -42,7 +42,7 @@ public class EvidenceCollection implements Serializable, Iterable<Evidence> {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(EvidenceCollection.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(EvidenceCollection.class);
/**
* Used to iterate over highest confidence evidence contained in the collection.
*/
@@ -141,13 +141,13 @@ public class EvidenceCollection implements Serializable, Iterable<Evidence> {
}
/**
* Adds term to the weighting collection. The terms added here are used later to boost the score of other terms.
* This is a way of combining evidence from multiple sources to boost the confidence of the given evidence.
* Adds term to the weighting collection. The terms added here are used later to boost the score of other terms. This is a way
* of combining evidence from multiple sources to boost the confidence of the given evidence.
*
* Example: The term 'Apache' is found in the manifest of a JAR and is added to the Collection. When we parse the
* package names within the JAR file we may add these package names to the "weighted" strings collection to boost
* the score in the Lucene query. That way when we construct the Lucene query we find the term Apache in the
* collection AND in the weighted strings; as such, we will boost the confidence of the term Apache.
* Example: The term 'Apache' is found in the manifest of a JAR and is added to the Collection. When we parse the package
* names within the JAR file we may add these package names to the "weighted" strings collection to boost the score in the
* Lucene query. That way when we construct the Lucene query we find the term Apache in the collection AND in the weighted
* strings; as such, we will boost the confidence of the term Apache.
*
* @param str to add to the weighting collection.
*/
@@ -156,8 +156,8 @@ public class EvidenceCollection implements Serializable, Iterable<Evidence> {
}
/**
* Returns a set of Weightings - a list of terms that are believed to be of higher confidence when also found in
* another location.
* Returns a set of Weightings - a list of terms that are believed to be of higher confidence when also found in another
* location.
*
* @return Set<String>
*/
@@ -322,11 +322,11 @@ public class EvidenceCollection implements Serializable, Iterable<Evidence> {
final Set<Evidence> ret = new TreeSet<Evidence>();
for (EvidenceCollection col : ec) {
for (Evidence e : col) {
if (e.isUsed()) {
final Evidence newEvidence = new Evidence(e.getSource(), e.getName(), e.getValue(), null);
newEvidence.setUsed(true);
ret.add(newEvidence);
}
//if (e.isUsed()) {
final Evidence newEvidence = new Evidence(e.getSource(), e.getName(), e.getValue(), null);
newEvidence.setUsed(true);
ret.add(newEvidence);
//}
}
}
return ret;
@@ -357,11 +357,11 @@ public class EvidenceCollection implements Serializable, Iterable<Evidence> {
/**
* <p>
* Takes a string that may contain a fully qualified domain and it will return the string having removed the query
* string, the protocol, the sub-domain of 'www', and the file extension of the path.</p>
* Takes a string that may contain a fully qualified domain and it will return the string having removed the query string, the
* protocol, the sub-domain of 'www', and the file extension of the path.</p>
* <p>
* This is useful for checking if the evidence contains a specific string. The presence of the protocol, file
* extension, etc. may produce false positives.
* This is useful for checking if the evidence contains a specific string. The presence of the protocol, file extension, etc.
* may produce false positives.
*
* <p>
* Example, given the following input:</p>
@@ -385,7 +385,7 @@ public class EvidenceCollection implements Serializable, Iterable<Evidence> {
final List<String> data = UrlStringUtils.extractImportantUrlData(part);
sb.append(' ').append(StringUtils.join(data, ' '));
} catch (MalformedURLException ex) {
LOGGER.log(Level.FINE, "error parsing " + part, ex);
LOGGER.debug("error parsing {}", part, ex);
sb.append(' ').append(part);
}
} else {

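A hypothetical sketch of the weighting idea described in the reflowed javadoc above; the addEvidence and addWeighting signatures are assumed for illustration rather than taken from this diff.

import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.EvidenceCollection;

public class WeightingDemo {
    public static void main(String[] args) {
        final EvidenceCollection vendorEvidence = new EvidenceCollection();
        // evidence gathered from the JAR manifest
        vendorEvidence.addEvidence("Manifest", "Implementation-Vendor", "Apache", Confidence.HIGHEST);
        // seeing "apache" again in the package names boosts the Lucene score for that term
        vendorEvidence.addWeighting("apache");
    }
}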
View File

@@ -20,9 +20,9 @@ package org.owasp.dependencycheck.dependency;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.owasp.dependencycheck.data.cpe.IndexEntry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A record containing information about vulnerable software. This is referenced from a vulnerability.
@@ -34,7 +34,7 @@ public class VulnerableSoftware extends IndexEntry implements Serializable, Comp
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(VulnerableSoftware.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(VulnerableSoftware.class);
/**
* The serial version UID.
*/
@@ -49,17 +49,15 @@ public class VulnerableSoftware extends IndexEntry implements Serializable, Comp
try {
parseName(cpe);
} catch (UnsupportedEncodingException ex) {
final String msg = String.format("Character encoding is unsupported for CPE '%s'.", cpe);
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.warn("Character encoding is unsupported for CPE '{}'.", cpe);
LOGGER.debug("", ex);
setName(cpe);
}
}
/**
* <p>
* Parses a name attribute value, from the cpe.xml, into its corresponding parts: vendor, product, version,
* revision.</p>
* Parses a name attribute value, from the cpe.xml, into its corresponding parts: vendor, product, version, update.</p>
* <p>
* Example:</p>
* <code>&nbsp;&nbsp;&nbsp;cpe:/a:apache:struts:1.1:rc2</code>
@@ -86,7 +84,7 @@ public class VulnerableSoftware extends IndexEntry implements Serializable, Comp
version = urlDecode(data[2]);
}
if (data.length >= 4) {
revision = urlDecode(data[3]);
update = urlDecode(data[3]);
}
if (data.length >= 5) {
edition = urlDecode(data[4]);
@@ -298,26 +296,26 @@ public class VulnerableSoftware extends IndexEntry implements Serializable, Comp
this.version = version;
}
/**
* The product revision version.
* The product update version.
*/
private String revision;
private String update;
/**
* Get the value of revision.
* Get the value of update.
*
* @return the value of revision
* @return the value of update
*/
public String getRevision() {
return revision;
public String getUpdate() {
return update;
}
/**
* Set the value of revision.
* Set the value of update.
*
* @param revision new value of revision
* @param update new value of update
*/
public void setRevision(String revision) {
this.revision = revision;
public void setUpdate(String update) {
this.update = update;
}
/**
* The product edition.
@@ -357,9 +355,20 @@ public class VulnerableSoftware extends IndexEntry implements Serializable, Comp
try {
result = URLDecoder.decode(text, "ASCII");
} catch (UnsupportedEncodingException ex1) {
result = URLDecoder.decode(text);
result = defaultUrlDecode(text);
}
}
return result;
}
/**
* Call {@link java.net.URLDecoder#decode(String)} to URL decode using the default encoding.
*
* @param text www-form-encoded URL to decode
* @return the newly decoded String
*/
@SuppressWarnings("deprecation")
private String defaultUrlDecode(final String text) {
return URLDecoder.decode(text);
}
}
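A hypothetical sketch of the rename from revision to update, using the example CPE from the javadoc; setCpe and the vendor/product/version accessors are assumed to exist on VulnerableSoftware or its IndexEntry parent, only getUpdate comes from this diff.

import org.owasp.dependencycheck.dependency.VulnerableSoftware;

public class VulnerableSoftwareDemo {
    public static void main(String[] args) {
        final VulnerableSoftware vs = new VulnerableSoftware();
        vs.setCpe("cpe:/a:apache:struts:1.1:rc2");
        System.out.println(vs.getVendor());   // apache
        System.out.println(vs.getProduct());  // struts
        System.out.println(vs.getVersion());  // 1.1
        System.out.println(vs.getUpdate());   // rc2 (previously getRevision())
    }
}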

View File

@@ -19,9 +19,9 @@ package org.owasp.dependencycheck.reporting;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.lang.StringEscapeUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An extremely simple wrapper around various escape utils to perform URL and HTML encoding within the reports. This
@@ -34,7 +34,7 @@ public class EscapeTool {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(EscapeTool.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(EscapeTool.class);
/**
* URL Encodes the provided text.
@@ -46,8 +46,8 @@ public class EscapeTool {
try {
return URLEncoder.encode(text, "UTF-8");
} catch (UnsupportedEncodingException ex) {
LOGGER.log(Level.WARNING, "UTF-8 is not supported?");
LOGGER.log(Level.INFO, null, ex);
LOGGER.warn("UTF-8 is not supported?");
LOGGER.info("", ex);
}
return "";
}

View File

@@ -30,8 +30,6 @@ import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import org.apache.velocity.context.Context;
@@ -40,6 +38,8 @@ import org.owasp.dependencycheck.analyzer.Analyzer;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The ReportGenerator is used to, as the name implies, generate reports. Internally the generator uses the Velocity
@@ -52,7 +52,7 @@ public class ReportGenerator {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(ReportGenerator.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(ReportGenerator.class);
/**
* An enumeration of the report formats.
@@ -235,9 +235,8 @@ public class ReportGenerator {
templatePath = templateName;
input = new FileInputStream(f);
} catch (FileNotFoundException ex) {
final String msg = "Unable to generate the report, the report template file could not be found.";
LOGGER.log(Level.SEVERE, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.error("Unable to generate the report, the report template file could not be found.");
LOGGER.debug("", ex);
}
} else {
templatePath = "templates/" + templateName + ".vsl";
@@ -262,20 +261,20 @@ public class ReportGenerator {
try {
writer.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
}
}
if (outputStream != null) {
try {
outputStream.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
}
}
try {
reader.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
}
}
}
@@ -311,7 +310,7 @@ public class ReportGenerator {
try {
outputSteam.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, "ignore", ex);
LOGGER.trace("ignore", ex);
}
}
}
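The generator merges Velocity templates against a VelocityContext. A standalone hedged sketch of that merge step using Velocity's evaluate() API, independent of the ReportGenerator class itself; the template text and context key are placeholders.

import java.io.StringReader;
import java.io.StringWriter;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;

public class VelocityMergeDemo {
    public static void main(String[] args) {
        final VelocityEngine engine = new VelocityEngine();
        engine.init();

        final VelocityContext context = new VelocityContext();
        context.put("applicationName", "demo-app");

        final StringWriter writer = new StringWriter();
        // evaluate() merges a template read from a Reader into the writer
        engine.evaluate(context, writer, "report", new StringReader("Report for $applicationName"));
        System.out.println(writer); // Report for demo-app
    }
}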

View File

@@ -17,14 +17,14 @@
*/
package org.owasp.dependencycheck.reporting;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.velocity.runtime.RuntimeServices;
import org.apache.velocity.runtime.log.LogChute;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>
* DependencyCheck uses {@link java.util.logging.Logger} as a logging framework, and Apache Velocity uses a custom
* DependencyCheck uses {@link org.slf4j.Logger} as a logging framework, and Apache Velocity uses a custom
* logging implementation that outputs to a file named velocity.log by default. This class is an implementation of a
custom Velocity logger that redirects all Velocity logging to the SLF4J Logger.
* </p><p>
@@ -39,7 +39,7 @@ public class VelocityLoggerRedirect implements LogChute {
/**
* The Logger.
*/
private static final Logger LOGGER = Logger.getLogger(VelocityLoggerRedirect.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(VelocityLoggerRedirect.class);
/**
* This will be invoked once by the LogManager.
@@ -58,7 +58,25 @@ public class VelocityLoggerRedirect implements LogChute {
* @param message the message to be logged
*/
public void log(int level, String message) {
LOGGER.log(getLevel(level), message);
switch (level) {
case TRACE_ID:
LOGGER.trace(message);
break;
case DEBUG_ID:
LOGGER.debug(message);
break;
case INFO_ID:
LOGGER.info(message);
break;
case WARN_ID:
LOGGER.warn(message);
break;
case ERROR_ID:
LOGGER.error(message);
break;
default:
LOGGER.info(message);
}
}
/**
@@ -70,7 +88,25 @@ public class VelocityLoggerRedirect implements LogChute {
* @param t a throwable to log
*/
public void log(int level, String message, Throwable t) {
LOGGER.log(getLevel(level), message, t);
switch (level) {
case TRACE_ID:
LOGGER.trace(message, t);
break;
case DEBUG_ID:
LOGGER.debug(message, t);
break;
case INFO_ID:
LOGGER.info(message, t);
break;
case WARN_ID:
LOGGER.warn(message, t);
break;
case ERROR_ID:
LOGGER.error(message, t);
break;
default:
LOGGER.info(message, t);
}
}
/**
@@ -82,27 +118,4 @@ public class VelocityLoggerRedirect implements LogChute {
public boolean isLevelEnabled(int level) {
return true;
}
/**
* Maps Velocity log levels to {@link Logger} values.
*
* @param velocityLevel the logging level
* @return the logging level
*/
private Level getLevel(int velocityLevel) {
switch (velocityLevel) {
case TRACE_ID:
return Level.ALL;
case DEBUG_ID:
return Level.FINE;
case INFO_ID:
return Level.INFO;
case WARN_ID:
return Level.WARNING;
case ERROR_ID:
return Level.SEVERE;
default:
return Level.INFO;
}
}
}
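For the redirect above to take effect, Velocity has to be told to use it as its log system. A hedged sketch using the standard Velocity 1.x runtime property; whether ReportGenerator wires this up the same way is not shown in this diff.

import org.apache.velocity.app.VelocityEngine;
import org.apache.velocity.runtime.RuntimeConstants;

public class VelocityLoggingSetup {
    public static void main(String[] args) {
        final VelocityEngine engine = new VelocityEngine();
        // route Velocity's internal logging through the SLF4J-backed redirect
        // instead of the default velocity.log file
        engine.setProperty(RuntimeConstants.RUNTIME_LOG_LOGSYSTEM_CLASS,
                "org.owasp.dependencycheck.reporting.VelocityLoggerRedirect");
        engine.init();
    }
}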

View File

@@ -17,8 +17,8 @@
*/
package org.owasp.dependencycheck.suppression;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
@@ -33,7 +33,7 @@ public class SuppressionErrorHandler implements ErrorHandler {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(SuppressionErrorHandler.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(SuppressionErrorHandler.class);
/**
* Builds a prettier exception message.
@@ -70,7 +70,7 @@ public class SuppressionErrorHandler implements ErrorHandler {
*/
@Override
public void warning(SAXParseException ex) throws SAXException {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
}
/**

View File

@@ -25,11 +25,12 @@ import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
@@ -44,7 +45,7 @@ public class SuppressionParser {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(SuppressionParser.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(SuppressionParser.class);
/**
* JAXP Schema Language. Source: http://docs.oracle.com/javase/tutorial/jaxp/sax/validation.html
*/
@@ -71,14 +72,14 @@ public class SuppressionParser {
fis = new FileInputStream(file);
return parseSuppressionRules(fis);
} catch (IOException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new SuppressionParseException(ex);
} finally {
if (fis != null) {
try {
fis.close();
} catch (IOException ex) {
LOGGER.log(Level.FINE, "Unable to close stream", ex);
LOGGER.debug("Unable to close stream", ex);
}
}
}
@@ -113,16 +114,16 @@ public class SuppressionParser {
return handler.getSuppressionRules();
} catch (ParserConfigurationException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new SuppressionParseException(ex);
} catch (SAXException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new SuppressionParseException(ex);
} catch (FileNotFoundException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new SuppressionParseException(ex);
} catch (IOException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new SuppressionParseException(ex);
}
}
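A minimal, standalone sketch of the JAXP SAX plumbing the parser builds on; schema validation and the suppression rule handler itself are omitted, and the file name is a placeholder.

import java.io.File;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.helpers.DefaultHandler;

public class SaxParseDemo {
    public static void main(String[] args) throws Exception {
        final SAXParserFactory factory = SAXParserFactory.newInstance();
        factory.setNamespaceAware(true);
        final SAXParser parser = factory.newSAXParser();
        // a no-op handler stands in for the suppression rule handler used above
        parser.parse(new File("suppression.xml"), new DefaultHandler());
    }
}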

View File

@@ -21,9 +21,9 @@ import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
@@ -34,7 +34,7 @@ public final class DBUtils {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(DBUtils.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(DBUtils.class);
/**
* Private constructor for a utility class.
@@ -76,7 +76,7 @@ public final class DBUtils {
try {
statement.close();
} catch (SQLException ex) {
LOGGER.log(Level.FINEST, statement.toString(), ex);
LOGGER.trace(statement.toString(), ex);
}
}
}
@@ -91,7 +91,7 @@ public final class DBUtils {
try {
rs.close();
} catch (SQLException ex) {
LOGGER.log(Level.FINEST, rs.toString(), ex);
LOGGER.trace(rs.toString(), ex);
}
}
}

View File

@@ -17,8 +17,6 @@
*/
package org.owasp.dependencycheck.utils;
import static org.owasp.dependencycheck.utils.FileUtils.getFileExtension;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.Closeable;
@@ -29,8 +27,6 @@ import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
@@ -40,6 +36,8 @@ import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.analyzer.exception.ArchiveExtractionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Set of utilities to extract files from archives.
@@ -51,7 +49,7 @@ public final class ExtractionUtil {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(ExtractionUtil.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(ExtractionUtil.class);
/**
* The buffer size to use when extracting files from the archive.
*/
@@ -94,7 +92,7 @@ public final class ExtractionUtil {
try {
fis = new FileInputStream(archive);
} catch (FileNotFoundException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new ExtractionException("Archive file was not found.", ex);
}
zis = new ZipInputStream(new BufferedInputStream(fis));
@@ -109,8 +107,7 @@ public final class ExtractionUtil {
}
} else {
final File file = new File(extractTo, entry.getName());
final String ext = getFileExtension(file.getName());
if (engine == null || engine.supportsExtension(ext)) {
if (engine == null || engine.accept(file)) {
BufferedOutputStream bos = null;
FileOutputStream fos;
try {
@@ -118,11 +115,11 @@ public final class ExtractionUtil {
bos = new BufferedOutputStream(fos, BUFFER_SIZE);
transferUsingBuffer(zis, bos);
} catch (FileNotFoundException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
final String msg = String.format("Unable to find file '%s'.", file.getName());
throw new ExtractionException(msg, ex);
} catch (IOException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
final String msg = String.format("IO Exception while parsing file '%s'.", file.getName());
throw new ExtractionException(msg, ex);
} finally {
@@ -133,7 +130,7 @@ public final class ExtractionUtil {
}
} catch (IOException ex) {
final String msg = String.format("Exception reading archive '%s'.", archive.getName());
LOGGER.log(Level.FINE, msg, ex);
LOGGER.debug("", ex);
throw new ExtractionException(msg, ex);
} finally {
closeStream(zis);
@@ -158,22 +155,20 @@ public final class ExtractionUtil {
try {
fis = new FileInputStream(archive);
} catch (FileNotFoundException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new ExtractionException("Archive file was not found.", ex);
}
try {
extractArchive(new ZipArchiveInputStream(new BufferedInputStream(
fis)), destination, filter);
} catch (ArchiveExtractionException ex) {
final String msg = String.format(
"Exception extracting archive '%s'.", archive.getName());
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.warn("Exception extracting archive '{}'.", archive.getName());
LOGGER.debug("", ex);
} finally {
try {
fis.close();
} catch (IOException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
}
}
}
@@ -228,9 +223,8 @@ public final class ExtractionUtil {
FilenameFilter filter, ArchiveEntry entry) throws ExtractionException {
final File file = new File(destination, entry.getName());
if (filter.accept(file.getParentFile(), file.getName())) {
final String extracting = String.format("Extracting '%s'",
LOGGER.debug("Extracting '{}'",
file.getPath());
LOGGER.fine(extracting);
BufferedOutputStream bos = null;
FileOutputStream fos = null;
try {
@@ -239,12 +233,12 @@ public final class ExtractionUtil {
bos = new BufferedOutputStream(fos, BUFFER_SIZE);
transferUsingBuffer(input, bos);
} catch (FileNotFoundException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
final String msg = String.format("Unable to find file '%s'.",
file.getName());
throw new ExtractionException(msg, ex);
} catch (IOException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
final String msg = String
.format("IO Exception while parsing file '%s'.",
file.getName());
@@ -283,7 +277,7 @@ public final class ExtractionUtil {
try {
stream.close();
} catch (IOException ex) {
LOGGER.log(Level.FINEST, null, ex);
LOGGER.trace("", ex);
}
}
}
@@ -306,5 +300,4 @@ public final class ExtractionUtil {
}
}
}
}
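
The hunks above replace java.util.logging calls with SLF4J's parameterized logging: String.format plus LOGGER.log(Level.X, ...) becomes a single call with {} placeholders, and the throwable is passed as the last argument. A minimal, self-contained sketch of that before/after pattern, assuming only slf4j-api on the classpath; the class name and archive name are illustrative, not part of the project:

import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public final class LoggingMigrationSketch {
    private static final Logger LOGGER = LoggerFactory.getLogger(LoggingMigrationSketch.class);

    public static void main(String[] args) {
        final String archiveName = "example.zip"; // hypothetical value
        try {
            throw new IOException("simulated failure");
        } catch (IOException ex) {
            // Before: LOGGER.log(Level.WARNING, String.format("Exception extracting archive '%s'.", archiveName));
            //         LOGGER.log(Level.FINE, null, ex);
            // After: the placeholder is only substituted if WARN is enabled, and the stack trace stays at DEBUG.
            LOGGER.warn("Exception extracting archive '{}'.", archiveName);
            LOGGER.debug("", ex);
        }
    }
}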

View File

@@ -0,0 +1,138 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2015 Institute for Defense Analyses. All Rights Reserved.
*/
package org.owasp.dependencycheck.utils;
import org.apache.commons.io.IOCase;
import org.apache.commons.io.filefilter.IOFileFilter;
import org.apache.commons.io.filefilter.NameFileFilter;
import org.apache.commons.io.filefilter.OrFileFilter;
import org.apache.commons.io.filefilter.SuffixFileFilter;
import java.io.FileFilter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* <p>
* Utility class for building useful {@link FileFilter} instances for
* {@link org.owasp.dependencycheck.analyzer.AbstractFileTypeAnalyzer} implementations. The built filter uses {@link OrFileFilter}
* to logically OR the given filter conditions. Example usage:</p>
*
* <pre>
* FileFilter filter = FileFilterBuilder.newInstance().addExtensions("jar", "war").build();
* </pre>
*
* @author Dale Visser <dvisser@ida.org>
* @see <a href="https://en.wikipedia.org/wiki/Builder_pattern">Builder pattern</a>
*/
public class FileFilterBuilder {
/**
* A set of filenames to filter.
*/
private final Set<String> filenames = new HashSet<String>();
/**
* A set of extensions to filter.
*/
private final Set<String> extensions = new HashSet<String>();
/**
* An array list of file filters.
*/
private final List<IOFileFilter> fileFilters = new ArrayList<IOFileFilter>();
/**
* Create a new instance and return it. This method is for convenience in using the builder pattern within a single statement.
*
* @return a new builder instance
*/
public static FileFilterBuilder newInstance() {
return new FileFilterBuilder();
}
/**
* Add to the set of filenames to accept for analysis. Case-sensitivity is assumed.
*
* @param names one or more filenames to accept for analysis
* @return this builder
*/
public FileFilterBuilder addFilenames(String... names) {
filenames.addAll(Arrays.asList(names));
return this;
}
/**
* Add to the set of file extensions to accept for analysis. Case-insensitivity is assumed.
*
* @param extensions one or more file extensions to accept for analysis
* @return this builder
*/
public FileFilterBuilder addExtensions(String... extensions) {
return this.addExtensions(Arrays.asList(extensions));
}
/**
* Add to the set of file extensions to accept for analysis. Case-insensitivity is assumed.
*
* @param extensions one or more file extensions to accept for analysis
* @return this builder
*/
public FileFilterBuilder addExtensions(Iterable<String> extensions) {
for (String extension : extensions) {
// Ultimately, SuffixFileFilter will be used, and the "." needs to be explicit.
this.extensions.add(extension.startsWith(".") ? extension : "." + extension);
}
return this;
}
/**
* Add to a list of {@link IOFileFilter} instances to consult for whether to accept a file for analysis.
*
* @param filters one or more file filters to consult for whether to accept for analysis
* @return this builder
*/
public FileFilterBuilder addFileFilters(IOFileFilter... filters) {
fileFilters.addAll(Arrays.asList(filters));
return this;
}
/**
* Builds the filter and returns it.
*
* @return a filter that is the logical OR of all the conditions provided by the add... methods
* @throws IllegalStateException if no add... method has been called with one or more arguments
*/
public FileFilter build() {
if (filenames.isEmpty() && extensions.isEmpty() && fileFilters.isEmpty()) {
throw new IllegalStateException("May only be invoked after at least one add... method has been invoked.");
}
final OrFileFilter filter = new OrFileFilter();
if (!filenames.isEmpty()) {
filter.addFileFilter(new NameFileFilter(new ArrayList<String>(filenames)));
}
if (!extensions.isEmpty()) {
filter.addFileFilter(new SuffixFileFilter(new ArrayList<String>(extensions), IOCase.INSENSITIVE));
}
for (IOFileFilter iof : fileFilters) {
filter.addFileFilter(iof);
}
return filter;
}
}
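
A short usage sketch for the builder defined above; the file names are made up for illustration, and running it assumes commons-io and this class on the classpath:

import java.io.File;
import java.io.FileFilter;
import org.owasp.dependencycheck.utils.FileFilterBuilder;

public class FileFilterBuilderUsage {
    public static void main(String[] args) {
        final FileFilter filter = FileFilterBuilder.newInstance()
                .addFilenames("CMakeLists.txt")  // exact, case-sensitive file names
                .addExtensions("cmake", ".jar")  // a leading "." is added when missing; matching is case-insensitive
                .build();                        // throws IllegalStateException if nothing was added

        System.out.println(filter.accept(new File("CMakeLists.txt"))); // true (name match)
        System.out.println(filter.accept(new File("test.CMAKE")));     // true (case-insensitive suffix match)
        System.out.println(filter.accept(new File("readme.md")));      // false
    }
}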

View File

@@ -24,11 +24,12 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
@@ -43,7 +44,7 @@ public class PomParser {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(PomParser.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(PomParser.class);
/**
* Parses the given xml file and returns a Model object containing only the fields dependency-check requires.
@@ -58,14 +59,14 @@ public class PomParser {
fis = new FileInputStream(file);
return parse(fis);
} catch (IOException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new PomParseException(ex);
} finally {
if (fis != null) {
try {
fis.close();
} catch (IOException ex) {
LOGGER.log(Level.FINE, "Unable to close stream", ex);
LOGGER.debug("Unable to close stream", ex);
}
}
}
@@ -96,16 +97,16 @@ public class PomParser {
return handler.getModel();
} catch (ParserConfigurationException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new PomParseException(ex);
} catch (SAXException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new PomParseException(ex);
} catch (FileNotFoundException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new PomParseException(ex);
} catch (IOException ex) {
LOGGER.log(Level.FINE, null, ex);
LOGGER.debug("", ex);
throw new PomParseException(ex);
}
}

View File

@@ -20,12 +20,12 @@ package org.owasp.dependencycheck.xml.pom;
import java.io.File;
import java.io.IOException;
import java.util.jar.JarFile;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.ZipEntry;
import org.owasp.dependencycheck.analyzer.JarAnalyzer;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
@@ -41,15 +41,14 @@ public final class PomUtils {
/**
* The logger.
*/
private static final Logger LOGGER = Logger.getLogger(PomUtils.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(PomUtils.class);
/**
* Reads in the specified POM and converts it to a Model.
*
* @param file the pom.xml file
* @return returns a
* @throws AnalysisException is thrown if there is an exception extracting or parsing the POM
* {@link org.owasp.dependencycheck.jaxb.pom.generated.Model} object
* @throws AnalysisException is thrown if there is an exception extracting or parsing the POM {@link Model} object
*/
public static Model readPom(File file) throws AnalysisException {
Model model = null;
@@ -57,19 +56,16 @@ public final class PomUtils {
final PomParser parser = new PomParser();
model = parser.parse(file);
} catch (PomParseException ex) {
final String msg = String.format("Unable to parse pom '%s'", file.getPath());
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, "", ex);
LOGGER.warn("Unable to parse pom '{}'", file.getPath());
LOGGER.debug("", ex);
throw new AnalysisException(ex);
} catch (IOException ex) {
final String msg = String.format("Unable to parse pom '%s'(IO Exception)", file.getPath());
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, "", ex);
LOGGER.warn("Unable to parse pom '{}'(IO Exception)", file.getPath());
LOGGER.debug("", ex);
throw new AnalysisException(ex);
} catch (Throwable ex) {
final String msg = String.format("Unexpected error during parsing of the pom '%s'", file.getPath());
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, "", ex);
LOGGER.warn("Unexpected error during parsing of the pom '{}'", file.getPath());
LOGGER.debug("", ex);
throw new AnalysisException(ex);
}
return model;
@@ -81,8 +77,7 @@ public final class PomUtils {
* @param path the path to the pom.xml file within the jar file
* @param jar the jar file to extract the pom from
* @return returns a
* @throws AnalysisException is thrown if there is an exception extracting or parsing the POM
* {@link org.owasp.dependencycheck.jaxb.pom.generated.Model} object
* @throws AnalysisException is thrown if there is an exception extracting or parsing the POM {@link Model} object
*/
public static Model readPom(String path, JarFile jar) throws AnalysisException {
final ZipEntry entry = jar.getEntry(path);
@@ -91,21 +86,18 @@ public final class PomUtils {
try {
final PomParser parser = new PomParser();
model = parser.parse(jar.getInputStream(entry));
LOGGER.fine(String.format("Read POM %s", path));
LOGGER.debug("Read POM {}", path);
} catch (SecurityException ex) {
final String msg = String.format("Unable to parse pom '%s' in jar '%s'; invalid signature", path, jar.getName());
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, null, ex);
LOGGER.warn("Unable to parse pom '{}' in jar '{}'; invalid signature", path, jar.getName());
LOGGER.debug("", ex);
throw new AnalysisException(ex);
} catch (IOException ex) {
final String msg = String.format("Unable to parse pom '%s' in jar '%s' (IO Exception)", path, jar.getName());
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, "", ex);
LOGGER.warn("Unable to parse pom '{}' in jar '{}' (IO Exception)", path, jar.getName());
LOGGER.debug("", ex);
throw new AnalysisException(ex);
} catch (Throwable ex) {
final String msg = String.format("Unexpected error during parsing of the pom '%s' in jar '%s'", path, jar.getName());
LOGGER.log(Level.WARNING, msg);
LOGGER.log(Level.FINE, "", ex);
LOGGER.warn("Unexpected error during parsing of the pom '{}' in jar '{}'", path, jar.getName());
LOGGER.debug("", ex);
throw new AnalysisException(ex);
}
}

View File

@@ -13,4 +13,7 @@ org.owasp.dependencycheck.analyzer.NexusAnalyzer
org.owasp.dependencycheck.analyzer.NuspecAnalyzer
org.owasp.dependencycheck.analyzer.AssemblyAnalyzer
org.owasp.dependencycheck.analyzer.PythonDistributionAnalyzer
org.owasp.dependencycheck.analyzer.PythonPackageAnalyzer
org.owasp.dependencycheck.analyzer.PythonPackageAnalyzer
org.owasp.dependencycheck.analyzer.AutoconfAnalyzer
org.owasp.dependencycheck.analyzer.OpenSSLAnalyzer
org.owasp.dependencycheck.analyzer.CMakeAnalyzer
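
This list appears to be a ServiceLoader provider-configuration file (the file path is not shown in this view). On that assumption, the three analyzers appended here would be discovered at runtime roughly as sketched below; the Analyzer service interface name is inferred from the package and is not confirmed by this diff:

import java.util.ServiceLoader;
import org.owasp.dependencycheck.analyzer.Analyzer;

public class AnalyzerDiscoverySketch {
    public static void main(String[] args) {
        // Iterates every analyzer registered under META-INF/services, which would include
        // the newly added AutoconfAnalyzer, OpenSSLAnalyzer and CMakeAnalyzer.
        for (Analyzer analyzer : ServiceLoader.load(Analyzer.class)) {
            System.out.println(analyzer.getName());
        }
    }
}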

View File

@@ -15,7 +15,7 @@
DELETE_REFERENCE=DELETE FROM reference WHERE cveid = ?
DELETE_SOFTWARE=DELETE FROM software WHERE cveid = ?
DELETE_VULNERABILITY=DELETE FROM vulnerability WHERE id = ?
CLEANUP_ORPHANS=DELETE FROM CpeEntry WHERE id not in (SELECT CPEEntryId FROM Software);
CLEANUP_ORPHANS=DELETE FROM cpeEntry WHERE id not in (SELECT CPEEntryId FROM software);
INSERT_REFERENCE=INSERT INTO reference (cveid, name, url, source) VALUES (?, ?, ?, ?)
INSERT_SOFTWARE=INSERT INTO software (cveid, cpeEntryId, previousVersion) VALUES (?, ?, ?)
INSERT_CPE=INSERT INTO cpeEntry (cpe, vendor, product) VALUES (?, ?, ?)
@@ -34,3 +34,6 @@ SELECT_PROPERTY=SELECT id, value FROM properties WHERE id = ?
INSERT_PROPERTY=INSERT INTO properties (id, value) VALUES (?, ?)
UPDATE_PROPERTY=UPDATE properties SET value = ? WHERE id = ?
DELETE_PROPERTY=DELETE FROM properties WHERE id = ?
DELETE_UNUSED_DICT_CPE=DELETE FROM cpeEntry WHERE dictionaryEntry=true AND id NOT IN (SELECT cpeEntryId FROM software)
ADD_DICT_CPE=MERGE INTO cpeEntry (cpe, vendor, product, dictionaryEntry) KEY(cpe) VALUES(?,?,?,true)
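
A sketch of how the two new dictionary-CPE statements above would be executed over JDBC. The in-memory H2 URL, the sample CPE values, and the stripped-down CREATE TABLE statements are assumptions made only to keep the example runnable; they are not the project's real schema or code:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;

public class DictionaryCpeSketch {
    private static final String ADD_DICT_CPE =
            "MERGE INTO cpeEntry (cpe, vendor, product, dictionaryEntry) KEY(cpe) VALUES(?,?,?,true)";
    private static final String DELETE_UNUSED_DICT_CPE =
            "DELETE FROM cpeEntry WHERE dictionaryEntry=true AND id NOT IN (SELECT cpeEntryId FROM software)";

    public static void main(String[] args) throws SQLException {
        // Requires the H2 driver on the classpath; the schema below is a simplified stand-in.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:dcsketch")) {
            try (Statement ddl = conn.createStatement()) {
                ddl.execute("CREATE TABLE cpeEntry (id INT AUTO_INCREMENT PRIMARY KEY, cpe VARCHAR(250), "
                        + "vendor VARCHAR(255), product VARCHAR(255), dictionaryEntry BOOLEAN DEFAULT FALSE)");
                ddl.execute("CREATE TABLE software (cveid VARCHAR(20), cpeEntryId INT, previousVersion VARCHAR(50))");
            }
            // Upsert a dictionary entry keyed on the CPE string.
            try (PreparedStatement add = conn.prepareStatement(ADD_DICT_CPE)) {
                add.setString(1, "cpe:/a:apache:struts:2.3.16.3");
                add.setString(2, "apache");
                add.setString(3, "struts");
                add.executeUpdate();
            }
            // Remove dictionary entries that no vulnerability record references.
            try (PreparedStatement cleanup = conn.prepareStatement(DELETE_UNUSED_DICT_CPE)) {
                System.out.println("rows removed: " + cleanup.executeUpdate());
            }
        }
    }
}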

View File

@@ -0,0 +1,7 @@
--the following is not currently used.
--ALTER TABLE cpeEntry ADD COLUMN IF NOT EXISTS dictionaryEntry BOOLEAN;
--ALTER TABLE cpeEntry ALTER COLUMN dictionaryEntry SET DEFAULT FALSE;
--UPDATE cpeEntry SET dictionaryEntry=false;
--UPDATE Properties SET value='3.0' WHERE ID='version';

View File

@@ -138,4 +138,19 @@
<gav regex="true">com.microsoft.bingads:microsoft.bingads:.*</gav>
<cpe>cpe:/a:microsoft:bing</cpe>
</suppress>
<suppress base="true">
<notes><![CDATA[
Oracle Jersey is flagged as glassfish.
]]></notes>
<gav regex="true">.*jersey.*</gav>
<cpe>cpe:/a:oracle:glassfish_server</cpe>
<cpe>cpe:/a:oracle:glassfish</cpe>
</suppress>
<suppress base="true">
<notes><![CDATA[
Oracle HK2 is flagged as glassfish.
]]></notes>
<gav regex="true">.*\bhk2\b.*</gav>
<cpe>cpe:/a:oracle:glassfish</cpe>
</suppress>
</suppressions>

View File

@@ -1,10 +0,0 @@
analyzer.AssemblyAnalyzer.notdeployed=GrokAssembly didn't get deployed
analyzer.AssemblyAnalyzer.grokassembly.stderr=Error from GrokAssembly: {0}
analyzer.AssemblyAnalyzer.notassembly={0} is not a .NET assembly or executable and as such cannot be analyzed by dependency-check
analyzer.AssemblyAnalyzer.grokassembly.rc=Return code {0} from GrokAssembly
analyzer.AssemblyAnalyzer.grokassembly.deployed=Extracted GrokAssembly.exe to {0}
analyzer.AssemblyAnalyzer.grokassembly.notdeployed=Could not extract GrokAssembly.exe: {0}
analyzer.AssemblyAnalyzer.grokassembly.initialization.failed=An error occurred with the .NET AssemblyAnalyzer; \
this can be ignored unless you are scanning .NET DLLs. Please see the log for more details.
analyzer.AssemblyAnalyzer.grokassembly.initialization.message=Could not execute GrokAssembly {0}
analyzer.AssemblyAnalyzer.grokassembly.notdeleted=Can't delete temporary GrokAssembly.exe

View File

@@ -17,7 +17,7 @@ engine.version.url=http://jeremylong.github.io/DependencyCheck/current.txt
# below contains a %s then the data.directory will replace the %s.
data.directory=[JAR]/data
#if the filename has a %s it will be replaced with the current expected version
data.file_name=cve.%s.h2.db
data.file_name=dc.h2.db
data.version=2.9
data.connection_string=jdbc:h2:file:%s;FILE_LOCK=SERIALIZED;AUTOCOMMIT=ON;
#data.connection_string=jdbc:mysql://localhost:3306/dependencycheck
@@ -53,6 +53,8 @@ cve.url-1.2.base=https://nvd.nist.gov/download/nvdcve-%d.xml.gz
cve.url-2.0.base=https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml.gz
#cve.url-2.0.base=http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml
cpe.validfordays=30
cpe.url=http://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.3.xml.gz
# file type analyzer settings:
analyzer.archive.enabled=true
@@ -70,3 +72,9 @@ analyzer.nexus.proxy=true
# the URL for searching search.maven.org for SHA-1 and whether it's enabled
analyzer.central.enabled=true
analyzer.central.url=http://search.maven.org/solrsearch/select
# the number of nested archives that will be searched.
archive.scan.depth=3
# use HEAD (default) or GET as HTTP request method for query timestamp
downloader.quick.query.timestamp=true
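
The last property controls whether the downloader checks a feed's timestamp with an HTTP HEAD request (the default) or a full GET. A rough illustration of the difference, using one of the NVD feed URLs above with the %d placeholder filled in with a year; the helper name is made up:

import java.net.HttpURLConnection;
import java.net.URL;

public class QuickQueryTimestampSketch {

    // Returns the Last-Modified timestamp of the resource (epoch millis, 0 if the header is absent).
    static long lastModified(String url, boolean quickQuery) throws Exception {
        final HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
        conn.setRequestMethod(quickQuery ? "HEAD" : "GET"); // HEAD skips downloading the body
        try {
            return conn.getLastModified();
        } finally {
            conn.disconnect();
        }
    }

    public static void main(String[] args) throws Exception {
        System.out.println(lastModified("https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-2015.xml.gz", true));
    }
}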

View File

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<xs:schema id="analysis" xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" targetNamespace="https://www.owasp.org/index.php/OWASP_Dependency_Check#1.2">
<xs:schema id="analysis" xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" targetNamespace="https://jeremylong.github.io/DependencyCheck/dependency-check.1.3.xsd">
<xs:element name="analysis">
<xs:complexType>
<xs:sequence minOccurs="0" maxOccurs="unbounded">
@@ -111,6 +111,8 @@
<xs:element name="name" type="xs:string" minOccurs="1" maxOccurs="1" />
<xs:element name="value" type="xs:string" minOccurs="1" maxOccurs="1" />
</xs:sequence>
<xs:attribute name="type" type="xs:string" use="required" />
<xs:attribute name="confidence" type="xs:string" use="required" />
</xs:complexType>
</xs:element>
</xs:sequence>
@@ -156,7 +158,13 @@
<xs:complexType>
<xs:sequence>
<xs:element name="name" type="xs:string" minOccurs="1" maxOccurs="1" />
<xs:element name="cvssScore" type="xs:string" minOccurs="1" maxOccurs="1" />
<xs:element name="cvssScore" type="xs:decimal" minOccurs="1" maxOccurs="1" />
<xs:element name="cvssAccessVector" type="xs:string" minOccurs="1" maxOccurs="1" />
<xs:element name="cvssAccessComplexity" type="xs:string" minOccurs="1" maxOccurs="1" />
<xs:element name="cvssAuthenticationr" type="xs:string" minOccurs="1" maxOccurs="1" />
<xs:element name="cvssConfidentialImpact" type="xs:string" minOccurs="1" maxOccurs="1" />
<xs:element name="cvssIntegrityImpact" type="xs:string" minOccurs="1" maxOccurs="1" />
<xs:element name="cvssAvailabilityImpact" type="xs:string" minOccurs="1" maxOccurs="1" />
<xs:element name="severity" type="xs:string" minOccurs="1" maxOccurs="1" />
<xs:element name="cwe" type="xs:string" minOccurs="0" maxOccurs="1" />
<xs:element name="description" type="xs:string" minOccurs="1" maxOccurs="1" />

View File

@@ -16,7 +16,7 @@ limitations under the License.
Copyright (c) 2012 Jeremy Long. All Rights Reserved.
@author Jeremy Long <jeremy.long@owasp.org>
@version 1.1
@version 1.2
*#
@@ -560,7 +560,7 @@ arising out of or in connection with the use of this tool, the analysis performe
<th class="sortable" data-sort="int" title="The highest CVE Severity">Highest Severity</th>
<th class="sortable" data-sort="int" title="The number of Common Vulnerability and Exposure (CVE) entries">CVE Count</th>
<th class="sortable" data-sort="string" title="The confidence rating dependency-check has for the identified CPE">CPE Confidence</th>
<th class="sortable" data-sort="int" title="The count of evidence used to identify the CPE">Evidence Count</th>
<th class="sortable" data-sort="int" title="The count of evidence collected to identify the CPE">Evidence Count</th>
</tr></thead>
#foreach($dependency in $dependencies)
#set($lnkcnt=$lnkcnt+1)
@@ -757,7 +757,7 @@ arising out of or in connection with the use of this tool, the analysis performe
#else
Medium
#end
<br/>CVSS Score: $vuln.cvssScore
<br/>CVSS Score: $vuln.cvssScore (AV:$enc.html($vuln.cvssAccessVector.substring(0,1))/AC:$enc.html($vuln.cvssAccessComplexity.substring(0,1))/Au:$enc.html($vuln.cvssAuthentication.substring(0,1))/C:$enc.html($vuln.cvssConfidentialityImpact.substring(0,1))/I:$enc.html($vuln.cvssIntegrityImpact.substring(0,1))/A:$enc.html($vuln.cvssAvailabilityImpact.substring(0,1)))
#if ($vuln.cwe)
<br/>CWE: $vuln.cwe
#end</p>
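
The added template line renders the CVSS base vector next to the score by taking the first letter of each metric, e.g. (AV:N/AC:L/Au:N/C:P/I:P/A:P). The same formatting in plain Java, with sample metric values that are not taken from any real CVE:

public class CvssVectorSketch {

    // Builds the abbreviated CVSS v2 base vector the report template prints after the score.
    static String vector(String accessVector, String accessComplexity, String authentication,
                         String confidentiality, String integrity, String availability) {
        return String.format("(AV:%s/AC:%s/Au:%s/C:%s/I:%s/A:%s)",
                accessVector.substring(0, 1), accessComplexity.substring(0, 1),
                authentication.substring(0, 1), confidentiality.substring(0, 1),
                integrity.substring(0, 1), availability.substring(0, 1));
    }

    public static void main(String[] args) {
        // Prints (AV:N/AC:L/Au:N/C:P/I:P/A:P)
        System.out.println(vector("NETWORK", "LOW", "NONE", "PARTIAL", "PARTIAL", "PARTIAL"));
    }
}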

View File

@@ -16,9 +16,10 @@ limitations under the License.
Copyright (c) 2012 Jeremy Long. All Rights Reserved.
@author Jeremy Long <jeremy.long@owasp.org>
@version 1.1
@version 1.2
*#<?xml version="1.0"?>
<analysis xmlns="https://www.owasp.org/index.php/OWASP_Dependency_Check#1.2">
<analysis xmlns="https://jeremylong.github.io/DependencyCheck/dependency-check.1.3.xsd">
<scanInfo>
<engineVersion>$version</engineVersion>
#foreach($prop in $properties.getMetaData().entrySet())
@@ -68,8 +69,22 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
</relatedDependencies>
#end
<evidenceCollected>
#foreach($evidence in $dependency.getEvidenceForDisplay())
<evidence>
#foreach($evidence in $dependency.getVendorEvidence())
<evidence type="vendor" confidence="$enc.xml($evidence.getConfidence().toString())">
<source>$enc.xml($evidence.getSource())</source>
<name>$enc.xml($evidence.getName())</name>
<value>$enc.xml($evidence.getValue().trim())</value>
</evidence>
#end
#foreach($evidence in $dependency.getProductEvidence())
<evidence type="product" confidence="$enc.xml($evidence.getConfidence().toString())">
<source>$enc.xml($evidence.getSource())</source>
<name>$enc.xml($evidence.getName())</name>
<value>$enc.xml($evidence.getValue().trim())</value>
</evidence>
#end
#foreach($evidence in $dependency.getVersionEvidence())
<evidence type="version" confidence="$enc.xml($evidence.getConfidence().toString())">
<source>$enc.xml($evidence.getSource())</source>
<name>$enc.xml($evidence.getName())</name>
<value>$enc.xml($evidence.getValue().trim())</value>
@@ -108,6 +123,12 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<vulnerability>
<name>$enc.xml($vuln.name)</name>
<cvssScore>$vuln.cvssScore</cvssScore>
<cvssAccessVector>$enc.xml($vuln.cvssAccessVector)</cvssAccessVector>
<cvssAccessComplexity>$enc.xml($vuln.cvssAccessComplexity)</cvssAccessComplexity>
<cvssAuthenticationr>$enc.xml($vuln.cvssAuthentication)</cvssAuthenticationr>
<cvssConfidentialImpact>$enc.xml($vuln.cvssConfidentialityImpact)</cvssConfidentialImpact>
<cvssIntegrityImpact>$enc.xml($vuln.cvssIntegrityImpact)</cvssIntegrityImpact>
<cvssAvailabilityImpact>$enc.xml($vuln.cvssAvailabilityImpact)</cvssAvailabilityImpact>
#if ($vuln.cvssScore<4.0)
<severity>Low</severity>
#elseif ($vuln.cvssScore>=7.0)

View File

@@ -35,6 +35,15 @@ public class BaseTest {
@AfterClass
public static void tearDownClass() throws Exception {
File f = new File("./target/data/dc.h2.db");
if (f.exists() && f.isFile() && f.length() < 71680) {
System.err.println("------------------------------------------------");
System.err.println("------------------------------------------------");
System.err.println("I broke the build");
System.err.println("------------------------------------------------");
System.err.println("------------------------------------------------");
}
Settings.cleanup(true);
}

View File

@@ -26,13 +26,12 @@ import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.suppression.SuppressionParseException;
import org.owasp.dependencycheck.suppression.SuppressionRule;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.LoggerFactory;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
@@ -93,9 +92,9 @@ public class AbstractSuppressionAnalyzerTest extends BaseTest {
final String uri = this.getClass().getClassLoader().getResource("suppressions.xml").toURI().toURL().toString();
Settings.setString(Settings.KEYS.SUPPRESSION_FILE, uri);
} catch (URISyntaxException ex) {
Logger.getLogger(AbstractSuppressionAnalyzerTest.class.getName()).log(Level.SEVERE, null, ex);
LoggerFactory.getLogger(AbstractSuppressionAnalyzerTest.class).error("", ex);
} catch (MalformedURLException ex) {
Logger.getLogger(AbstractSuppressionAnalyzerTest.class.getName()).log(Level.SEVERE, null, ex);
LoggerFactory.getLogger(AbstractSuppressionAnalyzerTest.class).error("", ex);
}
}

View File

@@ -20,8 +20,7 @@ package org.owasp.dependencycheck.analyzer;
import java.io.File;
import java.util.HashSet;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.*;
import org.junit.Test;
import org.owasp.dependencycheck.BaseTest;
import org.owasp.dependencycheck.Engine;
@@ -39,7 +38,7 @@ public class ArchiveAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
* Test of getSupportedExtensions method, of class ArchiveAnalyzer.
*/
@Test
public void testGetSupportedExtensions() {
public void testSupportsExtensions() {
ArchiveAnalyzer instance = new ArchiveAnalyzer();
Set<String> expResult = new HashSet<String>();
expResult.add("zip");
@@ -52,8 +51,9 @@ public class ArchiveAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
expResult.add("tar");
expResult.add("gz");
expResult.add("tgz");
Set result = instance.getSupportedExtensions();
assertEquals(expResult, result);
for (String ext : expResult) {
assertTrue(ext, instance.accept(new File("test." + ext)));
}
}
/**
@@ -72,28 +72,9 @@ public class ArchiveAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
*/
@Test
public void testSupportsExtension() {
String extension = "7z"; //not supported
String extension = "test.7z"; //not supported
ArchiveAnalyzer instance = new ArchiveAnalyzer();
boolean expResult = false;
boolean result = instance.supportsExtension(extension);
assertEquals(expResult, result);
extension = "war"; //supported
expResult = true;
result = instance.supportsExtension(extension);
assertEquals(expResult, result);
extension = "ear"; //supported
result = instance.supportsExtension(extension);
assertEquals(expResult, result);
extension = "zip"; //supported
result = instance.supportsExtension(extension);
assertEquals(expResult, result);
extension = "nupkg"; //supported
result = instance.supportsExtension(extension);
assertEquals(expResult, result);
assertFalse(extension, instance.accept(new File(extension)));
}
/**
@@ -113,6 +94,8 @@ public class ArchiveAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
@Test
public void testInitialize() throws Exception {
ArchiveAnalyzer instance = new ArchiveAnalyzer();
instance.setEnabled(true);
instance.setFilesMatched(true);
instance.initialize();
instance.close();
@@ -127,7 +110,7 @@ public class ArchiveAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
public void testAnalyze() throws Exception {
ArchiveAnalyzer instance = new ArchiveAnalyzer();
//trick the analyzer into thinking it is active.
instance.supportsExtension("ear");
instance.accept(new File("test.ear"));
try {
instance.initialize();
File file = BaseTest.getResourceAsFile(this, "daytrader-ear-2.1.7.ear");
@@ -158,7 +141,7 @@ public class ArchiveAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
public void testAnalyzeTar() throws Exception {
ArchiveAnalyzer instance = new ArchiveAnalyzer();
//trick the analyzer into thinking it is active so that it will initialize
instance.supportsExtension("tar");
instance.accept(new File("test.tar"));
try {
instance.initialize();
@@ -189,7 +172,7 @@ public class ArchiveAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
@Test
public void testAnalyzeTarGz() throws Exception {
ArchiveAnalyzer instance = new ArchiveAnalyzer();
instance.supportsExtension("zip"); //ensure analyzer is "enabled"
instance.accept(new File("zip")); //ensure analyzer is "enabled"
try {
instance.initialize();
@@ -242,7 +225,7 @@ public class ArchiveAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
@Test
public void testAnalyzeTgz() throws Exception {
ArchiveAnalyzer instance = new ArchiveAnalyzer();
instance.supportsExtension("zip"); //ensure analyzer is "enabled"
instance.accept(new File("zip")); //ensure analyzer is "enabled"
try {
instance.initialize();

View File

@@ -18,8 +18,6 @@
package org.owasp.dependencycheck.analyzer;
import java.io.File;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.junit.After;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@@ -34,6 +32,8 @@ import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Evidence;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Tests for the AssemblyAnalyzer.
@@ -43,7 +43,9 @@ import org.owasp.dependencycheck.utils.Settings;
*/
public class AssemblyAnalyzerTest extends BaseTest {
private static final Logger LOGGER = Logger.getLogger(AssemblyAnalyzerTest.class.getName());
private static final Logger LOGGER = LoggerFactory.getLogger(AssemblyAnalyzerTest.class);
private static final String LOG_KEY = "org.slf4j.simpleLogger.org.owasp.dependencycheck.analyzer.AssemblyAnalyzer";
AssemblyAnalyzer analyzer;
@@ -56,13 +58,13 @@ public class AssemblyAnalyzerTest extends BaseTest {
public void setUp() throws Exception {
try {
analyzer = new AssemblyAnalyzer();
analyzer.supportsExtension("dll");
analyzer.accept(new File("test.dll")); // trick into "thinking it is active"
analyzer.initialize();
} catch (Exception e) {
if (e.getMessage().contains("Could not execute .NET AssemblyAnalyzer")) {
LOGGER.log(Level.WARNING, "Exception setting up AssemblyAnalyzer. Tests will be incomplete");
LOGGER.warn("Exception setting up AssemblyAnalyzer. Tests will be incomplete");
} else {
LOGGER.log(Level.WARNING, "Exception setting up AssemblyAnalyzer. Tests will be incomplete", e);
LOGGER.warn("Exception setting up AssemblyAnalyzer. Tests will be incomplete", e);
}
Assume.assumeNoException("Is mono installed? TESTS WILL BE INCOMPLETE", e);
}
@@ -113,11 +115,8 @@ public class AssemblyAnalyzerTest extends BaseTest {
@Test
public void testNonexistent() {
Level oldLevel = Logger.getLogger(AssemblyAnalyzer.class.getName()).getLevel();
Level oldDependency = Logger.getLogger(Dependency.class.getName()).getLevel();
// Tweak the log level so the warning doesn't show in the console
Logger.getLogger(AssemblyAnalyzer.class.getName()).setLevel(Level.OFF);
Logger.getLogger(Dependency.class.getName()).setLevel(Level.OFF);
String oldProp = System.getProperty(LOG_KEY, "info");
//File f = new File(AssemblyAnalyzerTest.class.getClassLoader().getResource("log4net.dll").getPath());
File f = BaseTest.getResourceAsFile(this, "log4net.dll");
File test = new File(f.getParent(), "nonexistent.dll");
@@ -129,8 +128,7 @@ public class AssemblyAnalyzerTest extends BaseTest {
} catch (AnalysisException ae) {
assertEquals("File does not exist", ae.getMessage());
} finally {
Logger.getLogger(AssemblyAnalyzer.class.getName()).setLevel(oldLevel);
Logger.getLogger(Dependency.class.getName()).setLevel(oldDependency);
System.setProperty(LOG_KEY, oldProp);
}
}
@@ -151,20 +149,20 @@ public class AssemblyAnalyzerTest extends BaseTest {
Settings.setString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, "/yooser/bine/mono");
}
Level oldLevel = Logger.getLogger(AssemblyAnalyzer.class.getName()).getLevel();
String oldProp = System.getProperty(LOG_KEY, "info");
try {
// Tweak the logging to swallow the warning when testing
Logger.getLogger(AssemblyAnalyzer.class.getName()).setLevel(Level.OFF);
System.setProperty(LOG_KEY, "error");
// Have to make a NEW analyzer because during setUp, it would have gotten the correct one
AssemblyAnalyzer aanalyzer = new AssemblyAnalyzer();
aanalyzer.supportsExtension("dll");
aanalyzer.accept(new File("test.dll")); // trick into "thinking it is active"
aanalyzer.initialize();
fail("Expected an AnalysisException");
} catch (AnalysisException ae) {
assertEquals("An error occured with the .NET AssemblyAnalyzer", ae.getMessage());
} finally {
System.setProperty(LOG_KEY, oldProp);
// Recover the logger
Logger.getLogger(AssemblyAnalyzer.class.getName()).setLevel(oldLevel);
// Now recover the way we came in. If we had to set a System property, delete it. Otherwise,
// reset the old value
if (oldValue == null) {

View File

@@ -0,0 +1,176 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2015 Institute for Defense Analyses. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.owasp.dependencycheck.BaseTest;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import java.io.File;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* Unit tests for AutoconfAnalyzer. The test resources under autoconf/ were
* obtained from outside open source software projects. Links to those projects
* are given below.
*
* @author Dale Visser <dvisser@ida.org>
* @see <a href="http://readable.sourceforge.net/">Readable Lisp S-expressions
* Project</a>
* @see <a href="https://gnu.org/software/binutils/">GNU Binutils</a>
* @see <a href="https://gnu.org/software/ghostscript/">GNU Ghostscript</a>
*/
public class AutoconfAnalyzerTest extends BaseTest {
/**
* The analyzer to test.
*/
AutoconfAnalyzer analyzer;
private void assertCommonEvidence(Dependency result, String product,
String version, String vendor) {
assertProductAndVersion(result, product, version);
assertTrue("Expected vendor evidence to contain \"" + vendor + "\".",
result.getVendorEvidence().toString().contains(vendor));
}
private void assertProductAndVersion(Dependency result, String product,
String version) {
assertTrue("Expected product evidence to contain \"" + product + "\".",
result.getProductEvidence().toString().contains(product));
assertTrue("Expected version evidence to contain \"" + version + "\".",
result.getVersionEvidence().toString().contains(version));
}
/**
* Correctly setup the analyzer for testing.
*
* @throws Exception
* thrown if there is a problem
*/
@Before
public void setUp() throws Exception {
analyzer = new AutoconfAnalyzer();
analyzer.setFilesMatched(true);
analyzer.initialize();
}
/**
* Cleanup the analyzer's temp files, etc.
*
* @throws Exception
* thrown if there is a problem
*/
@After
public void tearDown() throws Exception {
analyzer.close();
analyzer = null;
}
/**
* Test whether expected evidence is gathered from Ghostscript's
* configure.ac.
*
* @throws AnalysisException
* is thrown when an exception occurs.
*/
@Test
public void testAnalyzeConfigureAC1() throws AnalysisException {
final Dependency result = new Dependency(BaseTest.getResourceAsFile(
this, "autoconf/ghostscript/configure.ac"));
analyzer.analyze(result, null);
assertCommonEvidence(result, "ghostscript", "8.62.0", "gnu");
}
/**
* Test whether expected evidence is gathered from Readable's configure.ac.
*
* @throws AnalysisException
* is thrown when an exception occurs.
*/
@Test
public void testAnalyzeConfigureAC2() throws AnalysisException {
final Dependency result = new Dependency(BaseTest.getResourceAsFile(
this, "autoconf/readable-code/configure.ac"));
analyzer.analyze(result, null);
assertReadableCodeEvidence(result);
}
private void assertReadableCodeEvidence(final Dependency result) {
assertCommonEvidence(result, "readable", "1.0.7", "dwheeler");
final String url = "http://readable.sourceforge.net/";
assertTrue("Expected product evidence to contain \"" + url + "\".",
result.getVendorEvidence().toString().contains(url));
}
/**
* Test whether expected evidence is gathered from GNU Binutil's configure.
*
* @throws AnalysisException
* is thrown when an exception occurs.
*/
@Test
public void testAnalyzeConfigureScript() throws AnalysisException {
final Dependency result = new Dependency(BaseTest.getResourceAsFile(
this, "autoconf/binutils/configure"));
analyzer.analyze(result, null);
assertProductAndVersion(result, "binutils", "2.25.51");
}
/**
* Test whether expected evidence is gathered from GNU Ghostscript's
* configure.
*
* @throws AnalysisException
* is thrown when an exception occurs.
*/
@Test
public void testAnalyzeReadableConfigureScript() throws AnalysisException {
final Dependency result = new Dependency(BaseTest.getResourceAsFile(
this, "autoconf/readable-code/configure"));
analyzer.analyze(result, null);
assertReadableCodeEvidence(result);
}
/**
* Test of getName method, of {@link AutoconfAnalyzer}.
*/
@Test
public void testGetName() {
assertEquals("Analyzer name wrong.", "Autoconf Analyzer",
analyzer.getName());
}
/**
* Test of {@link AutoconfAnalyzer#accept(File)}.
*/
@Test
public void testSupportsFileExtension() {
assertTrue("Should support \"ac\" extension.",
analyzer.accept(new File("configure.ac")));
assertTrue("Should support \"in\" extension.",
analyzer.accept(new File("configure.in")));
assertTrue("Should support \"configure\" extension.",
analyzer.accept(new File("configure")));
}
}

View File

@@ -0,0 +1,152 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2015 Institute for Defense Analyses. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.owasp.dependencycheck.BaseTest;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.dependency.Dependency;
import java.io.File;
import java.util.List;
import java.util.regex.Pattern;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.*;
import org.owasp.dependencycheck.data.nvdcve.BaseDBTestCase;
/**
* Unit tests for CmakeAnalyzer.
*
* @author Dale Visser <dvisser@ida.org>
*/
public class CMakeAnalyzerTest extends BaseDBTestCase {
/**
* The package analyzer to test.
*/
CMakeAnalyzer analyzer;
/**
* Setup the CmakeAnalyzer.
*
* @throws Exception if there is a problem
*/
@Before
public void setUp() throws Exception {
super.setUp();
analyzer = new CMakeAnalyzer();
analyzer.setFilesMatched(true);
analyzer.initialize();
}
/**
* Cleanup any resources used.
*
* @throws Exception if there is a problem
*/
@After
public void tearDown() throws Exception {
analyzer.close();
analyzer = null;
}
/**
* Test of getName method, of class PythonPackageAnalyzer.
*/
@Test
public void testGetName() {
assertThat(analyzer.getName(), is(equalTo("CMake Analyzer")));
}
/**
* Test of supportsExtension method, of class PythonPackageAnalyzer.
*/
@Test
public void testAccept() {
assertTrue("Should support \"CMakeLists.txt\" name.",
analyzer.accept(new File("CMakeLists.txt")));
assertTrue("Should support \"cmake\" extension.",
analyzer.accept(new File("test.cmake")));
}
/**
* Test whether expected evidence is gathered from OpenCV's CMakeLists.txt.
*
* @throws AnalysisException is thrown when an exception occurs.
*/
@Test
public void testAnalyzeCMakeListsOpenCV() throws AnalysisException {
final Dependency result = new Dependency(BaseTest.getResourceAsFile(
this, "cmake/opencv/CMakeLists.txt"));
analyzer.analyze(result, null);
final String product = "OpenCV";
assertProductEvidence(result, product);
}
/**
* Test whether expected evidence is gathered from OpenCV's CMakeLists.txt.
*
* @throws AnalysisException is thrown when an exception occurs.
*/
@Test
public void testAnalyzeCMakeListsZlib() throws AnalysisException {
final Dependency result = new Dependency(BaseTest.getResourceAsFile(
this, "cmake/zlib/CMakeLists.txt"));
analyzer.analyze(result, null);
final String product = "zlib";
assertProductEvidence(result, product);
}
private void assertProductEvidence(Dependency result, String product) {
assertTrue("Expected product evidence to contain \"" + product + "\".",
result.getProductEvidence().toString().contains(product));
}
/**
* Test whether expected version evidence is gathered from OpenCV's third party cmake files.
*
* @throws AnalysisException is thrown when an exception occurs.
*/
@Test
public void testAnalyzeCMakeListsOpenCV3rdParty() throws AnalysisException, DatabaseException {
final Dependency result = new Dependency(BaseTest.getResourceAsFile(
this, "cmake/opencv/3rdparty/ffmpeg/ffmpeg_version.cmake"));
final Engine engine = new Engine();
analyzer.analyze(result, engine);
assertProductEvidence(result, "libavcodec");
assertVersionEvidence(result, "55.18.102");
assertFalse("ALIASOF_ prefix shouldn't be present.",
Pattern.compile("\\bALIASOF_\\w+").matcher(result.getProductEvidence().toString()).find());
final List<Dependency> dependencies = engine.getDependencies();
assertEquals("Number of additional dependencies should be 4.", 4, dependencies.size());
final Dependency last = dependencies.get(3);
assertProductEvidence(last, "libavresample");
assertVersionEvidence(last, "1.0.1");
}
private void assertVersionEvidence(Dependency result, String version) {
assertTrue("Expected version evidence to contain \"" + version + "\".",
result.getVersionEvidence().toString().contains(version));
}
}

View File

@@ -94,9 +94,9 @@ public class CPEAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
try {
//callDetermineCPE_full("struts2-core-2.3.16.3.jar", "cpe:/a:apache:struts:2.3.16.3", instance, fnAnalyzer, jarAnalyzer, hAnalyzer, fp);
callDetermineCPE_full("hazelcast-2.5.jar", null, instance, fnAnalyzer, jarAnalyzer, hAnalyzer, fp);
callDetermineCPE_full("spring-context-support-2.5.5.jar", "cpe:/a:vmware:springsource_spring_framework:2.5.5", instance, fnAnalyzer, jarAnalyzer, hAnalyzer, fp);
callDetermineCPE_full("spring-context-support-2.5.5.jar", "cpe:/a:springsource:spring_framework:2.5.5", instance, fnAnalyzer, jarAnalyzer, hAnalyzer, fp);
callDetermineCPE_full("spring-core-3.0.0.RELEASE.jar", "cpe:/a:vmware:springsource_spring_framework:3.0.0", instance, fnAnalyzer, jarAnalyzer, hAnalyzer, fp);
callDetermineCPE_full("org.mortbay.jetty.jar", "cpe:/a:mortbay_jetty:jetty:4.2", instance, fnAnalyzer, jarAnalyzer, hAnalyzer, fp);
callDetermineCPE_full("org.mortbay.jetty.jar", "cpe:/a:mortbay_jetty:jetty:4.2.27", instance, fnAnalyzer, jarAnalyzer, hAnalyzer, fp);
callDetermineCPE_full("jaxb-xercesImpl-1.5.jar", null, instance, fnAnalyzer, jarAnalyzer, hAnalyzer, fp);
callDetermineCPE_full("ehcache-core-2.2.0.jar", null, instance, fnAnalyzer, jarAnalyzer, hAnalyzer, fp);
} finally {
@@ -147,23 +147,29 @@ public class CPEAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
FileNameAnalyzer fnAnalyzer = new FileNameAnalyzer();
fnAnalyzer.analyze(struts, null);
HintAnalyzer hintAnalyzer = new HintAnalyzer();
JarAnalyzer jarAnalyzer = new JarAnalyzer();
jarAnalyzer.analyze(struts, null);
jarAnalyzer.accept(new File("test.jar"));//trick analyzer into "thinking it is active"
jarAnalyzer.analyze(struts, null);
hintAnalyzer.analyze(struts, null);
//File fileCommonValidator = new File(this.getClass().getClassLoader().getResource("commons-validator-1.4.0.jar").getPath());
File fileCommonValidator = BaseTest.getResourceAsFile(this, "commons-validator-1.4.0.jar");
Dependency commonValidator = new Dependency(fileCommonValidator);
jarAnalyzer.analyze(commonValidator, null);
hintAnalyzer.analyze(commonValidator, null);
//File fileSpring = new File(this.getClass().getClassLoader().getResource("spring-core-2.5.5.jar").getPath());
File fileSpring = BaseTest.getResourceAsFile(this, "spring-core-2.5.5.jar");
Dependency spring = new Dependency(fileSpring);
jarAnalyzer.analyze(spring, null);
hintAnalyzer.analyze(spring, null);
//File fileSpring3 = new File(this.getClass().getClassLoader().getResource("spring-core-3.0.0.RELEASE.jar").getPath());
File fileSpring3 = BaseTest.getResourceAsFile(this, "spring-core-3.0.0.RELEASE.jar");
Dependency spring3 = new Dependency(fileSpring3);
jarAnalyzer.analyze(spring3, null);
hintAnalyzer.analyze(spring3, null);
CPEAnalyzer instance = new CPEAnalyzer();
instance.open();
@@ -189,6 +195,7 @@ public class CPEAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
//the following two only work if the HintAnalyzer is used.
//Assert.assertTrue("Incorrect match size - spring", spring.getIdentifiers().size() == 1);
//Assert.assertTrue("Incorrect match - spring", spring.getIdentifiers().get(0).getValue().equals(expResultSpring));
jarAnalyzer.close();
}
/**

Some files were not shown because too many files have changed in this diff.