Mirror of https://github.com/ysoftdevs/DependencyCheck.git (synced 2026-01-14 07:43:40 +01:00)
Compare commits
275 Commits
.gitattributes (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
*.html linguist-documentation
(^|/)site/) linguist-documentation
.gitignore (vendored, 3 changes)
@@ -26,4 +26,5 @@ _site/**
.LCKpom.xml~
#coverity
/cov-int/
/dependency-check-core/nbproject/
/dependency-check-core/nbproject/
cov-scan.bat

@@ -1,2 +1,3 @@
language: java
jdk: oraclejdk7
script: mvn install -DreleaseTesting
Dockerfile (new file, 14 lines)
@@ -0,0 +1,14 @@
FROM java:8

MAINTAINER Timo Pagel <dependencycheckmaintainer@timo-pagel.de>

RUN wget -O /tmp/current.txt http://jeremylong.github.io/DependencyCheck/current.txt && current=$(cat /tmp/current.txt) && wget https://dl.bintray.com/jeremy-long/owasp/dependency-check-$current-release.zip && unzip dependency-check-$current-release.zip && mv dependency-check /usr/share/

RUN useradd -ms /bin/bash dockeruser && chown -R dockeruser:dockeruser /usr/share/dependency-check && mkdir /report && chown -R dockeruser:dockeruser /report
USER dockeruser

VOLUME "/src /usr/share/dependency-check/data /report"

WORKDIR /report

ENTRYPOINT ["/usr/share/dependency-check/bin/dependency-check.sh", "--scan", "/src"]
README.md (34 changes)
@@ -1,4 +1,5 @@
[](https://travis-ci.org/jeremylong/DependencyCheck) [](https://www.apache.org/licenses/LICENSE-2.0.txt)
[](https://travis-ci.org/jeremylong/DependencyCheck) [](https://www.apache.org/licenses/LICENSE-2.0.txt) [](https://scan.coverity.com/projects/dependencycheck)

Dependency-Check
================

@@ -96,6 +97,37 @@ On Windows

Then load the resulting 'DependencyCheck-Report.html' into your favorite browser.

### Docker

In the following example it is assumed that the source to be checked is in the current directory. Persistent data and report directories are used so that the container can be destroyed after each run, ensuring you always use the newest version.
```
# After the first run, feel free to change the owner of the directories to the owner of the created files and the permissions to 744
DATA_DIRECTORY=$HOME/OWASP-Dependency-Check/data
REPORT_DIRECTORY=$HOME/OWASP-Dependency-Check/reports

if [ ! -d $DATA_DIRECTORY ]; then
    echo "Initially creating persistent directories"
    mkdir -p $DATA_DIRECTORY
    chmod -R 777 $DATA_DIRECTORY

    mkdir -p $REPORT_DIRECTORY
    chmod -R 777 $REPORT_DIRECTORY
fi

docker pull owasp/dependency-check # Make sure it is the latest version

docker run --rm \
    --volume $(pwd):/src \
    --volume $DATA_DIRECTORY:/usr/share/dependency-check/data \
    --volume $REPORT_DIRECTORY:/report \
    --name dependency-check \
    owasp/dependency-check \
    --suppression "/src/security/dependency-check-suppression.xml" \
    --format "ALL" \
    --project "My OWASP Dependency Check Project"
```


Mailing List
------------

@@ -20,7 +20,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
|
||||
<parent>
|
||||
<groupId>org.owasp</groupId>
|
||||
<artifactId>dependency-check-parent</artifactId>
|
||||
<version>1.4.2</version>
|
||||
<version>1.4.5</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>dependency-check-ant</artifactId>
|
||||
|
||||
@@ -24,16 +24,21 @@ import org.slf4j.helpers.MarkerIgnoringBase;
|
||||
import org.slf4j.helpers.MessageFormatter;
|
||||
|
||||
/**
|
||||
* An instance of {@link org.slf4j.Logger} which simply calls the log method on the delegate Ant task.
|
||||
* An instance of {@link org.slf4j.Logger} which simply calls the log method on
|
||||
* the delegate Ant task.
|
||||
*
|
||||
* @author colezlaw
|
||||
*/
|
||||
public class AntLoggerAdapter extends MarkerIgnoringBase {
|
||||
|
||||
/**
|
||||
* serialization UID.
|
||||
*/
|
||||
private static final long serialVersionUID = -1337;
|
||||
/**
|
||||
* A reference to the Ant task used for logging.
|
||||
*/
|
||||
private Task task;
|
||||
private transient Task task;
|
||||
|
||||
/**
|
||||
* Constructs an Ant Logger Adapter.
|
||||
|
||||
@@ -346,6 +346,28 @@ public class Check extends Update {
|
||||
public void setSuppressionFile(String suppressionFile) {
|
||||
this.suppressionFile = suppressionFile;
|
||||
}
|
||||
/**
|
||||
* The path to the suppression file.
|
||||
*/
|
||||
private String hintsFile;
|
||||
|
||||
/**
|
||||
* Get the value of hintsFile.
|
||||
*
|
||||
* @return the value of hintsFile
|
||||
*/
|
||||
public String getHintsFile() {
|
||||
return hintsFile;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of hintsFile.
|
||||
*
|
||||
* @param hintsFile new value of hintsFile
|
||||
*/
|
||||
public void setHintsFile(String hintsFile) {
|
||||
this.hintsFile = hintsFile;
|
||||
}
|
||||
/**
|
||||
* flag indicating whether or not to show a summary of findings.
|
||||
*/
|
||||
@@ -546,6 +568,102 @@ public class Check extends Update {
|
||||
public void setCMakeAnalyzerEnabled(Boolean cmakeAnalyzerEnabled) {
|
||||
this.cmakeAnalyzerEnabled = cmakeAnalyzerEnabled;
|
||||
}
|
||||
|
||||
//start changes
|
||||
/**
|
||||
* Whether or not the Ruby Bundle Audit Analyzer is enabled.
|
||||
*/
|
||||
private Boolean bundleAuditAnalyzerEnabled;
|
||||
|
||||
/**
|
||||
* Returns if the Bundle Audit Analyzer is enabled.
|
||||
*
|
||||
* @return if the Bundle Audit Analyzer is enabled.
|
||||
*/
|
||||
public Boolean isBundleAuditAnalyzerEnabled() {
|
||||
return bundleAuditAnalyzerEnabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets if the Bundle Audit Analyzer is enabled.
|
||||
*
|
||||
* @param bundleAuditAnalyzerEnabled whether or not the analyzer should be
|
||||
* enabled
|
||||
*/
|
||||
public void setBundleAuditAnalyzerEnabled(Boolean bundleAuditAnalyzerEnabled) {
|
||||
this.bundleAuditAnalyzerEnabled = bundleAuditAnalyzerEnabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the path for the bundle-audit binary.
|
||||
*/
|
||||
private String bundleAuditPath;
|
||||
|
||||
/**
|
||||
* Returns the path to the bundle audit executable.
|
||||
*
|
||||
* @return the path to the bundle audit executable
|
||||
*/
|
||||
public String getBundleAuditPath() {
|
||||
return bundleAuditPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the path to the bundle audit executable.
|
||||
*
|
||||
* @param bundleAuditPath the path to the bundle audit executable
|
||||
*/
|
||||
public void setBundleAuditPath(String bundleAuditPath) {
|
||||
this.bundleAuditPath = bundleAuditPath;
|
||||
}
|
||||
/**
|
||||
* Whether or not the CocoaPods Analyzer is enabled.
|
||||
*/
|
||||
private Boolean cocoapodsAnalyzerEnabled;
|
||||
|
||||
/**
|
||||
* Returns if the cocoapods analyzer is enabled.
|
||||
*
|
||||
* @return if the cocoapods analyzer is enabled
|
||||
*/
|
||||
public boolean isCocoapodsAnalyzerEnabled() {
|
||||
return cocoapodsAnalyzerEnabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets whether or not the cocoapods analyzer is enabled.
|
||||
*
|
||||
* @param cocoapodsAnalyzerEnabled the state of the cocoapods analyzer
|
||||
*/
|
||||
public void setCocoapodsAnalyzerEnabled(Boolean cocoapodsAnalyzerEnabled) {
|
||||
this.cocoapodsAnalyzerEnabled = cocoapodsAnalyzerEnabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether or not the Swift package Analyzer is enabled.
|
||||
*/
|
||||
private Boolean swiftPackageManagerAnalyzerEnabled;
|
||||
|
||||
/**
|
||||
* Returns whether or not the Swift package Analyzer is enabled.
|
||||
*
|
||||
* @return whether or not the Swift package Analyzer is enabled
|
||||
*/
|
||||
public Boolean isSwiftPackageManagerAnalyzerEnabled() {
|
||||
return swiftPackageManagerAnalyzerEnabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the enabled state of the swift package manager analyzer.
|
||||
*
|
||||
* @param swiftPackageManagerAnalyzerEnabled the enabled state of the swift
|
||||
* package manager
|
||||
*/
|
||||
public void setSwiftPackageManagerAnalyzerEnabled(Boolean swiftPackageManagerAnalyzerEnabled) {
|
||||
this.swiftPackageManagerAnalyzerEnabled = swiftPackageManagerAnalyzerEnabled;
|
||||
}
|
||||
//end changes
|
||||
|
||||
/**
|
||||
* Whether or not the openssl analyzer is enabled.
|
||||
*/
|
||||
@@ -904,6 +1022,7 @@ public class Check extends Update {
|
||||
super.populateSettings();
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, enableExperimental);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_JAR_ENABLED, jarAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, pyDistributionAnalyzerEnabled);
|
||||
@@ -911,6 +1030,10 @@ public class Check extends Update {
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, rubygemsAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, opensslAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_CMAKE_ENABLED, cmakeAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, swiftPackageManagerAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, cocoapodsAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED, bundleAuditAnalyzerEnabled);
|
||||
Settings.setStringIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH, bundleAuditPath);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, autoconfAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, composerAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, nodeAnalyzerEnabled);
|
||||
|
||||
@@ -2,7 +2,7 @@ Configuration
|
||||
====================
|
||||
The dependency-check-purge task deletes the local copy of the NVD. This task
|
||||
should rarely be used, if ever. This is included as a convenience method in
|
||||
the rare circumstance that the local H2 database because corrupt.
|
||||
the rare circumstance that the local H2 database becomes corrupt.
|
||||
|
||||
```xml
|
||||
<target name="dependency-check-purge" description="Dependency-Check purge">
|
||||
|
||||
@@ -39,6 +39,7 @@ projectName | The name of the project being scanned.
|
||||
reportFormat | The report format to be generated (HTML, XML, VULN, ALL). This configuration option has no affect if using this within the Site plugin unless the externalReport is set to true. | HTML
|
||||
reportOutputDirectory | The location to write the report(s). Note, this is not used if generating the report as part of a `mvn site` build | 'target'
|
||||
suppressionFile | The file path to the XML suppression file \- used to suppress [false positives](../general/suppression.html) |
|
||||
hintsFile | The file path to the XML hints file \- used to resolve [false negatives](../general/hints.html) |
|
||||
proxyServer | The Proxy Server; see the [proxy configuration](../data/proxy.html) page for more information. |
|
||||
proxyPort | The Proxy Port. |
|
||||
proxyUsername | Defines the proxy user name. |
|
||||
@@ -71,9 +72,13 @@ cmakeAnalyzerEnabled | Sets whether the [experimental](../analyzers/ind
|
||||
autoconfAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) autoconf Analyzer should be used. | true
|
||||
composerAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) PHP Composer Lock File Analyzer should be used. | true
|
||||
nodeAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Node.js Analyzer should be used. | true
|
||||
nuspecAnalyzerEnabled | Sets whether the .NET Nuget Nuspec Analyzer will be used. | true
|
||||
assemblyAnalyzerEnabled | Sets whether the .NET Assembly Analyzer should be used. | true
|
||||
pathToMono | The path to Mono for .NET assembly analysis on non-windows systems. |
|
||||
nuspecAnalyzerEnabled | Sets whether the .NET Nuget Nuspec Analyzer will be used. | true
|
||||
cocoapodsAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Cocoapods Analyzer should be used. | true
|
||||
bundleAuditAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Bundle Audit Analyzer should be used. | true
|
||||
bundleAuditPath | Sets the path to the bundle audit executable; only used if bundle audit analyzer is enabled and experimental analyzers are enabled. |
|
||||
swiftPackageManagerAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Swift Package Manager Analyzer should be used. | true
|
||||
assemblyAnalyzerEnabled | Sets whether the .NET Assembly Analyzer should be used. | true
|
||||
pathToMono | The path to Mono for .NET assembly analysis on non-windows systems. |
|
||||
|
||||
Advanced Configuration
|
||||
====================
|
||||
|
||||
@@ -20,7 +20,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
|
||||
<parent>
|
||||
<groupId>org.owasp</groupId>
|
||||
<artifactId>dependency-check-parent</artifactId>
|
||||
<version>1.4.2</version>
|
||||
<version>1.4.5</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>dependency-check-cli</artifactId>
|
||||
@@ -140,6 +140,8 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
|
||||
<binFileExtensions>
|
||||
<unix>.sh</unix>
|
||||
</binFileExtensions>
|
||||
<configurationDirectory>plugins/*</configurationDirectory>
|
||||
<includeConfigurationDirectoryInClasspath>true</includeConfigurationDirectoryInClasspath>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
|
||||
@@ -29,6 +29,13 @@
|
||||
<outputDirectory>dependency-check/repo</outputDirectory>
|
||||
<directory>${project.build.directory}/release/repo</directory>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>.</directory>
|
||||
<outputDirectory>dependency-check/plugins</outputDirectory>
|
||||
<excludes>
|
||||
<exclude>*/**</exclude>
|
||||
</excludes>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<outputDirectory>dependency-check</outputDirectory>
|
||||
<includes>
|
||||
@@ -53,21 +60,4 @@
|
||||
</includes>
|
||||
</fileSet>
|
||||
</fileSets>
|
||||
<!--
|
||||
<fileSets>
|
||||
<fileSet>
|
||||
<outputDirectory>/</outputDirectory>
|
||||
<directory>${project.build.directory}</directory>
|
||||
<includes>
|
||||
<include>dependency-check*.jar</include>
|
||||
</includes>
|
||||
</fileSet>
|
||||
</fileSets>
|
||||
<dependencySets>
|
||||
<dependencySet>
|
||||
<outputDirectory>/lib</outputDirectory>
|
||||
<scope>runtime</scope>
|
||||
</dependencySet>
|
||||
</dependencySets>
|
||||
-->
|
||||
</assembly>
|
||||
@@ -19,6 +19,7 @@ package org.owasp.dependencycheck;
|
||||
|
||||
import ch.qos.logback.classic.LoggerContext;
|
||||
import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
|
||||
import ch.qos.logback.classic.spi.ILoggingEvent;
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
@@ -32,6 +33,7 @@ import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.apache.tools.ant.DirectoryScanner;
|
||||
import org.owasp.dependencycheck.dependency.Vulnerability;
|
||||
import org.owasp.dependencycheck.reporting.ReportGenerator;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
@@ -158,8 +160,13 @@ public class App {
|
||||
exitCode = -4;
|
||||
}
|
||||
try {
|
||||
runScan(cli.getReportDirectory(), cli.getReportFormat(), cli.getProjectName(), cli.getScanFiles(),
|
||||
cli.getExcludeList(), cli.getSymLinkDepth());
|
||||
final String[] scanFiles = cli.getScanFiles();
|
||||
if (scanFiles != null) {
|
||||
exitCode = runScan(cli.getReportDirectory(), cli.getReportFormat(), cli.getProjectName(), scanFiles,
|
||||
cli.getExcludeList(), cli.getSymLinkDepth(), cli.getFailOnCVSS());
|
||||
} else {
|
||||
LOGGER.error("No scan files configured");
|
||||
}
|
||||
} catch (InvalidScanPathException ex) {
|
||||
LOGGER.error("An invalid scan path was detected; unable to scan '//*' paths");
|
||||
exitCode = -10;
|
||||
@@ -172,7 +179,7 @@ public class App {
|
||||
} catch (ExceptionCollection ex) {
|
||||
if (ex.isFatal()) {
|
||||
exitCode = -13;
|
||||
LOGGER.error("One or more fatal errors occured");
|
||||
LOGGER.error("One or more fatal errors occurred");
|
||||
} else {
|
||||
exitCode = -14;
|
||||
}
|
||||
@@ -197,6 +204,8 @@ public class App {
|
||||
* @param files the files/directories to scan
|
||||
* @param excludes the patterns for files/directories to exclude
|
||||
* @param symLinkDepth the depth that symbolic links will be followed
|
||||
* @param cvssFailScore the score to fail on if a vulnerability is found
|
||||
* @return the exit code if there was an error
|
||||
*
|
||||
* @throws InvalidScanPathException thrown if the path to scan starts with
|
||||
* "//"
|
||||
@@ -207,9 +216,11 @@ public class App {
|
||||
* analysis; there may be multiple exceptions contained within the
|
||||
* collection.
|
||||
*/
|
||||
private void runScan(String reportDirectory, String outputFormat, String applicationName, String[] files,
|
||||
String[] excludes, int symLinkDepth) throws InvalidScanPathException, DatabaseException, ExceptionCollection, ReportException {
|
||||
private int runScan(String reportDirectory, String outputFormat, String applicationName, String[] files,
|
||||
String[] excludes, int symLinkDepth, int cvssFailScore) throws InvalidScanPathException, DatabaseException,
|
||||
ExceptionCollection, ReportException {
|
||||
Engine engine = null;
|
||||
int retCode = 0;
|
||||
try {
|
||||
engine = new Engine();
|
||||
final List<String> antStylePaths = new ArrayList<String>();
|
||||
@@ -293,15 +304,28 @@ public class App {
|
||||
throw ex;
|
||||
}
|
||||
}
|
||||
if (exCol != null && exCol.getExceptions().size()>0) {
|
||||
if (exCol != null && exCol.getExceptions().size() > 0) {
|
||||
throw exCol;
|
||||
}
|
||||
|
||||
//Set the exit code based on whether we found a high enough vulnerability
|
||||
for (Dependency dep : dependencies) {
|
||||
if (!dep.getVulnerabilities().isEmpty()) {
|
||||
for (Vulnerability vuln : dep.getVulnerabilities()) {
|
||||
LOGGER.debug("VULNERABILITY FOUND " + dep.getDisplayFileName());
|
||||
if (vuln.getCvssScore() > cvssFailScore) {
|
||||
retCode = 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return retCode;
|
||||
} finally {
|
||||
if (engine != null) {
|
||||
engine.cleanup();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -342,6 +366,7 @@ public class App {
|
||||
final String dataDirectory = cli.getDataDirectory();
|
||||
final File propertiesFile = cli.getPropertiesFile();
|
||||
final String suppressionFile = cli.getSuppressionFile();
|
||||
final String hintsFile = cli.getHintsFile();
|
||||
final String nexusUrl = cli.getNexusUrl();
|
||||
final String databaseDriverName = cli.getDatabaseDriverName();
|
||||
final String databaseDriverPath = cli.getDatabaseDriverPath();
|
||||
@@ -389,6 +414,7 @@ public class App {
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPass);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);
|
||||
Settings.setIntIfNotNull(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours);
|
||||
|
||||
//File Type Analyzer Settings
|
||||
@@ -405,6 +431,8 @@ public class App {
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, !cli.isOpenSSLDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, !cli.isComposerDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, !cli.isNodeJsDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, !cli.isSwiftPackageAnalyzerDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, !cli.isCocoapodsAnalyzerDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, !cli.isRubyGemspecDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, !cli.isCentralDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, !cli.isNexusDisabled());
|
||||
@@ -440,7 +468,7 @@ public class App {
|
||||
encoder.setPattern("%d %C:%L%n%-5level - %msg%n");
|
||||
encoder.setContext(context);
|
||||
encoder.start();
|
||||
final FileAppender fa = new FileAppender();
|
||||
final FileAppender<ILoggingEvent> fa = new FileAppender<ILoggingEvent>();
|
||||
fa.setAppend(true);
|
||||
fa.setEncoder(encoder);
|
||||
fa.setContext(context);
|
||||
|
||||
@@ -196,6 +196,10 @@ public final class CliParser {
|
||||
isValid = false;
|
||||
final String msg = String.format("Invalid '%s' argument: '%s'%nUnable to scan paths that start with '//'.", argumentName, path);
|
||||
throw new FileNotFoundException(msg);
|
||||
} else if ((path.endsWith("/*") && !path.endsWith("**/*")) || (path.endsWith("\\*") && path.endsWith("**\\*"))) {
|
||||
final String msg = String.format("Possibly incorrect path '%s' from argument '%s' because it ends with a slash star; "
|
||||
+ "dependency-check uses ant-style paths", path, argumentName);
|
||||
LOGGER.warn(msg);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -273,6 +277,10 @@ public final class CliParser {
|
||||
.desc("The file path to the suppression XML file.")
|
||||
.build();
|
||||
|
||||
final Option hintsFile = Option.builder().argName("file").hasArg().longOpt(ARGUMENT.HINTS_FILE)
|
||||
.desc("The file path to the hints XML file.")
|
||||
.build();
|
||||
|
||||
final Option cveValidForHours = Option.builder().argName("hours").hasArg().longOpt(ARGUMENT.CVE_VALID_FOR_HOURS)
|
||||
.desc("The number of hours to wait before checking for new updates from the NVD.")
|
||||
.build();
|
||||
@@ -281,6 +289,11 @@ public final class CliParser {
|
||||
.desc("Enables the experimental analzers.")
|
||||
.build();
|
||||
|
||||
final Option failOnCVSS = Option.builder().argName("score").hasArg().longOpt(ARGUMENT.FAIL_ON_CVSS)
|
||||
.desc("Specifies if the build should be failed if a CVSS score above a specified level is identified. "
|
||||
+ "The default is 11; since the CVSS scores are 0-10, by default the build will never fail.")
|
||||
.build();
|
||||
|
||||
//This is an option group because it can be specified more then once.
|
||||
final OptionGroup og = new OptionGroup();
|
||||
og.addOption(path);
|
||||
@@ -301,8 +314,10 @@ public final class CliParser {
|
||||
.addOption(props)
|
||||
.addOption(verboseLog)
|
||||
.addOption(suppressionFile)
|
||||
.addOption(hintsFile)
|
||||
.addOption(cveValidForHours)
|
||||
.addOption(experimentalEnabled);
|
||||
.addOption(experimentalEnabled)
|
||||
.addOption(failOnCVSS);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -422,6 +437,11 @@ public final class CliParser {
|
||||
final Option disableCmakeAnalyzer = Option.builder().longOpt(ARGUMENT.DISABLE_CMAKE)
|
||||
.desc("Disable the Cmake Analyzer.").build();
|
||||
|
||||
final Option cocoapodsAnalyzerEnabled = Option.builder().longOpt(ARGUMENT.DISABLE_COCOAPODS)
|
||||
.desc("Disable the CocoaPods Analyzer.").build();
|
||||
final Option swiftPackageManagerAnalyzerEnabled = Option.builder().longOpt(ARGUMENT.DISABLE_SWIFT)
|
||||
.desc("Disable the swift package Analyzer.").build();
|
||||
|
||||
final Option disableCentralAnalyzer = Option.builder().longOpt(ARGUMENT.DISABLE_CENTRAL)
|
||||
.desc("Disable the Central Analyzer. If this analyzer is disabled it is likely you also want to disable "
|
||||
+ "the Nexus Analyzer.").build();
|
||||
@@ -466,6 +486,8 @@ public final class CliParser {
|
||||
.addOption(disableNuspecAnalyzer)
|
||||
.addOption(disableCentralAnalyzer)
|
||||
.addOption(disableNexusAnalyzer)
|
||||
.addOption(cocoapodsAnalyzerEnabled)
|
||||
.addOption(swiftPackageManagerAnalyzerEnabled)
|
||||
.addOption(Option.builder().longOpt(ARGUMENT.DISABLE_NODE_JS)
|
||||
.desc("Disable the Node.js Package Analyzer.").build())
|
||||
.addOption(nexusUrl)
|
||||
@@ -686,6 +708,28 @@ public final class CliParser {
|
||||
return (line != null) && line.hasOption(ARGUMENT.DISABLE_NODE_JS);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the disableCocoapodsAnalyzer command line argument was
|
||||
* specified.
|
||||
*
|
||||
* @return true if the disableCocoapodsAnalyzer command line argument was
|
||||
* specified; otherwise false
|
||||
*/
|
||||
public boolean isCocoapodsAnalyzerDisabled() {
|
||||
return (line != null) && line.hasOption(ARGUMENT.DISABLE_COCOAPODS);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the disableSwiftPackageManagerAnalyzer command line
|
||||
* argument was specified.
|
||||
*
|
||||
* @return true if the disableSwiftPackageManagerAnalyzer command line
|
||||
* argument was specified; otherwise false
|
||||
*/
|
||||
public boolean isSwiftPackageAnalyzerDisabled() {
|
||||
return (line != null) && line.hasOption(ARGUMENT.DISABLE_SWIFT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the disableCentral command line argument was specified.
|
||||
*
|
||||
@@ -958,6 +1002,15 @@ public final class CliParser {
|
||||
return line.getOptionValue(ARGUMENT.SUPPRESSION_FILE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the path to the hints file.
|
||||
*
|
||||
* @return the path to the hints file
|
||||
*/
|
||||
public String getHintsFile() {
|
||||
return line.getOptionValue(ARGUMENT.HINTS_FILE);
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Prints the manifest information to standard output.</p>
|
||||
@@ -966,7 +1019,7 @@ public final class CliParser {
|
||||
*/
|
||||
public void printVersionInfo() {
|
||||
final String version = String.format("%s version %s",
|
||||
Settings.getString(Settings.KEYS.APPLICATION_VAME, "dependency-check"),
|
||||
Settings.getString(Settings.KEYS.APPLICATION_NAME, "dependency-check"),
|
||||
Settings.getString(Settings.KEYS.APPLICATION_VERSION, "Unknown"));
|
||||
System.out.println(version);
|
||||
}
|
||||
@@ -1087,6 +1140,25 @@ public final class CliParser {
|
||||
return line.hasOption(ARGUMENT.EXPERIMENTAL);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the CVSS value to fail on.
|
||||
*
|
||||
* @return 11 if nothing is set. Otherwise it returns the int passed from
|
||||
* the command line arg
|
||||
*/
|
||||
public int getFailOnCVSS() {
|
||||
if (line.hasOption(ARGUMENT.FAIL_ON_CVSS)) {
|
||||
final String value = line.getOptionValue(ARGUMENT.FAIL_ON_CVSS);
|
||||
try {
|
||||
return Integer.parseInt(value);
|
||||
} catch (NumberFormatException nfe) {
|
||||
return 11;
|
||||
}
|
||||
} else {
|
||||
return 11;
|
||||
}
|
||||
}
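
The fallback behaviour described above, returning 11 whenever the flag is missing or unparsable, can be seen from the caller's side. A small sketch mirroring the unit tests added later in this change (exception handling omitted for brevity):

```java
// Anything other than a parsable score leaves the threshold at 11, so the
// scan can never fail on CVSS by accident.
final CliParser cli = new CliParser();
cli.parse(new String[]{"--failOnCVSS", "7"});
final int threshold = cli.getFailOnCVSS(); // 7 here; 11 if the flag is absent or invalid
```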
|
||||
|
||||
/**
|
||||
* A collection of static final strings that represent the possible command
|
||||
* line arguments.
|
||||
@@ -1269,8 +1341,12 @@ public final class CliParser {
|
||||
*/
|
||||
public static final String SUPPRESSION_FILE = "suppression";
|
||||
/**
|
||||
* The CLI argument name for setting the location of the suppression
|
||||
* file.
|
||||
* The CLI argument name for setting the location of the hint file.
|
||||
*/
|
||||
public static final String HINTS_FILE = "hints";
|
||||
/**
|
||||
* The CLI argument name for setting the number of hours to wait before
|
||||
* checking for new updates from the NVD.
|
||||
*/
|
||||
public static final String CVE_VALID_FOR_HOURS = "cveValidForHours";
|
||||
/**
|
||||
@@ -1305,6 +1381,14 @@ public final class CliParser {
|
||||
* Disables the Cmake Analyzer.
|
||||
*/
|
||||
public static final String DISABLE_CMAKE = "disableCmake";
|
||||
/**
|
||||
* Disables the cocoapods analyzer.
|
||||
*/
|
||||
public static final String DISABLE_COCOAPODS = "disableCocoapodsAnalyzer";
|
||||
/**
|
||||
* Disables the swift package manager analyzer.
|
||||
*/
|
||||
public static final String DISABLE_SWIFT = "disableSwiftPackageManagerAnalyzer";
|
||||
/**
|
||||
* Disables the Assembly Analyzer.
|
||||
*/
|
||||
@@ -1385,5 +1469,9 @@ public final class CliParser {
|
||||
* The CLI argument to enable the experimental analyzers.
|
||||
*/
|
||||
private static final String EXPERIMENTAL = "enableExperimental";
|
||||
/**
|
||||
* The CLI argument to enable the experimental analyzers.
|
||||
*/
|
||||
private static final String FAIL_ON_CVSS = "failOnCVSS";
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ Short | Argument Name | Parameter | Description | Requir
|
||||
| \-\-symLink | \<depth\> | The depth that symbolic links will be followed; the default is 0 meaning symbolic links will not be followed. | Optional
|
||||
\-o | \-\-out | \<path\> | The folder to write reports to. This defaults to the current directory. If the format is not set to ALL one could specify a specific file name. | Optional
|
||||
\-f | \-\-format | \<format\> | The output format to write to (XML, HTML, VULN, ALL). The default is HTML. | Required
|
||||
| \-\-failOnCvss | \<score\> | If a score between 0 and 10 is set, the dependency-check exit code will indicate whether a vulnerability with a CVSS score equal to or higher than the given value was identified. | Optional
|
||||
\-l | \-\-log | \<file\> | The file path to write verbose logging information. | Optional
|
||||
\-n | \-\-noupdate | | Disables the automatic updating of the CPE data. | Optional
|
||||
| \-\-suppression | \<file\> | The file path to the suppression XML file; used to suppress [false positives](../general/suppression.html). | Optional
|
||||
@@ -34,13 +35,15 @@ Short | Argument Name | Paramete
|
||||
| \-\-disablePyPkg | | Sets whether the [experimental](../analyzers/index.html) Python Package Analyzer will be used. | false
|
||||
| \-\-disableNodeJS | | Sets whether the [experimental](../analyzers/index.html) Node.js Package Analyzer will be used. | false
|
||||
| \-\-disableRubygems | | Sets whether the [experimental](../analyzers/index.html) Ruby Gemspec Analyzer will be used. | false
|
||||
| \-\-disableBundleAudit | | Sets whether the [experimental](../analyzers/index.html) Ruby Bundler Audit Analyzer will be used. | false
|
||||
| \-\-disableBundleAudit | | Sets whether the [experimental](../analyzers/index.html) Ruby Bundler Audit Analyzer will be used. | false
|
||||
| \-\-disableCocoapodsAnalyzer | | Sets whether the [experimental](../analyzers/index.html) Cocoapods Analyzer will be used. | false
|
||||
| \-\-disableSwiftPackageManagerAnalyzer | | Sets whether the [experimental](../analyzers/index.html) Swift Package Manager Analyzer will be used. | false
|
||||
| \-\-disableAutoconf | | Sets whether the [experimental](../analyzers/index.html) Autoconf Analyzer will be used. | false
|
||||
| \-\-disableOpenSSL | | Sets whether the OpenSSL Analyzer will be used. | false
|
||||
| \-\-disableOpenSSL | | Sets whether the OpenSSL Analyzer will be used. | false
|
||||
| \-\-disableCmake | | Sets whether the [experimental](../analyzers/index.html) Cmake Analyzer will be disabled. | false
|
||||
| \-\-disableArchive | | Sets whether the Archive Analyzer will be disabled. | false
|
||||
| \-\-disableArchive | | Sets whether the Archive Analyzer will be disabled. | false
|
||||
| \-\-zipExtensions | \<strings\> | A comma-separated list of additional file extensions to be treated like a ZIP file, the contents will be extracted and analyzed. |
|
||||
| \-\-disableJar | | Sets whether the Jar Analyzer will be disabled. | false
|
||||
| \-\-disableJar | | Sets whether the Jar Analyzer will be disabled. | false
|
||||
| \-\-disableComposer | | Sets whether the [experimental](../analyzers/index.html) PHP Composer Lock File Analyzer will be disabled. | false
|
||||
| \-\-disableCentral | | Sets whether the Central Analyzer will be used. **Disabling this analyzer is not recommended as it could lead to false negatives (e.g. libraries that have vulnerabilities may not be reported correctly).** If this analyzer is being disabled there is a good chance you also want to disable the Nexus Analyzer. | false
|
||||
| \-\-disableNexus | | Sets whether the Nexus Analyzer will be used. Note, this has been superceded by the Central Analyzer. However, you can configure the Nexus URL to utilize an internally hosted Nexus Pro server. | false
|
||||
|
||||
@@ -9,10 +9,7 @@ Installation & Usage
|
||||
====================
|
||||
Download the dependency-check command line tool [here](http://dl.bintray.com/jeremy-long/owasp/dependency-check-${project.version}-release.zip).
|
||||
Extract the zip file to a location on your computer and put the 'bin' directory into the
|
||||
path environment variable. On \*nix systems you will likely need to make the shell
|
||||
script executable:
|
||||
|
||||
$ chmod +777 dependency-check.sh
|
||||
path environment variable.
|
||||
|
||||
#set( $H = '#' )
|
||||
|
||||
|
||||
@@ -115,6 +115,63 @@ public class CliParserTest {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of parse method with failOnCVSS without an argument
|
||||
*
|
||||
* @throws Exception thrown when an exception occurs.
|
||||
*/
|
||||
@Test
|
||||
public void testParse_failOnCVSSNoArg() throws Exception {
|
||||
|
||||
String[] args = {"--failOnCVSS"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
try {
|
||||
instance.parse(args);
|
||||
} catch (ParseException ex) {
|
||||
Assert.assertTrue(ex.getMessage().contains("Missing argument"));
|
||||
}
|
||||
Assert.assertFalse(instance.isGetVersion());
|
||||
Assert.assertFalse(instance.isGetHelp());
|
||||
Assert.assertFalse(instance.isRunScan());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of parse method with failOnCVSS invalid argument. It should default to 11
|
||||
*
|
||||
* @throws Exception thrown when an exception occurs.
|
||||
*/
|
||||
@Test
|
||||
public void testParse_failOnCVSSInvalidArgument() throws Exception {
|
||||
|
||||
String[] args = {"--failOnCVSS","bad"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
instance.parse(args);
|
||||
Assert.assertEquals("Default should be 11", 11, instance.getFailOnCVSS());
|
||||
Assert.assertFalse(instance.isGetVersion());
|
||||
Assert.assertFalse(instance.isGetHelp());
|
||||
Assert.assertFalse(instance.isRunScan());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of parse method with failOnCVSS with a valid argument.
|
||||
*
|
||||
* @throws Exception thrown when an exception occurs.
|
||||
*/
|
||||
@Test
|
||||
public void testParse_failOnCVSSValidArgument() throws Exception {
|
||||
|
||||
String[] args = {"--failOnCVSS","6"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
instance.parse(args);
|
||||
Assert.assertEquals(6, instance.getFailOnCVSS());
|
||||
Assert.assertFalse(instance.isGetVersion());
|
||||
Assert.assertFalse(instance.isGetHelp());
|
||||
Assert.assertFalse(instance.isRunScan());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of parse method with jar and cpe args, of class CliParser.
|
||||
*
|
||||
@@ -196,7 +253,7 @@ public class CliParserTest {
|
||||
*/
|
||||
@Test
|
||||
public void testParse_scan_withFileExists() throws Exception {
|
||||
File path = new File(this.getClass().getClassLoader().getResource("checkSumTest.file").getPath());
|
||||
File path = new File(this.getClass().getClassLoader().getResource("checkSumTest.file").toURI().getPath());
|
||||
String[] args = {"-scan", path.getCanonicalPath(), "-out", "./", "-app", "test"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
|
||||
@@ -20,7 +20,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<parent>
|
||||
<groupId>org.owasp</groupId>
|
||||
<artifactId>dependency-check-parent</artifactId>
|
||||
<version>1.4.2</version>
|
||||
<version>1.4.5</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>dependency-check-core</artifactId>
|
||||
@@ -261,6 +261,10 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
</reporting>
|
||||
<dependencies>
|
||||
<!-- Note, to stay compatible with Jenkins installations only JARs compiled to 1.6 can be used -->
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.code.findbugs</groupId>
|
||||
<artifactId>annotations</artifactId>
|
||||
@@ -575,15 +579,19 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
</plugins>
|
||||
</build>
|
||||
</profile>
|
||||
<profile>
|
||||
<!-- The following profile adds additional
|
||||
dependencies that are only used during testing.
|
||||
Additionally, these are only added when using "allTests" to
|
||||
make the build slightly faster in most cases. -->
|
||||
<!--
|
||||
The following profile adds additional dependencies that are only
|
||||
used during testing.
|
||||
|
||||
TODO move the following FP tests to a seperate invoker test in the
|
||||
maven plugin project. Add checks against the XML to validate that
|
||||
these do not report FP.
|
||||
-->
|
||||
<!--profile>
|
||||
<id>False Positive Tests</id>
|
||||
<activation>
|
||||
<property>
|
||||
<name>allTests</name>
|
||||
<name>releaseTesting</name>
|
||||
</property>
|
||||
</activation>
|
||||
<dependencies>
|
||||
@@ -664,13 +672,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-webmvc</artifactId>
|
||||
<version>3.2.12.RELEASE</version>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.code.gson</groupId>
|
||||
<artifactId>gson</artifactId>
|
||||
@@ -728,6 +729,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</profile>
|
||||
</profile-->
|
||||
</profiles>
|
||||
</project>
|
||||
|
||||
@@ -0,0 +1,130 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2016 Stefan Neuhaus. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck;
|
||||
|
||||
import org.owasp.dependencycheck.analyzer.Analyzer;
|
||||
import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.concurrent.Callable;
|
||||
|
||||
/**
|
||||
* Task to support parallelism of dependency-check analysis. Analyses a single
|
||||
* {@link Dependency} by a specific {@link Analyzer}.
|
||||
*
|
||||
* @author Stefan Neuhaus
|
||||
*/
|
||||
class AnalysisTask implements Callable<Void> {
|
||||
|
||||
/**
|
||||
* Instance of the logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(AnalysisTask.class);
|
||||
|
||||
/**
|
||||
* A reference to the analyzer.
|
||||
*/
|
||||
private final Analyzer analyzer;
|
||||
/**
|
||||
* The dependency to analyze.
|
||||
*/
|
||||
private final Dependency dependency;
|
||||
/**
|
||||
* A reference to the dependency-check engine.
|
||||
*/
|
||||
private final Engine engine;
|
||||
/**
|
||||
* The list of exceptions that may occur during analysis.
|
||||
*/
|
||||
private final List<Throwable> exceptions;
|
||||
/**
|
||||
* A reference to the global settings object.
|
||||
*/
|
||||
private final Settings settings;
|
||||
|
||||
/**
|
||||
* Creates a new analysis task.
|
||||
*
|
||||
* @param analyzer a reference of the analyzer to execute
|
||||
* @param dependency the dependency to analyze
|
||||
* @param engine the dependency-check engine
|
||||
* @param exceptions exceptions that occur during analysis will be added to
|
||||
* this collection of exceptions
|
||||
* @param settings a reference to the global settings object; this is
|
||||
* necessary so that when the thread is started the dependencies have a
|
||||
* correct reference to the global settings.
|
||||
*/
|
||||
AnalysisTask(Analyzer analyzer, Dependency dependency, Engine engine, List<Throwable> exceptions, Settings settings) {
|
||||
this.analyzer = analyzer;
|
||||
this.dependency = dependency;
|
||||
this.engine = engine;
|
||||
this.exceptions = exceptions;
|
||||
this.settings = settings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes the analysis task.
|
||||
*
|
||||
* @return null
|
||||
* @throws Exception thrown if unable to execute the analysis task
|
||||
*/
|
||||
@Override
|
||||
public Void call() {
|
||||
try {
|
||||
Settings.setInstance(settings);
|
||||
|
||||
if (shouldAnalyze()) {
|
||||
LOGGER.debug("Begin Analysis of '{}' ({})", dependency.getActualFilePath(), analyzer.getName());
|
||||
try {
|
||||
analyzer.analyze(dependency, engine);
|
||||
} catch (AnalysisException ex) {
|
||||
LOGGER.warn("An error occurred while analyzing '{}' ({}).", dependency.getActualFilePath(), analyzer.getName());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
} catch (Throwable ex) {
|
||||
LOGGER.warn("An unexpected error occurred during analysis of '{}' ({}): {}",
|
||||
dependency.getActualFilePath(), analyzer.getName(), ex.getMessage());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
Settings.cleanup(false);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if the analyzer can analyze the given dependency.
|
||||
*
|
||||
* @return whether or not the analyzer can analyze the dependency
|
||||
*/
|
||||
boolean shouldAnalyze() {
|
||||
if (analyzer instanceof FileTypeAnalyzer) {
|
||||
final FileTypeAnalyzer fileTypeAnalyzer = (FileTypeAnalyzer) analyzer;
|
||||
return fileTypeAnalyzer.accept(dependency.getActualFile());
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
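
The diff adds the task itself but not the code that schedules it, so the wiring below is only an illustrative sketch: the helper name, the thread-pool sizing, and the pre-built List<AnalysisTask> are assumptions for illustration, not part of this change.

```java
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

// Hypothetical helper: run prepared AnalysisTask instances in parallel and
// wait for all of them to complete before the next analysis phase starts.
static void runInParallel(List<AnalysisTask> tasks) throws InterruptedException {
    final ExecutorService executor =
            Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
    try {
        for (Future<Void> future : executor.invokeAll(tasks)) {
            try {
                // AnalysisTask.call() already records analyzer errors in its
                // shared exceptions list, so only unexpected failures surface here.
                future.get();
            } catch (ExecutionException ex) {
                throw new IllegalStateException("Unexpected analysis failure", ex);
            }
        }
    } finally {
        executor.shutdown();
    }
}
```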
|
||||
@@ -21,7 +21,6 @@ import org.owasp.dependencycheck.analyzer.AnalysisPhase;
|
||||
import org.owasp.dependencycheck.analyzer.Analyzer;
|
||||
import org.owasp.dependencycheck.analyzer.AnalyzerService;
|
||||
import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.ConnectionFactory;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
@@ -29,9 +28,9 @@ import org.owasp.dependencycheck.data.update.CachedWebDataSource;
|
||||
import org.owasp.dependencycheck.data.update.UpdateService;
|
||||
import org.owasp.dependencycheck.data.update.exception.UpdateException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.exception.NoDataException;
|
||||
import org.owasp.dependencycheck.exception.ExceptionCollection;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.exception.NoDataException;
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
@@ -41,12 +40,19 @@ import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CancellationException;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* Scans files, directories, etc. for Dependencies. Analyzers are loaded and
|
||||
@@ -61,7 +67,7 @@ public class Engine implements FileFilter {
|
||||
/**
|
||||
* The list of dependencies.
|
||||
*/
|
||||
private List<Dependency> dependencies = new ArrayList<Dependency>();
|
||||
private final List<Dependency> dependencies = Collections.synchronizedList(new ArrayList<Dependency>());
|
||||
/**
|
||||
* A Map of analyzers grouped by Analysis phase.
|
||||
*/
|
||||
@@ -156,11 +162,17 @@ public class Engine implements FileFilter {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the dependencies identified.
|
||||
* Get the dependencies identified. The returned list is a reference to the
|
||||
* engine's synchronized list. <b>You must synchronize on the returned
|
||||
* list</b> when you modify and iterate over it from multiple threads. E.g.
|
||||
* this holds for analyzers supporting parallel processing during their
|
||||
* analysis phase.
|
||||
*
|
||||
* @return the dependencies identified
|
||||
* @see Collections#synchronizedList(List)
|
||||
* @see Analyzer#supportsParallelProcessing()
|
||||
*/
|
||||
public List<Dependency> getDependencies() {
|
||||
public synchronized List<Dependency> getDependencies() {
|
||||
return dependencies;
|
||||
}
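
The new Javadoc above shifts the synchronization burden to callers of getDependencies(). A minimal sketch of what that looks like in practice, assuming engine is an already-populated Engine instance:

```java
// Iterating the shared list while analyzers may still be adding to it requires
// holding the list's own monitor, as the Javadoc for getDependencies() notes.
final List<Dependency> deps = engine.getDependencies();
synchronized (deps) {
    for (Dependency d : deps) {
        System.out.println(d.getDisplayFileName());
    }
}
```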
|
||||
|
||||
@@ -170,7 +182,10 @@ public class Engine implements FileFilter {
|
||||
* @param dependencies the dependencies
|
||||
*/
|
||||
public void setDependencies(List<Dependency> dependencies) {
|
||||
this.dependencies = dependencies;
|
||||
synchronized (this.dependencies) {
|
||||
this.dependencies.clear();
|
||||
this.dependencies.addAll(dependencies);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -183,9 +198,24 @@ public class Engine implements FileFilter {
|
||||
* @since v0.3.2.5
|
||||
*/
|
||||
public List<Dependency> scan(String[] paths) {
|
||||
return scan(paths, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans an array of files or directories. If a directory is specified, it
|
||||
* will be scanned recursively. Any dependencies identified are added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param paths an array of paths to files or directories to be analyzed
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of dependencies scanned
|
||||
* @since v1.4.4
|
||||
*/
|
||||
public List<Dependency> scan(String[] paths, String projectReference) {
|
||||
final List<Dependency> deps = new ArrayList<Dependency>();
|
||||
for (String path : paths) {
|
||||
final List<Dependency> d = scan(path);
|
||||
final List<Dependency> d = scan(path, projectReference);
|
||||
if (d != null) {
|
||||
deps.addAll(d);
|
||||
}
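
The projectReference overloads added above let callers record the project or scope in which each dependency was found. A short caller sketch, limited to calls that appear elsewhere in this change, with the "myapp:compile" label chosen purely for illustration and exception handling omitted:

```java
// Hypothetical caller: scan ./lib and tag every resulting Dependency with the
// scope in which it was identified.
final Engine engine = new Engine();
try {
    final List<Dependency> found = engine.scan(new String[]{"./lib"}, "myapp:compile");
    // each entry in 'found' now carries "myapp:compile" as a project reference
} finally {
    engine.cleanup();
}
```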
|
||||
@@ -202,8 +232,23 @@ public class Engine implements FileFilter {
|
||||
* @return the list of dependencies scanned
|
||||
*/
|
||||
public List<Dependency> scan(String path) {
|
||||
return scan(path, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a given file or directory. If a directory is specified, it will be
|
||||
* scanned recursively. Any dependencies identified are added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param path the path to a file or directory to be analyzed
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of dependencies scanned
|
||||
* @since v1.4.4
|
||||
*/
|
||||
public List<Dependency> scan(String path, String projectReference) {
|
||||
final File file = new File(path);
|
||||
return scan(file);
|
||||
return scan(file, projectReference);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -216,9 +261,24 @@ public class Engine implements FileFilter {
|
||||
* @since v0.3.2.5
|
||||
*/
|
||||
public List<Dependency> scan(File[] files) {
|
||||
return scan(files, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans an array of files or directories. If a directory is specified, it
|
||||
* will be scanned recursively. Any dependencies identified are added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param files an array of paths to files or directories to be analyzed.
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of dependencies
|
||||
* @since v1.4.4
|
||||
*/
|
||||
public List<Dependency> scan(File[] files, String projectReference) {
|
||||
final List<Dependency> deps = new ArrayList<Dependency>();
|
||||
for (File file : files) {
|
||||
final List<Dependency> d = scan(file);
|
||||
final List<Dependency> d = scan(file, projectReference);
|
||||
if (d != null) {
|
||||
deps.addAll(d);
|
||||
}
|
||||
@@ -236,9 +296,24 @@ public class Engine implements FileFilter {
|
||||
* @since v0.3.2.5
|
||||
*/
|
||||
public List<Dependency> scan(Collection<File> files) {
|
||||
return scan(files, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a collection of files or directories. If a directory is specified,
|
||||
* it will be scanned recursively. Any dependencies identified are added to
|
||||
* the dependency collection.
|
||||
*
|
||||
* @param files a set of paths to files or directories to be analyzed
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of dependencies scanned
|
||||
* @since v1.4.4
|
||||
*/
|
||||
public List<Dependency> scan(Collection<File> files, String projectReference) {
|
||||
final List<Dependency> deps = new ArrayList<Dependency>();
|
||||
for (File file : files) {
|
||||
final List<Dependency> d = scan(file);
|
||||
final List<Dependency> d = scan(file, projectReference);
|
||||
if (d != null) {
|
||||
deps.addAll(d);
|
||||
}
|
||||
@@ -256,11 +331,26 @@ public class Engine implements FileFilter {
|
||||
* @since v0.3.2.4
|
||||
*/
|
||||
public List<Dependency> scan(File file) {
|
||||
return scan(file, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a given file or directory. If a directory is specified, it will be
|
||||
* scanned recursively. Any dependencies identified are added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param file the path to a file or directory to be analyzed
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of dependencies scanned
|
||||
* @since v1.4.4
|
||||
*/
|
||||
public List<Dependency> scan(File file, String projectReference) {
|
||||
if (file.exists()) {
|
||||
if (file.isDirectory()) {
|
||||
return scanDirectory(file);
|
||||
return scanDirectory(file, projectReference);
|
||||
} else {
|
||||
final Dependency d = scanFile(file);
|
||||
final Dependency d = scanFile(file, projectReference);
|
||||
if (d != null) {
|
||||
final List<Dependency> deps = new ArrayList<Dependency>();
|
||||
deps.add(d);
|
||||
@@ -279,17 +369,31 @@ public class Engine implements FileFilter {
|
||||
* @return the list of Dependency objects scanned
|
||||
*/
|
||||
protected List<Dependency> scanDirectory(File dir) {
|
||||
return scanDirectory(dir, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively scans files and directories. Any dependencies identified are
|
||||
* added to the dependency collection.
|
||||
*
|
||||
* @param dir the directory to scan
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of Dependency objects scanned
|
||||
* @since v1.4.4
|
||||
*/
|
||||
protected List<Dependency> scanDirectory(File dir, String projectReference) {
|
||||
final File[] files = dir.listFiles();
|
||||
final List<Dependency> deps = new ArrayList<Dependency>();
|
||||
if (files != null) {
|
||||
for (File f : files) {
|
||||
if (f.isDirectory()) {
|
||||
final List<Dependency> d = scanDirectory(f);
|
||||
final List<Dependency> d = scanDirectory(f, projectReference);
|
||||
if (d != null) {
|
||||
deps.addAll(d);
|
||||
}
|
||||
} else {
|
||||
final Dependency d = scanFile(f);
|
||||
final Dependency d = scanFile(f, projectReference);
|
||||
deps.add(d);
|
||||
}
|
||||
}
|
||||
@@ -305,14 +409,54 @@ public class Engine implements FileFilter {
|
||||
* @return the scanned dependency
|
||||
*/
|
||||
protected Dependency scanFile(File file) {
|
||||
return scanFile(file, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a specified file. If a dependency is identified it is added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param file The file to scan
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the scanned dependency
|
||||
* @since v1.4.4
|
||||
*/
|
||||
protected Dependency scanFile(File file, String projectReference) {
|
||||
Dependency dependency = null;
|
||||
if (file.isFile()) {
|
||||
if (accept(file)) {
|
||||
dependency = new Dependency(file);
|
||||
dependencies.add(dependency);
|
||||
if (projectReference != null) {
|
||||
dependency.addProjectReference(projectReference);
|
||||
}
|
||||
final String sha1 = dependency.getSha1sum();
|
||||
boolean found = false;
|
||||
synchronized (dependencies) {
|
||||
if (sha1 != null) {
|
||||
for (Dependency existing : dependencies) {
|
||||
if (sha1.equals(existing.getSha1sum())) {
|
||||
found = true;
|
||||
if (projectReference != null) {
|
||||
existing.addProjectReference(projectReference);
|
||||
}
|
||||
if (existing.getActualFilePath() != null && dependency.getActualFilePath() != null
|
||||
&& !existing.getActualFilePath().equals(dependency.getActualFilePath())) {
|
||||
existing.addRelatedDependency(dependency);
|
||||
} else {
|
||||
dependency = existing;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
dependencies.add(dependency);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
LOGGER.debug("Path passed to scanFile(File) is not a file: {}. Skipping the file.", file);
|
||||
}
|
||||
} else {
|
||||
LOGGER.debug("Path passed to scanFile(File) is not a file: {}. Skipping the file.", file);
|
||||
}
|
||||
return dependency;
|
||||
}
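The de-duplication above keys on the file's SHA-1. The stripped-down sketch below illustrates the same idea outside the Engine; it deliberately omits the related-dependency handling for identical hashes found at different paths.

import java.io.File;
import java.util.HashMap;
import java.util.Map;
import org.owasp.dependencycheck.dependency.Dependency;

// Simplified illustration of the de-duplication idea: one Dependency per SHA-1,
// additional sightings only contribute extra project references.
public final class Sha1DedupSketch {
    private final Map<String, Dependency> bySha1 = new HashMap<String, Dependency>();

    public Dependency register(File file, String projectReference) {
        final Dependency candidate = new Dependency(file);
        final String sha1 = candidate.getSha1sum();
        Dependency existing = sha1 == null ? null : bySha1.get(sha1);
        if (existing == null) {
            existing = candidate;
            if (sha1 != null) {
                bySha1.put(sha1, existing);
            }
        }
        if (projectReference != null) {
            existing.addProjectReference(projectReference);
        }
        return existing;
    }
}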
|
||||
@@ -323,7 +467,7 @@ public class Engine implements FileFilter {
|
||||
* iterates over a copy of the dependencies list. Thus, the potential for
|
||||
* {@link java.util.ConcurrentModificationException}s is avoided, and
|
||||
* analyzers may safely add or remove entries from the dependencies list.
|
||||
*
|
||||
* <p>
|
||||
* Every effort is made to complete analysis on the dependencies. In some
|
||||
* cases an exception will occur with part of the analysis being performed
|
||||
* which may not affect the entire analysis. If an exception occurs it will
|
||||
@@ -333,7 +477,7 @@ public class Engine implements FileFilter {
|
||||
* during analysis
|
||||
*/
|
||||
public void analyzeDependencies() throws ExceptionCollection {
|
||||
final List<Throwable> exceptions = new ArrayList<Throwable>();
|
||||
final List<Throwable> exceptions = Collections.synchronizedList(new ArrayList<Throwable>());
|
||||
boolean autoUpdate = true;
|
||||
try {
|
||||
autoUpdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
|
||||
@@ -356,60 +500,36 @@ public class Engine implements FileFilter {
|
||||
try {
|
||||
ensureDataExists();
|
||||
} catch (NoDataException ex) {
|
||||
LOGGER.error("{}\n\nUnable to continue dependency-check analysis.", ex.getMessage());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
throw new ExceptionCollection("Unable to continue dependency-check analysis.", exceptions, true);
|
||||
throwFatalExceptionCollection("Unable to continue dependency-check analysis.", ex, exceptions);
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.error("{}\n\nUnable to continue dependency-check analysis.", ex.getMessage());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
throw new ExceptionCollection("Unable to connect to the dependency-check database", exceptions, true);
|
||||
throwFatalExceptionCollection("Unable to connect to the dependency-check database.", ex, exceptions);
|
||||
}
|
||||
|
||||
LOGGER.debug("\n----------------------------------------------------\nBEGIN ANALYSIS\n----------------------------------------------------");
|
||||
LOGGER.info("Analysis Starting");
|
||||
LOGGER.info("Analysis Started");
|
||||
final long analysisStart = System.currentTimeMillis();
|
||||
|
||||
// analysis phases
|
||||
for (AnalysisPhase phase : AnalysisPhase.values()) {
|
||||
final List<Analyzer> analyzerList = analyzers.get(phase);
|
||||
|
||||
for (Analyzer a : analyzerList) {
|
||||
for (final Analyzer analyzer : analyzerList) {
|
||||
final long analyzerStart = System.currentTimeMillis();
|
||||
try {
|
||||
a = initializeAnalyzer(a);
|
||||
initializeAnalyzer(analyzer);
|
||||
} catch (InitializationException ex) {
|
||||
exceptions.add(ex);
|
||||
continue;
|
||||
}
|
||||
|
||||
/* need to create a copy of the collection because some of the
|
||||
* analyzers may modify it. This prevents ConcurrentModificationExceptions.
|
||||
* This is okay for adds/deletes because it happens per analyzer.
|
||||
*/
|
||||
LOGGER.debug("Begin Analyzer '{}'", a.getName());
|
||||
final Set<Dependency> dependencySet = new HashSet<Dependency>(dependencies);
|
||||
for (Dependency d : dependencySet) {
|
||||
boolean shouldAnalyze = true;
|
||||
if (a instanceof FileTypeAnalyzer) {
|
||||
final FileTypeAnalyzer fAnalyzer = (FileTypeAnalyzer) a;
|
||||
shouldAnalyze = fAnalyzer.accept(d.getActualFile());
|
||||
}
|
||||
if (shouldAnalyze) {
|
||||
LOGGER.debug("Begin Analysis of '{}'", d.getActualFilePath());
|
||||
try {
|
||||
a.analyze(d, this);
|
||||
} catch (AnalysisException ex) {
|
||||
LOGGER.warn("An error occurred while analyzing '{}'.", d.getActualFilePath());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
} catch (Throwable ex) {
|
||||
//final AnalysisException ax = new AnalysisException(axMsg, ex);
|
||||
LOGGER.warn("An unexpected error occurred during analysis of '{}'", d.getActualFilePath());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
}
|
||||
}
|
||||
if (analyzer.isEnabled()) {
|
||||
executeAnalysisTasks(analyzer, exceptions);
|
||||
|
||||
final long analyzerDurationMillis = System.currentTimeMillis() - analyzerStart;
|
||||
final long analyzerDurationSeconds = TimeUnit.MILLISECONDS.toSeconds(analyzerDurationMillis);
|
||||
LOGGER.info("Finished {} ({} seconds)", analyzer.getName(), analyzerDurationSeconds);
|
||||
} else {
|
||||
LOGGER.debug("Skipping {} (not enabled)", analyzer.getName());
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -422,9 +542,80 @@ public class Engine implements FileFilter {
|
||||
}
|
||||
|
||||
LOGGER.debug("\n----------------------------------------------------\nEND ANALYSIS\n----------------------------------------------------");
|
||||
LOGGER.info("Analysis Complete ({} ms)", System.currentTimeMillis() - analysisStart);
|
||||
final long analysisDurationSeconds = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - analysisStart);
|
||||
LOGGER.info("Analysis Complete ({} seconds)", analysisDurationSeconds);
|
||||
if (exceptions.size() > 0) {
|
||||
throw new ExceptionCollection("One or more exceptions occured during dependency-check analysis", exceptions);
|
||||
throw new ExceptionCollection("One or more exceptions occurred during dependency-check analysis", exceptions);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes the analyzer using multiple threads.
|
||||
*
|
||||
* @param exceptions a collection of exceptions that occurred during
|
||||
* analysis
|
||||
* @param analyzer the analyzer to execute
|
||||
* @throws ExceptionCollection thrown if exceptions occurred during analysis
|
||||
*/
|
||||
void executeAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) throws ExceptionCollection {
|
||||
LOGGER.debug("Starting {}", analyzer.getName());
|
||||
final List<AnalysisTask> analysisTasks = getAnalysisTasks(analyzer, exceptions);
|
||||
final ExecutorService executorService = getExecutorService(analyzer);
|
||||
|
||||
try {
|
||||
final List<Future<Void>> results = executorService.invokeAll(analysisTasks, 10, TimeUnit.MINUTES);
|
||||
|
||||
// ensure there was no exception during execution
|
||||
for (Future<Void> result : results) {
|
||||
try {
|
||||
result.get();
|
||||
} catch (ExecutionException e) {
|
||||
throwFatalExceptionCollection("Analysis task failed with a fatal exception.", e, exceptions);
|
||||
} catch (CancellationException e) {
|
||||
throwFatalExceptionCollection("Analysis task timed out.", e, exceptions);
|
||||
}
|
||||
}
|
||||
} catch (InterruptedException e) {
|
||||
throwFatalExceptionCollection("Analysis has been interrupted.", e, exceptions);
|
||||
} finally {
|
||||
executorService.shutdown();
|
||||
}
|
||||
}
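The pattern here is invokeAll with a timeout followed by inspecting each Future; below is a stand-alone sketch of that pattern using plain JDK types (the 10-minute timeout mirrors the value above, everything else is illustrative).

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

// Stand-alone illustration of the invokeAll-with-timeout pattern used above.
public final class TimedBatchExample {
    public static void main(String[] args) throws InterruptedException {
        final ExecutorService pool = Executors.newFixedThreadPool(2);
        final List<Callable<Void>> tasks = Arrays.<Callable<Void>>asList(
                new Callable<Void>() {
                    @Override
                    public Void call() {
                        return null; // a real task would analyze one dependency
                    }
                });
        try {
            // Tasks still running when the timeout expires are cancelled;
            // their Future.get() then throws CancellationException.
            final List<Future<Void>> results = pool.invokeAll(tasks, 10, TimeUnit.MINUTES);
            for (Future<Void> result : results) {
                try {
                    result.get();
                } catch (ExecutionException ex) {
                    System.err.println("task failed: " + ex.getCause());
                } catch (CancellationException ex) {
                    System.err.println("task timed out");
                }
            }
        } finally {
            pool.shutdown();
        }
    }
}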
|
||||
|
||||
/**
|
||||
* Returns the analysis tasks for the dependencies.
|
||||
*
|
||||
* @param analyzer the analyzer to create tasks for
|
||||
* @param exceptions the collection of exceptions to collect
|
||||
* @return a collection of analysis tasks
|
||||
*/
|
||||
List<AnalysisTask> getAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) {
|
||||
final List<AnalysisTask> result = new ArrayList<AnalysisTask>();
|
||||
synchronized (dependencies) {
|
||||
for (final Dependency dependency : dependencies) {
|
||||
final AnalysisTask task = new AnalysisTask(analyzer, dependency, this, exceptions, Settings.getInstance());
|
||||
result.add(task);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the executor service for a given analyzer.
|
||||
*
|
||||
* @param analyzer the analyzer to obtain an executor
|
||||
* @return the executor service
|
||||
*/
|
||||
ExecutorService getExecutorService(Analyzer analyzer) {
|
||||
if (analyzer.supportsParallelProcessing()) {
|
||||
// just a fair trade-off that should be reasonable for all analyzer types
|
||||
final int maximumNumberOfThreads = 4 * Runtime.getRuntime().availableProcessors();
|
||||
|
||||
LOGGER.debug("Parallel processing with up to {} threads: {}.", maximumNumberOfThreads, analyzer.getName());
|
||||
return Executors.newFixedThreadPool(maximumNumberOfThreads);
|
||||
} else {
|
||||
LOGGER.debug("Parallel processing is not supported: {}.", analyzer.getName());
|
||||
return Executors.newSingleThreadExecutor();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -539,6 +730,16 @@ public class Engine implements FileFilter {
|
||||
return this.fileTypeAnalyzers;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a file type analyzer. This has been added solely to assist in unit
|
||||
* testing the Engine.
|
||||
*
|
||||
* @param fta the file type analyzer to add
|
||||
*/
|
||||
protected void addFileTypeAnalyzer(FileTypeAnalyzer fta) {
|
||||
this.fileTypeAnalyzers.add(fta);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks the CPE Index to ensure documents exist. If none exist a
|
||||
* NoDataException is thrown.
|
||||
@@ -560,4 +761,20 @@ public class Engine implements FileFilter {
|
||||
cve.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs and throws a fatal exception collection.
|
||||
*
|
||||
* @param message the exception message
|
||||
* @param throwable the cause
|
||||
* @param exceptions a collection of exceptions to include
|
||||
* @throws ExceptionCollection a collection of exceptions that occurred
|
||||
* during analysis
|
||||
*/
|
||||
private void throwFatalExceptionCollection(String message, Throwable throwable, List<Throwable> exceptions) throws ExceptionCollection {
|
||||
LOGGER.error("{}\n\n{}", throwable.getMessage(), message);
|
||||
LOGGER.debug("", throwable);
|
||||
exceptions.add(throwable);
|
||||
throw new ExceptionCollection(message, exceptions, true);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,24 +17,123 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* Base class for analyzers to avoid code duplication of initialize and close
|
||||
* as most analyzers do not need these methods.
|
||||
* Base class for analyzers to avoid code duplication of initialize and close as
|
||||
* most analyzers do not need these methods.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public abstract class AbstractAnalyzer implements Analyzer {
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractAnalyzer.class);
|
||||
/**
|
||||
* A flag indicating whether or not the analyzer is enabled.
|
||||
*/
|
||||
private volatile boolean enabled = true;
|
||||
|
||||
/**
|
||||
* Get the value of enabled.
|
||||
*
|
||||
* @return the value of enabled
|
||||
*/
|
||||
@Override
|
||||
public boolean isEnabled() {
|
||||
return enabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of enabled.
|
||||
*
|
||||
* @param enabled new value of enabled
|
||||
*/
|
||||
public void setEnabled(boolean enabled) {
|
||||
this.enabled = enabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
protected abstract String getAnalyzerEnabledSettingKey();
|
||||
|
||||
/**
|
||||
* Analyzes a given dependency. If the dependency is an archive, such as a
|
||||
* WAR or EAR, the contents are extracted, scanned, and added to the list of
|
||||
* dependencies within the engine.
|
||||
*
|
||||
* @param dependency the dependency to analyze
|
||||
* @param engine the engine scanning
|
||||
* @throws AnalysisException thrown if there is an analysis exception
|
||||
*/
|
||||
protected abstract void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException;
|
||||
|
||||
/**
|
||||
* Initializes a given Analyzer. This will be skipped if the analyzer is disabled.
|
||||
*
|
||||
* @throws InitializationException thrown if there is an exception
|
||||
*/
|
||||
protected void initializeAnalyzer() throws InitializationException {
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes a given Analyzer. This will be skipped if the analyzer is disabled.
|
||||
*
|
||||
* @throws Exception thrown if there is an exception
|
||||
*/
|
||||
protected void closeAnalyzer() throws Exception {
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Analyzes a given dependency. If the dependency is an archive, such as a
|
||||
* WAR or EAR, the contents are extracted, scanned, and added to the list of
|
||||
* dependencies within the engine.
|
||||
*
|
||||
* @param dependency the dependency to analyze
|
||||
* @param engine the engine scanning
|
||||
* @throws AnalysisException thrown if there is an analysis exception
|
||||
*/
|
||||
@Override
|
||||
public final void analyze(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
if (this.isEnabled()) {
|
||||
analyzeDependency(dependency, engine);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes the analyzer, determining whether it is enabled via its configured setting key.
|
||||
*
|
||||
* @throws InitializationException thrown if there is an exception
|
||||
*/
|
||||
@Override
|
||||
public void initialize() throws InitializationException {
|
||||
//do nothing
|
||||
public final void initialize() throws InitializationException {
|
||||
final String key = getAnalyzerEnabledSettingKey();
|
||||
try {
|
||||
this.setEnabled(Settings.getBoolean(key, true));
|
||||
} catch (InvalidSettingException ex) {
|
||||
LOGGER.warn("Invalid setting for property '{}'", key);
|
||||
LOGGER.debug("", ex);
|
||||
}
|
||||
|
||||
if (isEnabled()) {
|
||||
initializeAnalyzer();
|
||||
} else {
|
||||
LOGGER.debug("{} has been disabled", getName());
|
||||
}
|
||||
}
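Under this template, a concrete analyzer only supplies its name, phase, enabled-setting key, and per-dependency logic. A minimal hedged sketch follows; the class name and setting key are hypothetical and not part of the patch.

package org.owasp.dependencycheck.analyzer;

import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;

// Hypothetical analyzer showing the template-method contract: subclasses only
// supply the enabled-setting key, the phase, and the per-dependency analysis.
public class ExampleAnalyzer extends AbstractAnalyzer {

    @Override
    public String getName() {
        return "Example Analyzer";
    }

    @Override
    public AnalysisPhase getAnalysisPhase() {
        return AnalysisPhase.INFORMATION_COLLECTION;
    }

    @Override
    protected String getAnalyzerEnabledSettingKey() {
        return "analyzer.example.enabled"; // illustrative key only
    }

    @Override
    protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
        // a real analyzer would add evidence or identifiers here
    }
}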
|
||||
|
||||
/**
|
||||
@@ -43,7 +142,20 @@ public abstract class AbstractAnalyzer implements Analyzer {
|
||||
* @throws Exception thrown if there is an exception
|
||||
*/
|
||||
@Override
|
||||
public void close() throws Exception {
|
||||
//do nothing
|
||||
public final void close() throws Exception {
|
||||
if (isEnabled()) {
|
||||
closeAnalyzer();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* The default is to support parallel processing.
|
||||
*
|
||||
* @return true
|
||||
*/
|
||||
@Override
|
||||
public boolean supportsParallelProcessing() {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,11 +17,6 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@@ -40,17 +35,7 @@ import org.owasp.dependencycheck.exception.InitializationException;
|
||||
*/
|
||||
public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implements FileTypeAnalyzer {
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="Constructor">
|
||||
/**
|
||||
* Base constructor that all children must call. This checks the
|
||||
* configuration to determine if the analyzer is enabled.
|
||||
*/
|
||||
public AbstractFileTypeAnalyzer() {
|
||||
reset();
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="Field definitions">
|
||||
//<editor-fold defaultstate="collapsed" desc="Field definitions, getters, and setters ">
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
@@ -80,30 +65,24 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
|
||||
this.filesMatched = filesMatched;
|
||||
}
|
||||
|
||||
//</editor-fold>
|
||||
//<editor-fold defaultstate="collapsed" desc="Final implementations for the Analyzer interface">
|
||||
/**
|
||||
* A flag indicating whether or not the analyzer is enabled.
|
||||
*/
|
||||
private boolean enabled = true;
|
||||
|
||||
/**
|
||||
* Get the value of enabled.
|
||||
* Initializes the analyzer.
|
||||
*
|
||||
* @return the value of enabled
|
||||
* @throws InitializationException thrown if there is an exception during
|
||||
* initialization
|
||||
*/
|
||||
public boolean isEnabled() {
|
||||
return enabled;
|
||||
@Override
|
||||
protected final void initializeAnalyzer() throws InitializationException {
|
||||
if (filesMatched) {
|
||||
initializeFileTypeAnalyzer();
|
||||
} else {
|
||||
this.setEnabled(false);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of enabled.
|
||||
*
|
||||
* @param enabled new value of enabled
|
||||
*/
|
||||
public void setEnabled(boolean enabled) {
|
||||
this.enabled = enabled;
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
//</editor-fold>
|
||||
//<editor-fold defaultstate="collapsed" desc="Abstract methods children must implement">
|
||||
/**
|
||||
* <p>
|
||||
@@ -127,80 +106,21 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
|
||||
*/
|
||||
protected abstract void initializeFileTypeAnalyzer() throws InitializationException;
|
||||
|
||||
//</editor-fold>
|
||||
/**
|
||||
* Analyzes a given dependency. If the dependency is an archive, such as a
|
||||
* WAR or EAR, the contents are extracted, scanned, and added to the list of
|
||||
* dependencies within the engine.
|
||||
* Determines if the file can be analyzed by the analyzer.
|
||||
*
|
||||
* @param dependency the dependency to analyze
|
||||
* @param engine the engine scanning
|
||||
* @throws AnalysisException thrown if there is an analysis exception
|
||||
* @param pathname the path to the file
|
||||
* @return true if the file can be analyzed by the given analyzer; otherwise
|
||||
* false
|
||||
*/
|
||||
protected abstract void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
protected abstract String getAnalyzerEnabledSettingKey();
|
||||
|
||||
//</editor-fold>
|
||||
//<editor-fold defaultstate="collapsed" desc="Final implementations for the Analyzer interface">
|
||||
/**
|
||||
* Initializes the analyzer.
|
||||
*
|
||||
* @throws InitializationException thrown if there is an exception during
|
||||
* initialization
|
||||
*/
|
||||
@Override
|
||||
public final void initialize() throws InitializationException {
|
||||
if (filesMatched) {
|
||||
initializeFileTypeAnalyzer();
|
||||
} else {
|
||||
enabled = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resets the enabled flag on the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public final void reset() {
|
||||
final String key = getAnalyzerEnabledSettingKey();
|
||||
try {
|
||||
enabled = Settings.getBoolean(key, true);
|
||||
} catch (InvalidSettingException ex) {
|
||||
LOGGER.warn("Invalid setting for property '{}'", key);
|
||||
LOGGER.debug("", ex);
|
||||
LOGGER.warn("{} has been disabled", getName());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes a given dependency. If the dependency is an archive, such as a
|
||||
* WAR or EAR, the contents are extracted, scanned, and added to the list of
|
||||
* dependencies within the engine.
|
||||
*
|
||||
* @param dependency the dependency to analyze
|
||||
* @param engine the engine scanning
|
||||
* @throws AnalysisException thrown if there is an analysis exception
|
||||
*/
|
||||
@Override
|
||||
public final void analyze(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
if (enabled) {
|
||||
analyzeFileType(dependency, engine);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean accept(File pathname) {
|
||||
final FileFilter filter = getFileFilter();
|
||||
boolean accepted = false;
|
||||
if (null == filter) {
|
||||
LOGGER.error("The '{}' analyzer is misconfigured and does not have a file filter; it will be disabled", getName());
|
||||
} else if (enabled) {
|
||||
} else if (this.isEnabled()) {
|
||||
accepted = filter.accept(pathname);
|
||||
if (accepted) {
|
||||
filesMatched = true;
|
||||
@@ -209,8 +129,6 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
|
||||
return accepted;
|
||||
}
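For file-type analyzers the contract adds a file filter and a file-type initializer. A hedged sketch of a minimal subclass follows; the class name, extension, setting key, and the FileFilterBuilder import path are assumptions based on the usage visible elsewhere in this patch.

package org.owasp.dependencycheck.analyzer;

import java.io.FileFilter;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.FileFilterBuilder;

// Hypothetical file-type analyzer: the base class handles the enabled and
// filesMatched bookkeeping, the subclass supplies the filter and the analysis.
public class ExampleFileTypeAnalyzer extends AbstractFileTypeAnalyzer {

    private static final FileFilter FILTER =
            FileFilterBuilder.newInstance().addExtensions(".example").build();

    @Override
    public String getName() {
        return "Example File Type Analyzer";
    }

    @Override
    public AnalysisPhase getAnalysisPhase() {
        return AnalysisPhase.INFORMATION_COLLECTION;
    }

    @Override
    protected FileFilter getFileFilter() {
        return FILTER;
    }

    @Override
    protected String getAnalyzerEnabledSettingKey() {
        return "analyzer.example.enabled"; // illustrative key only
    }

    @Override
    protected void initializeFileTypeAnalyzer() throws InitializationException {
        // nothing to set up for this sketch
    }

    @Override
    protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
        // inspect dependency.getActualFile() and add evidence here
    }
}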
|
||||
|
||||
//</editor-fold>
|
||||
//<editor-fold defaultstate="collapsed" desc="Static utility methods">
|
||||
/**
|
||||
* <p>
|
||||
* Utility method to help in the creation of the extensions set. This
|
||||
@@ -227,6 +145,4 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
|
||||
Collections.addAll(set, strings);
|
||||
return set;
|
||||
}
|
||||
|
||||
//</editor-fold>
|
||||
}
|
||||
|
||||
@@ -67,8 +67,7 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
|
||||
* @throws InitializationException thrown if there is an exception
|
||||
*/
|
||||
@Override
|
||||
public void initialize() throws InitializationException {
|
||||
super.initialize();
|
||||
public void initializeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
loadSuppressionData();
|
||||
} catch (SuppressionParseException ex) {
|
||||
@@ -108,7 +107,8 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
|
||||
final SuppressionParser parser = new SuppressionParser();
|
||||
File file = null;
|
||||
try {
|
||||
rules = parser.parseSuppressionRules(this.getClass().getClassLoader().getResourceAsStream("dependencycheck-base-suppression.xml"));
|
||||
final InputStream in = this.getClass().getClassLoader().getResourceAsStream("dependencycheck-base-suppression.xml");
|
||||
rules = parser.parseSuppressionRules(in);
|
||||
} catch (SAXException ex) {
|
||||
throw new SuppressionParseException("Unable to parse the base suppression data file", ex);
|
||||
}
|
||||
@@ -130,20 +130,36 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
|
||||
}
|
||||
} else {
|
||||
file = new File(suppressionFilePath);
|
||||
InputStream suppressionsFromClasspath = null;
|
||||
if (!file.exists()) {
|
||||
final InputStream suppressionsFromClasspath = this.getClass().getClassLoader().getResourceAsStream(suppressionFilePath);
|
||||
if (suppressionsFromClasspath != null) {
|
||||
deleteTempFile = true;
|
||||
file = FileUtils.getTempFile("suppression", "xml");
|
||||
try {
|
||||
org.apache.commons.io.FileUtils.copyInputStreamToFile(suppressionsFromClasspath, file);
|
||||
} catch (IOException ex) {
|
||||
throwSuppressionParseException("Unable to locate suppressions file in classpath", ex);
|
||||
try {
|
||||
suppressionsFromClasspath = this.getClass().getClassLoader().getResourceAsStream(suppressionFilePath);
|
||||
if (suppressionsFromClasspath != null) {
|
||||
deleteTempFile = true;
|
||||
file = FileUtils.getTempFile("suppression", "xml");
|
||||
try {
|
||||
org.apache.commons.io.FileUtils.copyInputStreamToFile(suppressionsFromClasspath, file);
|
||||
} catch (IOException ex) {
|
||||
throwSuppressionParseException("Unable to locate suppressions file in classpath", ex);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
if (suppressionsFromClasspath != null) {
|
||||
try {
|
||||
suppressionsFromClasspath.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Failed to close stream", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (file != null) {
|
||||
if (!file.exists()) {
|
||||
final String msg = String.format("Suppression file '%s' does not exists", file.getPath());
|
||||
LOGGER.warn(msg);
|
||||
throw new SuppressionParseException(msg);
|
||||
}
|
||||
try {
|
||||
rules.addAll(parser.parseSuppressionRules(file));
|
||||
LOGGER.debug("{} suppression rules were loaded.", rules.size());
|
||||
@@ -157,6 +173,8 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
|
||||
throwSuppressionParseException("Unable to fetch the configured suppression file", ex);
|
||||
} catch (MalformedURLException ex) {
|
||||
throwSuppressionParseException("Configured suppression file has an invalid URL", ex);
|
||||
} catch (SuppressionParseException ex) {
|
||||
throw ex;
|
||||
} catch (IOException ex) {
|
||||
throwSuppressionParseException("Unable to create temp file for suppressions", ex);
|
||||
} finally {
|
||||
|
||||
@@ -36,6 +36,10 @@ public enum AnalysisPhase {
|
||||
* Information collection phase.
|
||||
*/
|
||||
INFORMATION_COLLECTION,
|
||||
/**
|
||||
* Post information collection phase.
|
||||
*/
|
||||
POST_INFORMATION_COLLECTION,
|
||||
/**
|
||||
* Pre identifier analysis phase.
|
||||
*/
|
||||
|
||||
@@ -75,4 +75,18 @@ public interface Analyzer {
|
||||
* @throws Exception is thrown if an exception occurs closing the analyzer.
|
||||
*/
|
||||
void close() throws Exception;
|
||||
|
||||
/**
|
||||
* Returns whether multiple instances of the same type of analyzer can run in parallel.
|
||||
* Note that running analyzers of different types in parallel is not supported at all.
|
||||
*
|
||||
* @return {@code true} if the analyzer supports parallel processing, {@code false} else
|
||||
*/
|
||||
boolean supportsParallelProcessing();
|
||||
/**
|
||||
* Get the value of enabled.
|
||||
*
|
||||
* @return the value of enabled
|
||||
*/
|
||||
boolean isEnabled();
|
||||
}
|
||||
|
||||
@@ -18,17 +18,14 @@
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.Closeable;
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Enumeration;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
@@ -207,7 +204,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* files
|
||||
*/
|
||||
@Override
|
||||
public void close() throws Exception {
|
||||
public void closeAnalyzer() throws Exception {
|
||||
if (tempFileLocation != null && tempFileLocation.exists()) {
|
||||
LOGGER.debug("Attempting to delete temporary files");
|
||||
final boolean success = FileUtils.delete(tempFileLocation);
|
||||
@@ -220,6 +217,18 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Does not support parallel processing as it both modifies and iterates
|
||||
* over the engine's list of dependencies.
|
||||
*
|
||||
* @see #analyzeDependency(Dependency, Engine)
|
||||
* @see #findMoreDependencies(Engine, File)
|
||||
*/
|
||||
@Override
|
||||
public boolean supportsParallelProcessing() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes a given dependency. If the dependency is an archive, such as a
|
||||
* WAR or EAR, the contents are extracted, scanned, and added to the list of
|
||||
@@ -230,31 +239,48 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @throws AnalysisException thrown if there is an analysis exception
|
||||
*/
|
||||
@Override
|
||||
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
final File f = new File(dependency.getActualFilePath());
|
||||
final File tmpDir = getNextTempDirectory();
|
||||
extractFiles(f, tmpDir, engine);
|
||||
|
||||
//make a copy
|
||||
final Set<Dependency> dependencySet = findMoreDependencies(engine, tmpDir);
|
||||
if (!dependencySet.isEmpty()) {
|
||||
for (Dependency d : dependencySet) {
|
||||
//fix the dependency's display name and path
|
||||
final String displayPath = String.format("%s%s",
|
||||
dependency.getFilePath(),
|
||||
d.getActualFilePath().substring(tmpDir.getAbsolutePath().length()));
|
||||
final String displayName = String.format("%s: %s",
|
||||
dependency.getFileName(),
|
||||
d.getFileName());
|
||||
d.setFilePath(displayPath);
|
||||
d.setFileName(displayName);
|
||||
final List<Dependency> dependencySet = findMoreDependencies(engine, tmpDir);
|
||||
|
||||
//TODO - can we get more evidence from the parent? EAR contains module name, etc.
|
||||
//analyze the dependency (i.e. extract files) if it is a supported type.
|
||||
if (this.accept(d.getActualFile()) && scanDepth < MAX_SCAN_DEPTH) {
|
||||
scanDepth += 1;
|
||||
analyze(d, engine);
|
||||
scanDepth -= 1;
|
||||
if (dependencySet != null && !dependencySet.isEmpty()) {
|
||||
for (Dependency d : dependencySet) {
|
||||
if (d.getFilePath().startsWith(tmpDir.getAbsolutePath())) {
|
||||
//fix the dependency's display name and path
|
||||
final String displayPath = String.format("%s%s",
|
||||
dependency.getFilePath(),
|
||||
d.getActualFilePath().substring(tmpDir.getAbsolutePath().length()));
|
||||
final String displayName = String.format("%s: %s",
|
||||
dependency.getFileName(),
|
||||
d.getFileName());
|
||||
d.setFilePath(displayPath);
|
||||
d.setFileName(displayName);
|
||||
d.setProjectReferences(dependency.getProjectReferences());
|
||||
|
||||
//TODO - can we get more evidence from the parent? EAR contains module name, etc.
|
||||
//analyze the dependency (i.e. extract files) if it is a supported type.
|
||||
if (this.accept(d.getActualFile()) && scanDepth < MAX_SCAN_DEPTH) {
|
||||
scanDepth += 1;
|
||||
analyze(d, engine);
|
||||
scanDepth -= 1;
|
||||
}
|
||||
} else {
|
||||
for (Dependency sub : dependencySet) {
|
||||
if (sub.getFilePath().startsWith(tmpDir.getAbsolutePath())) {
|
||||
final String displayPath = String.format("%s%s",
|
||||
dependency.getFilePath(),
|
||||
sub.getActualFilePath().substring(tmpDir.getAbsolutePath().length()));
|
||||
final String displayName = String.format("%s: %s",
|
||||
dependency.getFileName(),
|
||||
sub.getFileName());
|
||||
sub.setFilePath(displayPath);
|
||||
sub.setFileName(displayName);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -279,30 +305,37 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
final String fileName = dependency.getFileName();
|
||||
|
||||
LOGGER.info("The zip file '{}' appears to be a JAR file, making a copy and analyzing it as a JAR.", fileName);
|
||||
|
||||
final File tmpLoc = new File(tdir, fileName.substring(0, fileName.length() - 3) + "jar");
|
||||
//store the archive's sha1 and change it so that the engine doesn't think the zip and jar file are the same
|
||||
// and add it as a related dependency.
|
||||
final String archiveSha1 = dependency.getSha1sum();
|
||||
try {
|
||||
org.apache.commons.io.FileUtils.copyFile(tdir, tmpLoc);
|
||||
final Set<Dependency> dependencySet = findMoreDependencies(engine, tmpLoc);
|
||||
if (!dependencySet.isEmpty()) {
|
||||
if (dependencySet.size() != 1) {
|
||||
LOGGER.info("Deep copy of ZIP to JAR file resulted in more than one dependency?");
|
||||
}
|
||||
dependency.setSha1sum("");
|
||||
org.apache.commons.io.FileUtils.copyFile(dependency.getActualFile(), tmpLoc);
|
||||
final List<Dependency> dependencySet = findMoreDependencies(engine, tmpLoc);
|
||||
if (dependencySet != null && !dependencySet.isEmpty()) {
|
||||
for (Dependency d : dependencySet) {
|
||||
//fix the dependency's display name and path
|
||||
d.setFilePath(dependency.getFilePath());
|
||||
d.setDisplayFileName(dependency.getFileName());
|
||||
if (d.getActualFile().equals(tmpLoc)) {
|
||||
d.setFilePath(dependency.getFilePath());
|
||||
d.setDisplayFileName(dependency.getFileName());
|
||||
} else {
|
||||
for (Dependency sub : d.getRelatedDependencies()) {
|
||||
if (sub.getActualFile().equals(tmpLoc)) {
|
||||
sub.setFilePath(dependency.getFilePath());
|
||||
sub.setDisplayFileName(dependency.getFileName());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Unable to perform deep copy on '{}'", dependency.getActualFile().getPath(), ex);
|
||||
} finally {
|
||||
dependency.setSha1sum(archiveSha1);
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* An empty dependency set.
|
||||
*/
|
||||
private static final Set<Dependency> EMPTY_DEPENDENCY_SET = Collections.emptySet();
|
||||
|
||||
/**
|
||||
* Scan the given file/folder, and return any new dependencies found.
|
||||
@@ -311,20 +344,9 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @param file target of scanning
|
||||
* @return any dependencies that weren't known to the engine before
|
||||
*/
|
||||
private static Set<Dependency> findMoreDependencies(Engine engine, File file) {
|
||||
final List<Dependency> before = new ArrayList<Dependency>(engine.getDependencies());
|
||||
engine.scan(file);
|
||||
final List<Dependency> after = engine.getDependencies();
|
||||
final boolean sizeChanged = before.size() != after.size();
|
||||
final Set<Dependency> newDependencies;
|
||||
if (sizeChanged) {
|
||||
//get the new dependencies
|
||||
newDependencies = new HashSet<Dependency>(after);
|
||||
newDependencies.removeAll(before);
|
||||
} else {
|
||||
newDependencies = EMPTY_DEPENDENCY_SET;
|
||||
}
|
||||
return newDependencies;
|
||||
private static List<Dependency> findMoreDependencies(Engine engine, File file) {
|
||||
final List<Dependency> added = engine.scan(file);
|
||||
return added;
|
||||
}
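Because Engine.scan(File) now returns exactly the dependencies it added, callers no longer need a before/after set difference. A small usage sketch (method and class names are illustrative):

import java.io.File;
import java.util.List;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.dependency.Dependency;

// Illustration of the simplified contract: scan(File) reports exactly the
// dependencies it added, so no before/after diffing is required.
public final class FindMoreDependenciesSketch {
    public static void report(Engine engine, File extractedDir) {
        final List<Dependency> added = engine.scan(extractedDir);
        if (added != null && !added.isEmpty()) {
            for (Dependency d : added) {
                System.out.println("new dependency: " + d.getActualFilePath());
            }
        }
    }
}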
|
||||
|
||||
/**
|
||||
@@ -357,32 +379,49 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*/
|
||||
private void extractFiles(File archive, File destination, Engine engine) throws AnalysisException {
|
||||
if (archive != null && destination != null) {
|
||||
FileInputStream fis;
|
||||
String archiveExt = FileUtils.getFileExtension(archive.getName());
|
||||
if (archiveExt == null) {
|
||||
return;
|
||||
}
|
||||
archiveExt = archiveExt.toLowerCase();
|
||||
|
||||
final FileInputStream fis;
|
||||
try {
|
||||
fis = new FileInputStream(archive);
|
||||
} catch (FileNotFoundException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
throw new AnalysisException("Archive file was not found.", ex);
|
||||
}
|
||||
final String archiveExt = FileUtils.getFileExtension(archive.getName()).toLowerCase();
|
||||
BufferedInputStream in = null;
|
||||
ZipArchiveInputStream zin = null;
|
||||
TarArchiveInputStream tin = null;
|
||||
GzipCompressorInputStream gin = null;
|
||||
BZip2CompressorInputStream bzin = null;
|
||||
try {
|
||||
if (ZIPPABLES.contains(archiveExt)) {
|
||||
final BufferedInputStream in = new BufferedInputStream(fis);
|
||||
in = new BufferedInputStream(fis);
|
||||
ensureReadableJar(archiveExt, in);
|
||||
extractArchive(new ZipArchiveInputStream(in), destination, engine);
|
||||
zin = new ZipArchiveInputStream(in);
|
||||
extractArchive(zin, destination, engine);
|
||||
} else if ("tar".equals(archiveExt)) {
|
||||
extractArchive(new TarArchiveInputStream(new BufferedInputStream(fis)), destination, engine);
|
||||
in = new BufferedInputStream(fis);
|
||||
tin = new TarArchiveInputStream(in);
|
||||
extractArchive(tin, destination, engine);
|
||||
} else if ("gz".equals(archiveExt) || "tgz".equals(archiveExt)) {
|
||||
final String uncompressedName = GzipUtils.getUncompressedFilename(archive.getName());
|
||||
final File f = new File(destination, uncompressedName);
|
||||
if (engine.accept(f)) {
|
||||
decompressFile(new GzipCompressorInputStream(new BufferedInputStream(fis)), f);
|
||||
in = new BufferedInputStream(fis);
|
||||
gin = new GzipCompressorInputStream(in);
|
||||
decompressFile(gin, f);
|
||||
}
|
||||
} else if ("bz2".equals(archiveExt) || "tbz2".equals(archiveExt)) {
|
||||
final String uncompressedName = BZip2Utils.getUncompressedFilename(archive.getName());
|
||||
final File f = new File(destination, uncompressedName);
|
||||
if (engine.accept(f)) {
|
||||
decompressFile(new BZip2CompressorInputStream(new BufferedInputStream(fis)), f);
|
||||
in = new BufferedInputStream(fis);
|
||||
bzin = new BZip2CompressorInputStream(in);
|
||||
decompressFile(bzin, f);
|
||||
}
|
||||
}
|
||||
} catch (ArchiveExtractionException ex) {
|
||||
@@ -392,7 +431,14 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
LOGGER.warn("Exception reading archive '{}'.", archive.getName());
|
||||
LOGGER.debug("", ex);
|
||||
} finally {
|
||||
close(fis);
|
||||
//overly verbose and not needed... but keeping it anyway due to
|
||||
//having issues with file handles being left open
|
||||
FileUtils.close(fis);
|
||||
FileUtils.close(in);
|
||||
FileUtils.close(zin);
|
||||
FileUtils.close(tin);
|
||||
FileUtils.close(gin);
|
||||
FileUtils.close(bzin);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -414,8 +460,9 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
if ("jar".equals(archiveExt) && in.markSupported()) {
|
||||
in.mark(7);
|
||||
final byte[] b = new byte[7];
|
||||
in.read(b);
|
||||
if (b[0] == '#'
|
||||
final int read = in.read(b);
|
||||
if (read == 7
|
||||
&& b[0] == '#'
|
||||
&& b[1] == '!'
|
||||
&& b[2] == '/'
|
||||
&& b[3] == 'b'
|
||||
@@ -441,6 +488,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
in.reset();
|
||||
}
|
||||
}
|
||||
}
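The check above sniffs a shell-script launcher ("#!/b...") prepended to an executable jar via mark/read/reset; below is a stand-alone sketch of that sniffing step.

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.Charset;

// Stand-alone illustration of the mark/read/reset sniffing used above to detect
// a "#!/bin/..." launcher script prepended to an executable jar.
public final class ShebangSniffExample {
    public static boolean startsWithShebang(BufferedInputStream in) throws IOException {
        in.mark(7);
        final byte[] b = new byte[7];
        final int read = in.read(b);
        final boolean shebang = read == 7 && b[0] == '#' && b[1] == '!' && b[2] == '/' && b[3] == 'b';
        // Reset so the archive stream is read from the beginning afterwards.
        in.reset();
        return shebang;
    }

    public static void main(String[] args) throws IOException {
        final byte[] data = "#!/bin/sh\nexec java -jar $0\n".getBytes(Charset.forName("US-ASCII"));
        System.out.println(startsWithShebang(new BufferedInputStream(new ByteArrayInputStream(data))));
    }
}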
|
||||
@@ -471,7 +520,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
} catch (Throwable ex) {
|
||||
throw new ArchiveExtractionException(ex);
|
||||
} finally {
|
||||
close(input);
|
||||
FileUtils.close(input);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -502,7 +551,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
final String msg = String.format("IO Exception while parsing file '%s'.", file.getName());
|
||||
throw new AnalysisException(msg, ex);
|
||||
} finally {
|
||||
close(fos);
|
||||
FileUtils.close(fos);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -527,23 +576,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
LOGGER.debug("", ex);
|
||||
throw new ArchiveExtractionException(ex);
|
||||
} finally {
|
||||
close(out);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Close the given {@link Closeable} instance, ignoring nulls, and logging
|
||||
* any thrown {@link IOException}.
|
||||
*
|
||||
* @param closeable to be closed
|
||||
*/
|
||||
private static void close(Closeable closeable) {
|
||||
if (null != closeable) {
|
||||
try {
|
||||
closeable.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.trace("", ex);
|
||||
}
|
||||
FileUtils.close(out);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -37,7 +37,6 @@ import org.w3c.dom.Document;
|
||||
import org.xml.sax.SAXException;
|
||||
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.xpath.XPath;
|
||||
import javax.xml.xpath.XPathExpressionException;
|
||||
import javax.xml.xpath.XPathFactory;
|
||||
@@ -45,6 +44,8 @@ import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.apache.commons.lang3.SystemUtils;
|
||||
import org.owasp.dependencycheck.utils.XmlUtils;
|
||||
|
||||
/**
|
||||
* Analyzer for getting company, product, and version information from a .NET
|
||||
@@ -71,10 +72,6 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* The temp value for GrokAssembly.exe
|
||||
*/
|
||||
private File grokAssemblyExe = null;
|
||||
/**
|
||||
* The DocumentBuilder for parsing the XML
|
||||
*/
|
||||
private DocumentBuilder builder;
|
||||
/**
|
||||
* Logger
|
||||
*/
|
||||
@@ -85,18 +82,19 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*
|
||||
* @return the list of arguments to begin populating the ProcessBuilder
|
||||
*/
|
||||
private List<String> buildArgumentList() {
|
||||
protected List<String> buildArgumentList() {
|
||||
// Use file.separator as a wild guess as to whether this is Windows
|
||||
final List<String> args = new ArrayList<String>();
|
||||
if (!"\\".equals(System.getProperty("file.separator"))) {
|
||||
if (!SystemUtils.IS_OS_WINDOWS) {
|
||||
if (Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH) != null) {
|
||||
args.add(Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH));
|
||||
} else {
|
||||
} else if (isInPath("mono")) {
|
||||
args.add("mono");
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
args.add(grokAssemblyExe.getPath());
|
||||
|
||||
return args;
|
||||
}
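The lookup order here is: explicit mono path setting, then PATH. A hedged configuration sketch follows; the settings key comes from the patch, while the static Settings.setString setter and the binary path are assumptions.

import org.owasp.dependencycheck.utils.Settings;

// Illustrative configuration only: point the Assembly Analyzer at a specific
// mono binary instead of relying on the PATH lookup below.
public final class MonoPathConfigExample {
    public static void configure() {
        Settings.setString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, "/usr/local/bin/mono");
    }
}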
|
||||
|
||||
@@ -108,7 +106,7 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @throws AnalysisException if anything goes sideways
|
||||
*/
|
||||
@Override
|
||||
public void analyzeFileType(Dependency dependency, Engine engine)
|
||||
public void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
if (grokAssemblyExe == null) {
|
||||
LOGGER.warn("GrokAssembly didn't get deployed");
|
||||
@@ -116,11 +114,16 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
final List<String> args = buildArgumentList();
|
||||
if (args == null) {
|
||||
LOGGER.warn("Assembly Analyzer was unable to execute");
|
||||
return;
|
||||
}
|
||||
args.add(dependency.getActualFilePath());
|
||||
final ProcessBuilder pb = new ProcessBuilder(args);
|
||||
Document doc = null;
|
||||
try {
|
||||
final Process proc = pb.start();
|
||||
final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
|
||||
|
||||
doc = builder.parse(proc.getInputStream());
|
||||
|
||||
@@ -170,10 +173,16 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
product, Confidence.HIGH));
|
||||
}
|
||||
|
||||
} catch (ParserConfigurationException pce) {
|
||||
throw new AnalysisException("Error initializing the assembly analyzer", pce);
|
||||
} catch (IOException ioe) {
|
||||
throw new AnalysisException(ioe);
|
||||
} catch (SAXException saxe) {
|
||||
throw new AnalysisException("Couldn't parse GrokAssembly result", saxe);
|
||||
LOGGER.error("----------------------------------------------------");
|
||||
LOGGER.error("Failed to read the Assembly Analyzer results. "
|
||||
+ "On some systems mono-runtime and mono-devel need to be installed.");
|
||||
LOGGER.error("----------------------------------------------------");
|
||||
throw new AnalysisException("Couldn't parse Assembly Analzyzer results (GrokAssembly)", saxe);
|
||||
} catch (XPathExpressionException xpe) {
|
||||
// This shouldn't happen
|
||||
throw new AnalysisException(xpe);
|
||||
@@ -203,8 +212,6 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
IOUtils.copy(is, fos);
|
||||
|
||||
grokAssemblyExe = tempFile;
|
||||
// Set the temp file to get deleted when we're done
|
||||
grokAssemblyExe.deleteOnExit();
|
||||
LOGGER.debug("Extracted GrokAssembly.exe to {}", grokAssemblyExe.getPath());
|
||||
} catch (IOException ioe) {
|
||||
this.setEnabled(false);
|
||||
@@ -229,13 +236,31 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
// Now, need to see if GrokAssembly actually runs from this location.
|
||||
final List<String> args = buildArgumentList();
|
||||
//TODO this creates an "unreported" error - if someone doesn't look
|
||||
// at the command output this could easily be missed (especially in an
|
||||
// Ant or Maven build).
|
||||
//
|
||||
// We need to create a non-fatal warning error type that will
|
||||
// get added to the report.
|
||||
//TODO this idea needs to be replicated to the bundle audit analyzer.
|
||||
if (args == null) {
|
||||
setEnabled(false);
|
||||
LOGGER.error("----------------------------------------------------");
|
||||
LOGGER.error(".NET Assembly Analyzer could not be initialized and at least one "
|
||||
+ "'exe' or 'dll' was scanned. The 'mono' executable could not be found on "
|
||||
+ "the path; either disable the Assembly Analyzer or configure the path mono. "
|
||||
+ "On some systems mono-runtime and mono-devel need to be installed.");
|
||||
LOGGER.error("----------------------------------------------------");
|
||||
return;
|
||||
}
|
||||
try {
|
||||
final ProcessBuilder pb = new ProcessBuilder(args);
|
||||
final Process p = pb.start();
|
||||
// Try evacuating the error stream
|
||||
IOUtils.copy(p.getErrorStream(), NullOutputStream.NULL_OUTPUT_STREAM);
|
||||
|
||||
final Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(p.getInputStream());
|
||||
final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
|
||||
final Document doc = builder.parse(p.getInputStream());
|
||||
final XPath xpath = XPathFactory.newInstance().newXPath();
|
||||
final String error = xpath.evaluate("/assembly/error", doc);
|
||||
if (p.waitFor() != 1 || error == null || error.isEmpty()) {
|
||||
@@ -246,6 +271,7 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
throw new InitializationException("Could not execute .NET AssemblyAnalyzer");
|
||||
}
|
||||
} catch (InitializationException e) {
|
||||
setEnabled(false);
|
||||
throw e;
|
||||
} catch (Throwable e) {
|
||||
LOGGER.warn("An error occurred with the .NET AssemblyAnalyzer;\n"
|
||||
@@ -254,12 +280,6 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("An error occurred with the .NET AssemblyAnalyzer", e);
|
||||
}
|
||||
try {
|
||||
builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
|
||||
} catch (ParserConfigurationException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Error initializing the assembly analyzer", ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -268,14 +288,15 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @throws Exception thrown if there is a problem closing the analyzer
|
||||
*/
|
||||
@Override
|
||||
public void close() throws Exception {
|
||||
super.close();
|
||||
public void closeAnalyzer() throws Exception {
|
||||
try {
|
||||
if (grokAssemblyExe != null && !grokAssemblyExe.delete()) {
|
||||
LOGGER.debug("Unable to delete temporary GrokAssembly.exe; attempting delete on exit");
|
||||
grokAssemblyExe.deleteOnExit();
|
||||
}
|
||||
} catch (SecurityException se) {
|
||||
LOGGER.debug("Can't delete temporary GrokAssembly.exe");
|
||||
grokAssemblyExe.deleteOnExit();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -320,4 +341,29 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tests to see if a file is in the system path. <b>Note</b> - the current
|
||||
* implementation only works on non-windows platforms. For purposes of the
|
||||
* AssemblyAnalyzer this is okay as this is only needed on Mac/*nix.
|
||||
*
|
||||
* @param file the executable to look for
|
||||
* @return <code>true</code> if the file exists; otherwise
|
||||
* <code>false</code>
|
||||
*/
|
||||
private boolean isInPath(String file) {
|
||||
final ProcessBuilder pb = new ProcessBuilder("which", file);
|
||||
try {
|
||||
final Process proc = pb.start();
|
||||
final int retCode = proc.waitFor();
|
||||
if (retCode == 0) {
|
||||
return true;
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Path seach failed for " + file);
|
||||
} catch (InterruptedException ex) {
|
||||
LOGGER.debug("Path seach failed for " + file);
|
||||
}
|
||||
return false;
|
||||
}
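As the javadoc notes, this lookup only works on non-Windows platforms. A hypothetical cross-platform variant (not part of the patch) could switch to the Windows "where" command:

import java.io.IOException;
import org.apache.commons.lang3.SystemUtils;

// Hypothetical cross-platform variant: Windows ships a "where" command that
// plays roughly the same role as "which" on *nix.
public final class PathLookup {
    public static boolean isInPath(String executable) {
        final String locator = SystemUtils.IS_OS_WINDOWS ? "where" : "which";
        final ProcessBuilder pb = new ProcessBuilder(locator, executable);
        try {
            final Process proc = pb.start();
            return proc.waitFor() == 0;
        } catch (IOException ex) {
            return false;
        } catch (InterruptedException ex) {
            Thread.currentThread().interrupt();
            return false;
        }
    }
}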
|
||||
}
|
||||
|
||||
@@ -31,8 +31,6 @@ import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
@@ -156,7 +154,7 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void analyzeFileType(Dependency dependency, Engine engine)
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
final File actualFile = dependency.getActualFile();
|
||||
final String name = actualFile.getName();
|
||||
@@ -178,11 +176,7 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// copy, alter and set in case some other thread is iterating over
|
||||
final List<Dependency> dependencies = new ArrayList<Dependency>(
|
||||
engine.getDependencies());
|
||||
dependencies.remove(dependency);
|
||||
engine.setDependencies(dependencies);
|
||||
engine.getDependencies().remove(dependency);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -92,24 +92,10 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(".cmake")
|
||||
.addFilenames("CMakeLists.txt").build();
|
||||
|
||||
/**
|
||||
* A reference to SHA1 message digest.
|
||||
*/
|
||||
private static MessageDigest sha1 = null;
|
||||
|
||||
static {
|
||||
try {
|
||||
sha1 = MessageDigest.getInstance("SHA1");
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
LOGGER.error(e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the CMake analyzer.
|
||||
*
|
||||
* @return the name of the analyzer
|
||||
*
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
@@ -137,13 +123,19 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* No-op initializer implementation.
|
||||
* Initializes the analyzer.
|
||||
*
|
||||
* @throws InitializationException never thrown
|
||||
* @throws InitializationException thrown if an exception occurs getting an
|
||||
* instance of SHA1
|
||||
*/
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
// Nothing to do here.
|
||||
try {
|
||||
getSha1MessageDigest();
|
||||
} catch (IllegalStateException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Unable to create SHA1 MessageDigest", ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -155,7 +147,7 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* analyzing the dependency
|
||||
*/
|
||||
@Override
|
||||
protected void analyzeFileType(Dependency dependency, Engine engine)
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
final File file = dependency.getActualFile();
|
||||
final String parentName = file.getParentFile().getName();
|
||||
@@ -196,6 +188,9 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @param engine the dependency-check engine
|
||||
* @param contents the version information
|
||||
*/
|
||||
@edu.umd.cs.findbugs.annotations.SuppressFBWarnings(
|
||||
value = "DM_DEFAULT_ENCODING",
|
||||
justification = "Default encoding is only used if UTF-8 is not available")
|
||||
private void analyzeSetVersionCommand(Dependency dependency, Engine engine, String contents) {
|
||||
Dependency currentDep = dependency;
|
||||
|
||||
@@ -226,6 +221,7 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
} catch (UnsupportedEncodingException ex) {
|
||||
path = filePath.getBytes();
|
||||
}
|
||||
final MessageDigest sha1 = getSha1MessageDigest();
|
||||
currentDep.setSha1sum(Checksum.getHex(sha1.digest(path)));
|
||||
engine.getDependencies().add(currentDep);
|
||||
}
|
||||
@@ -242,4 +238,18 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_CMAKE_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the sha1 message digest.
|
||||
*
|
||||
* @return the sha1 message digest
|
||||
*/
|
||||
private MessageDigest getSha1MessageDigest() {
|
||||
try {
|
||||
return MessageDigest.getInstance("SHA1");
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
LOGGER.error(e.getMessage());
|
||||
throw new IllegalStateException("Failed to obtain the SHA1 message digest.", e);
|
||||
}
|
||||
}
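The analyzer uses this digest to derive a synthetic SHA-1 from the file path so each discovered product/version gets a distinct checksum. A stand-alone sketch of that hashing step follows, assuming Checksum.getHex returns the hex string as its use above suggests.

import java.io.UnsupportedEncodingException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.owasp.dependencycheck.utils.Checksum;

// Stand-alone illustration of deriving a synthetic SHA-1 from a path string,
// mirroring how the analyzer gives each virtual dependency a distinct checksum.
public final class PathShaExample {
    public static String sha1OfPath(String filePath) throws NoSuchAlgorithmException {
        byte[] path;
        try {
            path = filePath.getBytes("UTF-8");
        } catch (UnsupportedEncodingException ex) {
            path = filePath.getBytes(); // fall back to the platform default encoding
        }
        final MessageDigest sha1 = MessageDigest.getInstance("SHA1");
        return Checksum.getHex(sha1.digest(path));
    }
}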
|
||||
}
|
||||
|
||||
@@ -25,6 +25,8 @@ import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.StringTokenizer;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import org.apache.commons.lang3.builder.CompareToBuilder;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.index.CorruptIndexException;
|
||||
import org.apache.lucene.queryparser.classic.ParseException;
|
||||
@@ -48,6 +50,7 @@ import org.owasp.dependencycheck.dependency.VulnerableSoftware;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.DependencyVersion;
|
||||
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@@ -58,7 +61,7 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class CPEAnalyzer implements Analyzer {
|
||||
public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
/**
|
||||
* The Logger.
|
||||
@@ -120,7 +123,14 @@ public class CPEAnalyzer implements Analyzer {
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return AnalysisPhase.IDENTIFIER_ANALYSIS;
|
||||
}
|
||||
|
||||
/**
|
||||
* The default is to support parallel processing.
|
||||
* @return false
|
||||
*/
|
||||
@Override
|
||||
public boolean supportsParallelProcessing() {
|
||||
return false;
|
||||
}
|
||||
/**
|
||||
* Creates the CPE Lucene Index.
|
||||
*
|
||||
@@ -128,7 +138,7 @@ public class CPEAnalyzer implements Analyzer {
|
||||
* the index.
|
||||
*/
|
||||
@Override
|
||||
public void initialize() throws InitializationException {
|
||||
public void initializeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
this.open();
|
||||
} catch (IOException ex) {
|
||||
@@ -154,10 +164,10 @@ public class CPEAnalyzer implements Analyzer {
|
||||
cve.open();
|
||||
cpe = CpeMemoryIndex.getInstance();
|
||||
try {
|
||||
LOGGER.info("Creating the CPE Index");
|
||||
final long creationStart = System.currentTimeMillis();
|
||||
cpe.open(cve);
|
||||
LOGGER.info("CPE Index Created ({} ms)", System.currentTimeMillis() - creationStart);
|
||||
final long creationSeconds = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - creationStart);
|
||||
LOGGER.info("Created CPE Index ({} seconds)", creationSeconds);
|
||||
} catch (IndexException ex) {
|
||||
LOGGER.debug("IndexException", ex);
|
||||
throw new DatabaseException(ex);
|
||||
@@ -169,7 +179,7 @@ public class CPEAnalyzer implements Analyzer {
|
||||
* Closes the data sources.
|
||||
*/
|
||||
@Override
|
||||
public void close() {
|
||||
public void closeAnalyzer() {
|
||||
if (cpe != null) {
|
||||
cpe.close();
|
||||
cpe = null;
|
||||
@@ -513,7 +523,7 @@ public class CPEAnalyzer implements Analyzer {
|
||||
* dependency.
|
||||
*/
|
||||
@Override
|
||||
public synchronized void analyze(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
protected synchronized void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
try {
|
||||
determineCPE(dependency);
|
||||
} catch (CorruptIndexException ex) {
|
||||
@@ -550,7 +560,7 @@ public class CPEAnalyzer implements Analyzer {
|
||||
final List<IdentifierMatch> collected = new ArrayList<IdentifierMatch>();
|
||||
|
||||
//TODO the following algorithm incorrectly identifies things as a lower version
|
||||
// if there lower confidence evidence when the current (highest) version number
|
||||
// if there lower confidence evidence when the current (highest) version number
|
||||
// is newer then anything in the NVD.
|
||||
for (Confidence conf : Confidence.values()) {
|
||||
for (Evidence evidence : dependency.getVersionEvidence().iterator(conf)) {
|
||||
@@ -626,6 +636,17 @@ public class CPEAnalyzer implements Analyzer {
|
||||
return identifierAdded;
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_CPE_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* The confidence whether the identifier is an exact match, or a best guess.
|
||||
*/
|
||||
@@ -801,14 +822,11 @@ public class CPEAnalyzer implements Analyzer {
|
||||
*/
|
||||
@Override
|
||||
public int compareTo(IdentifierMatch o) {
|
||||
int conf = this.confidence.compareTo(o.confidence);
|
||||
if (conf == 0) {
|
||||
conf = this.evidenceConfidence.compareTo(o.evidenceConfidence);
|
||||
if (conf == 0) {
|
||||
conf = identifier.compareTo(o.identifier);
|
||||
}
|
||||
}
|
||||
return conf;
|
||||
return new CompareToBuilder()
|
||||
.append(confidence, o.confidence)
|
||||
.append(evidenceConfidence, o.evidenceConfidence)
|
||||
.append(identifier, o.identifier)
|
||||
.toComparison();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -75,7 +75,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
* The analyzer should be disabled if there are errors, so this is a flag to
* determine if such an error has occurred.
*/
private boolean errorFlag = false;
private volatile boolean errorFlag = false;

/**
* The searcher itself.
@@ -193,7 +193,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException when there's an exception during analysis
*/
@Override
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
if (errorFlag || !isEnabled()) {
return;
}
@@ -229,7 +229,8 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.warn("Unable to download pom.xml for {} from Central; "
+ "this could result in undetected CPE/CVEs.", dependency.getFileName());
} finally {
if (pomFile != null && !FileUtils.deleteQuietly(pomFile)) {
if (pomFile != null && pomFile.exists() && !FileUtils.deleteQuietly(pomFile)) {
LOGGER.debug("Failed to delete temporary pom file {}", pomFile.toString());
pomFile.deleteOnExit();
}
}

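The volatile modifier added to errorFlag matters because the analyzers in this changeset can now run on several worker threads: a download failure recorded by one thread has to become visible to the others so they stop calling Central. A short illustrative sketch of that visibility guarantee follows; the class, the sleep interval and the printed message are made up for the example.

import java.util.concurrent.TimeUnit;

public class VolatileFlagSketch {

    /** Without volatile the worker below may never observe the write from the main thread. */
    private static volatile boolean errorFlag = false;

    public static void main(String[] args) throws InterruptedException {
        final Thread worker = new Thread(() -> {
            while (!errorFlag) {
                // stands in for per-dependency work; exits promptly once the flag flips
            }
            System.out.println("worker saw the error flag and stopped querying Central");
        });
        worker.start();
        TimeUnit.MILLISECONDS.sleep(100);
        errorFlag = true; // e.g. set after a failed request to the Central search service
        worker.join();
    }
}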
@@ -0,0 +1,205 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2016 IBM Corporation. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
/**
|
||||
* This analyzer is used to analyze SWIFT and Objective-C packages by collecting
|
||||
* information from .podspec files. CocoaPods dependency manager see
|
||||
* https://cocoapods.org/.
|
||||
*
|
||||
* @author Bianca Jiang (https://twitter.com/biancajiang)
|
||||
*/
|
||||
@Experimental
|
||||
public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
// private static final Logger LOGGER = LoggerFactory.getLogger(CocoaPodsAnalyzer.class);
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
private static final String ANALYZER_NAME = "CocoaPods Package Analyzer";
|
||||
|
||||
/**
|
||||
* The phase that this analyzer is intended to run in.
|
||||
*/
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION;
|
||||
|
||||
/**
|
||||
* The file name to scan.
|
||||
*/
|
||||
public static final String PODSPEC = "podspec";
|
||||
/**
|
||||
* Filter that detects files named "*.podspec".
|
||||
*/
|
||||
private static final FileFilter PODSPEC_FILTER = FileFilterBuilder.newInstance().addExtensions(PODSPEC).build();
|
||||
|
||||
/**
|
||||
* The capture group #1 is the block variable. e.g. "Pod::Spec.new do
|
||||
* |spec|"
|
||||
*/
|
||||
private static final Pattern PODSPEC_BLOCK_PATTERN = Pattern.compile("Pod::Spec\\.new\\s+?do\\s+?\\|(.+?)\\|");
|
||||
|
||||
/**
|
||||
* Returns the FileFilter
|
||||
*
|
||||
* @return the FileFilter
|
||||
*/
|
||||
@Override
|
||||
protected FileFilter getFileFilter() {
|
||||
return PODSPEC_FILTER;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() {
|
||||
// NO-OP
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer.
|
||||
*
|
||||
* @return the name of the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return ANALYZER_NAME;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the phase that the analyzer is intended to run in.
|
||||
*
|
||||
* @return the phase that the analyzer is intended to run in.
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the key used in the properties file to reference the analyzer's
|
||||
* enabled property.
|
||||
*
|
||||
* @return the analyzer's enabled property setting key
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_COCOAPODS_ENABLED;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
|
||||
String contents;
|
||||
try {
|
||||
contents = FileUtils.readFileToString(dependency.getActualFile(), Charset.defaultCharset());
|
||||
} catch (IOException e) {
|
||||
throw new AnalysisException(
|
||||
"Problem occurred while reading dependency file.", e);
|
||||
}
|
||||
final Matcher matcher = PODSPEC_BLOCK_PATTERN.matcher(contents);
|
||||
if (matcher.find()) {
|
||||
contents = contents.substring(matcher.end());
|
||||
final String blockVariable = matcher.group(1);
|
||||
|
||||
final EvidenceCollection vendor = dependency.getVendorEvidence();
|
||||
final EvidenceCollection product = dependency.getProductEvidence();
|
||||
final EvidenceCollection version = dependency.getVersionEvidence();
|
||||
|
||||
final String name = addStringEvidence(product, contents, blockVariable, "name", "name", Confidence.HIGHEST);
|
||||
if (!name.isEmpty()) {
|
||||
vendor.addEvidence(PODSPEC, "name_project", name, Confidence.HIGHEST);
|
||||
}
|
||||
addStringEvidence(product, contents, blockVariable, "summary", "summary", Confidence.HIGHEST);
|
||||
|
||||
addStringEvidence(vendor, contents, blockVariable, "author", "authors?", Confidence.HIGHEST);
|
||||
addStringEvidence(vendor, contents, blockVariable, "homepage", "homepage", Confidence.HIGHEST);
|
||||
addStringEvidence(vendor, contents, blockVariable, "license", "licen[cs]es?", Confidence.HIGHEST);
|
||||
|
||||
addStringEvidence(version, contents, blockVariable, "version", "version", Confidence.HIGHEST);
|
||||
}
|
||||
|
||||
setPackagePath(dependency);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts evidence from the contents and adds it to the given evidence
|
||||
* collection.
|
||||
*
|
||||
* @param evidences the evidence collection to update
|
||||
* @param contents the text to extract evidence from
|
||||
* @param blockVariable the block variable within the content to search for
|
||||
* @param field the name of the field being searched for
|
||||
* @param fieldPattern the field pattern within the contents to search for
|
||||
* @param confidence the confidence level of the evidence if found
|
||||
* @return the string that was added as evidence
|
||||
*/
|
||||
private String addStringEvidence(EvidenceCollection evidences, String contents,
|
||||
String blockVariable, String field, String fieldPattern, Confidence confidence) {
|
||||
String value = "";
|
||||
|
||||
//capture array value between [ ]
|
||||
final Matcher arrayMatcher = Pattern.compile(
|
||||
String.format("\\s*?%s\\.%s\\s*?=\\s*?\\{\\s*?(.*?)\\s*?\\}", blockVariable, fieldPattern),
|
||||
Pattern.CASE_INSENSITIVE).matcher(contents);
|
||||
if (arrayMatcher.find()) {
|
||||
value = arrayMatcher.group(1);
|
||||
} else { //capture single value between quotes
|
||||
final Matcher matcher = Pattern.compile(
|
||||
String.format("\\s*?%s\\.%s\\s*?=\\s*?(['\"])(.*?)\\1", blockVariable, fieldPattern),
|
||||
Pattern.CASE_INSENSITIVE).matcher(contents);
|
||||
if (matcher.find()) {
|
||||
value = matcher.group(2);
|
||||
}
|
||||
}
|
||||
if (value.length() > 0) {
|
||||
evidences.addEvidence(PODSPEC, field, value, confidence);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the package path on the given dependency.
|
||||
*
|
||||
* @param dep the dependency to update
|
||||
*/
|
||||
private void setPackagePath(Dependency dep) {
|
||||
final File file = new File(dep.getFilePath());
|
||||
final String parent = file.getParent();
|
||||
if (parent != null) {
|
||||
dep.setPackagePath(parent);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -24,6 +24,7 @@ import org.owasp.dependencycheck.data.composer.ComposerException;
|
||||
import org.owasp.dependencycheck.data.composer.ComposerLockParser;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.Checksum;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
@@ -36,7 +37,6 @@ import java.io.FileNotFoundException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.security.MessageDigest;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
|
||||
/**
|
||||
* Used to analyze a composer.lock file for a composer PHP app.
|
||||
@@ -85,19 +85,13 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
sha1 = MessageDigest.getInstance("SHA1");
|
||||
} catch (NoSuchAlgorithmException ex) {
|
||||
getSha1MessageDigest();
|
||||
} catch (IllegalStateException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Unable to create SHA1 MmessageDigest", ex);
|
||||
throw new InitializationException("Unable to create SHA1 MessageDigest", ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The MessageDigest for calculating a new digest for the new dependencies
|
||||
* added.
|
||||
*/
|
||||
private MessageDigest sha1 = null;
|
||||
|
||||
/**
|
||||
* Entry point for the analyzer.
|
||||
*
|
||||
@@ -106,7 +100,7 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @throws AnalysisException if there's a failure during analysis
|
||||
*/
|
||||
@Override
|
||||
protected void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
FileInputStream fis = null;
|
||||
try {
|
||||
fis = new FileInputStream(dependency.getActualFile());
|
||||
@@ -117,6 +111,7 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
final Dependency d = new Dependency(dependency.getActualFile());
|
||||
d.setDisplayFileName(String.format("%s:%s/%s", dependency.getDisplayFileName(), dep.getGroup(), dep.getProject()));
|
||||
final String filePath = String.format("%s:%s/%s", dependency.getFilePath(), dep.getGroup(), dep.getProject());
|
||||
final MessageDigest sha1 = getSha1MessageDigest();
|
||||
d.setFilePath(filePath);
|
||||
d.setSha1sum(Checksum.getHex(sha1.digest(filePath.getBytes(Charset.defaultCharset()))));
|
||||
d.getVendorEvidence().addEvidence(COMPOSER_LOCK, "vendor", dep.getGroup(), Confidence.HIGHEST);
|
||||
@@ -169,4 +164,18 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return AnalysisPhase.INFORMATION_COLLECTION;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the sha1 message digest.
|
||||
*
|
||||
* @return the sha1 message digest
|
||||
*/
|
||||
private MessageDigest getSha1MessageDigest() {
|
||||
try {
|
||||
return MessageDigest.getInstance("SHA1");
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
LOGGER.error(e.getMessage());
|
||||
throw new IllegalStateException("Failed to obtain the SHA1 message digest.", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,6 +20,7 @@ package org.owasp.dependencycheck.analyzer;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.xml.suppression.SuppressionRule;

/**
@@ -62,7 +63,7 @@ public class CpeSuppressionAnalyzer extends AbstractSuppressionAnalyzer {
//</editor-fold>

@Override
public void analyze(final Dependency dependency, final Engine engine) throws AnalysisException {
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {

if (getRules() == null || getRules().size() <= 0) {
return;
@@ -72,4 +73,15 @@ public class CpeSuppressionAnalyzer extends AbstractSuppressionAnalyzer {
rule.process(dependency);
}
}

/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_CPE_SUPPRESSION_ENABLED;
}
}

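The CpeSuppressionAnalyzer hunk repeats the refactor that runs through this whole changeset: the public analyze() and analyzeFileType() overrides become a protected analyzeDependency() hook, and every analyzer reports the settings key that switches it on or off. Below is a hedged sketch of what a minimal analyzer looks like against the base class as it appears in these hunks; the class, its name and the borrowed settings key are illustrative, not code added by the commits.

package org.owasp.dependencycheck.analyzer;

import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;

/**
 * Illustrative only; not a class added by these commits.
 */
public class ExampleAnalyzer extends AbstractAnalyzer {

    @Override
    public String getName() {
        return "Example Analyzer";
    }

    @Override
    public AnalysisPhase getAnalysisPhase() {
        return AnalysisPhase.INFORMATION_COLLECTION;
    }

    /** Ties the analyzer to an enabled/disabled property resolved by the base class. */
    @Override
    protected String getAnalyzerEnabledSettingKey() {
        // a real analyzer defines its own key; this one is borrowed from the hunk above
        return Settings.KEYS.ANALYZER_CPE_SUPPRESSION_ENABLED;
    }

    /** The base class decides whether and when to call this; subclasses only do the per-dependency work. */
    @Override
    protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
        // inspect the dependency here and add evidence, identifiers or new dependencies
    }
}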
@@ -30,6 +30,7 @@ import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Identifier;
|
||||
import org.owasp.dependencycheck.utils.DependencyVersion;
|
||||
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@@ -46,7 +47,7 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
/**
|
||||
* The Logger.
|
||||
@@ -58,10 +59,23 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
* A pattern for obtaining the first part of a filename.
|
||||
*/
|
||||
private static final Pattern STARTING_TEXT_PATTERN = Pattern.compile("^[a-zA-Z0-9]*");
|
||||
|
||||
/**
|
||||
* a flag indicating if this analyzer has run. This analyzer only runs once.
|
||||
*/
|
||||
private boolean analyzed = false;
|
||||
|
||||
/**
|
||||
* Returns a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once. Note this is currently only used in the unit tests.
|
||||
*
|
||||
* @return a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once
|
||||
*/
|
||||
protected boolean getAnalyzed() {
|
||||
return analyzed;
|
||||
}
|
||||
|
||||
//</editor-fold>
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
@@ -71,7 +85,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
/**
|
||||
* The phase that this analyzer is intended to run in.
|
||||
*/
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.PRE_FINDING_ANALYSIS;
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.FINAL;
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer.
|
||||
@@ -94,6 +108,29 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* Does not support parallel processing as it only runs once and then
|
||||
* operates on <em>all</em> dependencies.
|
||||
*
|
||||
* @return whether or not parallel processing is enabled
|
||||
* @see #analyze(Dependency, Engine)
|
||||
*/
|
||||
@Override
|
||||
public boolean supportsParallelProcessing() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_DEPENDENCY_BUNDLING_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes a set of dependencies. If they have been found to have the same
|
||||
* base path and the same set of identifiers they are likely related. The
|
||||
@@ -105,7 +142,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
* file.
|
||||
*/
|
||||
@Override
|
||||
public void analyze(Dependency ignore, Engine engine) throws AnalysisException {
|
||||
protected synchronized void analyzeDependency(Dependency ignore, Engine engine) throws AnalysisException {
|
||||
if (!analyzed) {
|
||||
analyzed = true;
|
||||
final Set<Dependency> dependenciesToRemove = new HashSet<Dependency>();
|
||||
@@ -136,6 +173,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
}
|
||||
} else if (cpeIdentifiersMatch(dependency, nextDependency)
|
||||
&& hasSameBasePath(dependency, nextDependency)
|
||||
&& vulnCountMatches(dependency, nextDependency)
|
||||
&& fileNameMatch(dependency, nextDependency)) {
|
||||
if (isCore(dependency, nextDependency)) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
@@ -143,14 +181,6 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
break; //since we merged into the next dependency - skip forward to the next in mainIterator
|
||||
}
|
||||
} else if (isSameRubyGem(dependency, nextDependency)) {
|
||||
final Dependency main = getMainGemspecDependency(dependency, nextDependency);
|
||||
if (main == dependency) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
} else {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
break; //since we merged into the next dependency - skip forward to the next in mainIterator
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -192,7 +222,12 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
* @return a string representing the base path.
|
||||
*/
|
||||
private String getBaseRepoPath(final String path) {
|
||||
int pos = path.indexOf("repository" + File.separator) + 11;
|
||||
int pos;
|
||||
if (path.contains("local-repo")) {
|
||||
pos = path.indexOf("local-repo" + File.separator) + 11;
|
||||
} else {
|
||||
pos = path.indexOf("repository" + File.separator) + 11;
|
||||
}
|
||||
if (pos < 0) {
|
||||
return path;
|
||||
}
|
||||
@@ -285,6 +320,19 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
return matches;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the two dependencies have the same vulnerability count.
|
||||
*
|
||||
* @param dependency1 a dependency2 to compare
|
||||
* @param dependency2 a dependency2 to compare
|
||||
* @return true if the two dependencies have the same vulnerability count
|
||||
*/
|
||||
private boolean vulnCountMatches(Dependency dependency1, Dependency dependency2) {
|
||||
return dependency1.getVulnerabilities() != null && dependency2.getVulnerabilities() != null
|
||||
&& dependency1.getVulnerabilities().size() == dependency2.getVulnerabilities().size();
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if the two dependencies have the same base path.
|
||||
*
|
||||
@@ -302,11 +350,14 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
String right = rFile.getParent();
|
||||
if (left == null) {
|
||||
return right == null;
|
||||
} else if (right == null) {
|
||||
return false;
|
||||
}
|
||||
if (left.equalsIgnoreCase(right)) {
|
||||
return true;
|
||||
}
|
||||
if (left.matches(".*[/\\\\]repository[/\\\\].*") && right.matches(".*[/\\\\]repository[/\\\\].*")) {
|
||||
|
||||
if (left.matches(".*[/\\\\](repository|local-repo)[/\\\\].*") && right.matches(".*[/\\\\](repository|local-repo)[/\\\\].*")) {
|
||||
left = getBaseRepoPath(left);
|
||||
right = getBaseRepoPath(right);
|
||||
}
|
||||
@@ -322,60 +373,6 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Bundling Ruby gems that are identified from different .gemspec files but
|
||||
* denote the same package path. This happens when Ruby bundler installs an
|
||||
* application's dependencies by running "bundle install".
|
||||
*
|
||||
* @param dependency1 dependency to compare
|
||||
* @param dependency2 dependency to compare
|
||||
* @return true if the the dependencies being analyzed appear to be the
|
||||
* same; otherwise false
|
||||
*/
|
||||
private boolean isSameRubyGem(Dependency dependency1, Dependency dependency2) {
|
||||
if (dependency1 == null || dependency2 == null
|
||||
|| !dependency1.getFileName().endsWith(".gemspec")
|
||||
|| !dependency2.getFileName().endsWith(".gemspec")
|
||||
|| dependency1.getPackagePath() == null
|
||||
|| dependency2.getPackagePath() == null) {
|
||||
return false;
|
||||
}
|
||||
if (dependency1.getPackagePath().equalsIgnoreCase(dependency2.getPackagePath())) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Ruby gems installed by "bundle install" can have zero or more *.gemspec
|
||||
* files, all of which have the same packagePath and should be grouped. If
|
||||
* one of these gemspec is from <parent>/specifications/*.gemspec, because
|
||||
* it is a stub with fully resolved gem meta-data created by Ruby bundler,
|
||||
* this dependency should be the main one. Otherwise, use dependency2 as
|
||||
* main.
|
||||
*
|
||||
* This method returns null if any dependency is not from *.gemspec, or the
|
||||
* two do not have the same packagePath. In this case, they should not be
|
||||
* grouped.
|
||||
*
|
||||
* @param dependency1 dependency to compare
|
||||
* @param dependency2 dependency to compare
|
||||
* @return the main dependency; or null if a gemspec is not included in the
|
||||
* analysis
|
||||
*/
|
||||
private Dependency getMainGemspecDependency(Dependency dependency1, Dependency dependency2) {
|
||||
if (isSameRubyGem(dependency1, dependency2)) {
|
||||
final File lFile = dependency1.getActualFile();
|
||||
final File left = lFile.getParentFile();
|
||||
if (left != null && left.getName().equalsIgnoreCase("specifications")) {
|
||||
return dependency1;
|
||||
}
|
||||
return dependency2;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* This is likely a very broken attempt at determining if the 'left'
|
||||
* dependency is the 'core' library in comparison to the 'right' library.
|
||||
@@ -398,10 +395,6 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
|| !rightName.contains("core") && leftName.contains("core")
|
||||
|| !rightName.contains("kernel") && leftName.contains("kernel")) {
|
||||
returnVal = true;
|
||||
// } else if (leftName.matches(".*struts2\\-core.*") && rightName.matches(".*xwork\\-core.*")) {
|
||||
// returnVal = true;
|
||||
// } else if (rightName.matches(".*struts2\\-core.*") && leftName.matches(".*xwork\\-core.*")) {
|
||||
// returnVal = false;
|
||||
} else {
|
||||
/*
|
||||
* considered splitting the names up and comparing the components,
|
||||
@@ -464,6 +457,9 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
* <code>false</code>
|
||||
*/
|
||||
protected boolean firstPathIsShortest(String left, String right) {
|
||||
if (left.contains("dctemp")) {
|
||||
return false;
|
||||
}
|
||||
final String leftPath = left.replace('\\', '/');
|
||||
final String rightPath = right.replace('\\', '/');
|
||||
|
||||
@@ -503,4 +499,5 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
private boolean containedInWar(String filePath) {
|
||||
return filePath == null ? false : filePath.matches(".*\\.(ear|war)[\\\\/].*");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -0,0 +1,283 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.ListIterator;
|
||||
import java.util.Set;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* This analyzer will merge dependencies, created from different source, into a
|
||||
* single dependency.</p>
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class DependencyMergingAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="Constants and Member Variables">
|
||||
/**
|
||||
* The Logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(DependencyMergingAnalyzer.class);
|
||||
/**
|
||||
* a flag indicating if this analyzer has run. This analyzer only runs once.
|
||||
*/
|
||||
private boolean analyzed = false;
|
||||
|
||||
/**
|
||||
* Returns a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once. Note this is currently only used in the unit tests.
|
||||
*
|
||||
* @return a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once
|
||||
*/
|
||||
protected boolean getAnalyzed() {
|
||||
return analyzed;
|
||||
}
|
||||
|
||||
//</editor-fold>
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
private static final String ANALYZER_NAME = "Dependency Merging Analyzer";
|
||||
/**
|
||||
* The phase that this analyzer is intended to run in.
|
||||
*/
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.POST_INFORMATION_COLLECTION;
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer.
|
||||
*
|
||||
* @return the name of the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return ANALYZER_NAME;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the phase that the analyzer is intended to run in.
|
||||
*
|
||||
* @return the phase that the analyzer is intended to run in.
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Does not support parallel processing as it only runs once and then
|
||||
* operates on <em>all</em> dependencies.
|
||||
*
|
||||
* @return whether or not parallel processing is enabled
|
||||
* @see #analyze(Dependency, Engine)
|
||||
*/
|
||||
@Override
|
||||
public boolean supportsParallelProcessing() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_DEPENDENCY_MERGING_ENABLED;
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* Analyzes a set of dependencies. If they have been found to be the same
|
||||
* dependency created by more multiple FileTypeAnalyzers (i.e. a gemspec
|
||||
* dependency and a dependency from the Bundle Audit Analyzer. The
|
||||
* dependencies are then merged into a single reportable item.
|
||||
*
|
||||
* @param ignore this analyzer ignores the dependency being analyzed
|
||||
* @param engine the engine that is scanning the dependencies
|
||||
* @throws AnalysisException is thrown if there is an error reading the JAR
|
||||
* file.
|
||||
*/
|
||||
@Override
|
||||
protected synchronized void analyzeDependency(Dependency ignore, Engine engine) throws AnalysisException {
|
||||
if (!analyzed) {
|
||||
analyzed = true;
|
||||
final Set<Dependency> dependenciesToRemove = new HashSet<Dependency>();
|
||||
final ListIterator<Dependency> mainIterator = engine.getDependencies().listIterator();
|
||||
//for (Dependency nextDependency : engine.getDependencies()) {
|
||||
while (mainIterator.hasNext()) {
|
||||
final Dependency dependency = mainIterator.next();
|
||||
if (mainIterator.hasNext() && !dependenciesToRemove.contains(dependency)) {
|
||||
final ListIterator<Dependency> subIterator = engine.getDependencies().listIterator(mainIterator.nextIndex());
|
||||
while (subIterator.hasNext()) {
|
||||
final Dependency nextDependency = subIterator.next();
|
||||
Dependency main = null;
|
||||
if ((main = getMainGemspecDependency(dependency, nextDependency)) != null) {
|
||||
if (main == dependency) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
} else {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
break; //since we merged into the next dependency - skip forward to the next in mainIterator
|
||||
}
|
||||
} else if ((main = getMainSwiftDependency(dependency, nextDependency)) != null) {
|
||||
if (main == dependency) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
} else {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
break; //since we merged into the next dependency - skip forward to the next in mainIterator
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
//removing dependencies here as ensuring correctness and avoiding ConcurrentUpdateExceptions
|
||||
// was difficult because of the inner iterator.
|
||||
engine.getDependencies().removeAll(dependenciesToRemove);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the relatedDependency to the dependency's related dependencies.
|
||||
*
|
||||
* @param dependency the main dependency
|
||||
* @param relatedDependency a collection of dependencies to be removed from
|
||||
* the main analysis loop, this is the source of dependencies to remove
|
||||
* @param dependenciesToRemove a collection of dependencies that will be
|
||||
* removed from the main analysis loop, this function adds to this
|
||||
* collection
|
||||
*/
|
||||
private void mergeDependencies(final Dependency dependency, final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) {
|
||||
LOGGER.debug("Merging '{}' into '{}'", relatedDependency.getFilePath(), dependency.getFilePath());
|
||||
dependency.addRelatedDependency(relatedDependency);
|
||||
dependency.getVendorEvidence().getEvidence().addAll(relatedDependency.getVendorEvidence().getEvidence());
|
||||
dependency.getProductEvidence().getEvidence().addAll(relatedDependency.getProductEvidence().getEvidence());
|
||||
dependency.getVersionEvidence().getEvidence().addAll(relatedDependency.getVersionEvidence().getEvidence());
|
||||
|
||||
final Iterator<Dependency> i = relatedDependency.getRelatedDependencies().iterator();
|
||||
while (i.hasNext()) {
|
||||
dependency.addRelatedDependency(i.next());
|
||||
i.remove();
|
||||
}
|
||||
if (dependency.getSha1sum().equals(relatedDependency.getSha1sum())) {
|
||||
dependency.addAllProjectReferences(relatedDependency.getProjectReferences());
|
||||
}
|
||||
dependenciesToRemove.add(relatedDependency);
|
||||
}
|
||||
|
||||
/**
|
||||
* Bundling Ruby gems that are identified from different .gemspec files but
|
||||
* denote the same package path. This happens when Ruby bundler installs an
|
||||
* application's dependencies by running "bundle install".
|
||||
*
|
||||
* @param dependency1 dependency to compare
|
||||
* @param dependency2 dependency to compare
|
||||
* @return true if the the dependencies being analyzed appear to be the
|
||||
* same; otherwise false
|
||||
*/
|
||||
private boolean isSameRubyGem(Dependency dependency1, Dependency dependency2) {
|
||||
if (dependency1 == null || dependency2 == null
|
||||
|| !dependency1.getFileName().endsWith(".gemspec")
|
||||
|| !dependency2.getFileName().endsWith(".gemspec")
|
||||
|| dependency1.getPackagePath() == null
|
||||
|| dependency2.getPackagePath() == null) {
|
||||
return false;
|
||||
}
|
||||
return dependency1.getPackagePath().equalsIgnoreCase(dependency2.getPackagePath());
|
||||
}
|
||||
|
||||
/**
|
||||
* Ruby gems installed by "bundle install" can have zero or more *.gemspec
|
||||
* files, all of which have the same packagePath and should be grouped. If
|
||||
* one of these gemspec is from <parent>/specifications/*.gemspec, because
|
||||
* it is a stub with fully resolved gem meta-data created by Ruby bundler,
|
||||
* this dependency should be the main one. Otherwise, use dependency2 as
|
||||
* main.
|
||||
*
|
||||
* This method returns null if any dependency is not from *.gemspec, or the
|
||||
* two do not have the same packagePath. In this case, they should not be
|
||||
* grouped.
|
||||
*
|
||||
* @param dependency1 dependency to compare
|
||||
* @param dependency2 dependency to compare
|
||||
* @return the main dependency; or null if a gemspec is not included in the
|
||||
* analysis
|
||||
*/
|
||||
private Dependency getMainGemspecDependency(Dependency dependency1, Dependency dependency2) {
|
||||
if (isSameRubyGem(dependency1, dependency2)) {
|
||||
final File lFile = dependency1.getActualFile();
|
||||
final File left = lFile.getParentFile();
|
||||
if (left != null && left.getName().equalsIgnoreCase("specifications")) {
|
||||
return dependency1;
|
||||
}
|
||||
return dependency2;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Bundling same swift dependencies with the same packagePath but identified
|
||||
* by different file type analyzers.
|
||||
*
|
||||
* @param dependency1 dependency to test
|
||||
* @param dependency2 dependency to test
|
||||
* @return <code>true</code> if the dependencies appear to be the same;
|
||||
* otherwise <code>false</code>
|
||||
*/
|
||||
private boolean isSameSwiftPackage(Dependency dependency1, Dependency dependency2) {
|
||||
if (dependency1 == null || dependency2 == null
|
||||
|| (!dependency1.getFileName().endsWith(".podspec")
|
||||
&& !dependency1.getFileName().equals("Package.swift"))
|
||||
|| (!dependency2.getFileName().endsWith(".podspec")
|
||||
&& !dependency2.getFileName().equals("Package.swift"))
|
||||
|| dependency1.getPackagePath() == null
|
||||
|| dependency2.getPackagePath() == null) {
|
||||
return false;
|
||||
}
|
||||
return dependency1.getPackagePath().equalsIgnoreCase(dependency2.getPackagePath());
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines which of the swift dependencies should be considered the
|
||||
* primary.
|
||||
*
|
||||
* @param dependency1 the first swift dependency to compare
|
||||
* @param dependency2 the second swift dependency to compare
|
||||
* @return the primary swift dependency
|
||||
*/
|
||||
private Dependency getMainSwiftDependency(Dependency dependency1, Dependency dependency2) {
|
||||
if (isSameSwiftPackage(dependency1, dependency2)) {
|
||||
if (dependency1.getFileName().endsWith(".podspec")) {
|
||||
return dependency1;
|
||||
}
|
||||
return dependency2;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -34,6 +34,7 @@ import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Identifier;
|
||||
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@@ -83,6 +84,16 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_FALSE_POSITIVE_ENABLED;
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
@@ -93,7 +104,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
* @throws AnalysisException is thrown if there is an error reading the JAR file.
|
||||
*/
|
||||
@Override
|
||||
public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
removeJreEntries(dependency);
|
||||
removeBadMatches(dependency);
|
||||
removeBadSpringMatches(dependency);
|
||||
@@ -423,28 +434,30 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
String parentPath = dependency.getFilePath().toLowerCase();
|
||||
if (parentPath.contains(".jar")) {
|
||||
parentPath = parentPath.substring(0, parentPath.indexOf(".jar") + 4);
|
||||
final Dependency parent = findDependency(parentPath, engine.getDependencies());
|
||||
if (parent != null) {
|
||||
boolean remove = false;
|
||||
for (Identifier i : dependency.getIdentifiers()) {
|
||||
if ("cpe".equals(i.getType())) {
|
||||
final String trimmedCPE = trimCpeToVendor(i.getValue());
|
||||
for (Identifier parentId : parent.getIdentifiers()) {
|
||||
if ("cpe".equals(parentId.getType()) && parentId.getValue().startsWith(trimmedCPE)) {
|
||||
remove |= true;
|
||||
final List<Dependency> dependencies = engine.getDependencies();
|
||||
synchronized (dependencies) {
|
||||
final Dependency parent = findDependency(parentPath, dependencies);
|
||||
if (parent != null) {
|
||||
boolean remove = false;
|
||||
for (Identifier i : dependency.getIdentifiers()) {
|
||||
if ("cpe".equals(i.getType())) {
|
||||
final String trimmedCPE = trimCpeToVendor(i.getValue());
|
||||
for (Identifier parentId : parent.getIdentifiers()) {
|
||||
if ("cpe".equals(parentId.getType()) && parentId.getValue().startsWith(trimmedCPE)) {
|
||||
remove |= true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!remove) { //we can escape early
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (!remove) { //we can escape early
|
||||
return;
|
||||
if (remove) {
|
||||
dependencies.remove(dependency);
|
||||
}
|
||||
}
|
||||
if (remove) {
|
||||
engine.getDependencies().remove(dependency);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -27,6 +27,7 @@ import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.utils.DependencyVersion;
|
||||
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
/**
|
||||
*
|
||||
@@ -34,7 +35,7 @@ import org.owasp.dependencycheck.utils.DependencyVersionUtil;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class FileNameAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
public class FileNameAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
@@ -65,16 +66,27 @@ public class FileNameAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_FILE_NAME_ENABLED;
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* Python init files
|
||||
*/
|
||||
//CSOFF: WhitespaceAfter
|
||||
private static final NameFileFilter IGNORED_FILES = new NameFileFilter(new String[]{
|
||||
"__init__.py",
|
||||
"__init__.pyc",
|
||||
"__init__.pyo",
|
||||
});
|
||||
"__init__.pyo",});
|
||||
//CSON: WhitespaceAfter
|
||||
|
||||
/**
|
||||
* Collects information about the file name.
|
||||
@@ -85,7 +97,7 @@ public class FileNameAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
* file.
|
||||
*/
|
||||
@Override
|
||||
public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
|
||||
//strip any path information that may get added by ArchiveAnalyzer, etc.
|
||||
final File f = dependency.getActualFile();
|
||||
@@ -93,26 +105,27 @@ public class FileNameAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
|
||||
//add version evidence
|
||||
final DependencyVersion version = DependencyVersionUtil.parseVersion(fileName);
|
||||
final String packageName = DependencyVersionUtil.parsePreVersion(fileName);
|
||||
if (version != null) {
|
||||
// If the version number is just a number like 2 or 23, reduce the confidence
|
||||
// a shade. This should hopefully correct for cases like log4j.jar or
|
||||
// struts2-core.jar
|
||||
if (version.getVersionParts() == null || version.getVersionParts().size() < 2) {
|
||||
dependency.getVersionEvidence().addEvidence("file", "name",
|
||||
dependency.getVersionEvidence().addEvidence("file", "version",
|
||||
version.toString(), Confidence.MEDIUM);
|
||||
} else {
|
||||
dependency.getVersionEvidence().addEvidence("file", "version",
|
||||
version.toString(), Confidence.HIGHEST);
|
||||
}
|
||||
dependency.getVersionEvidence().addEvidence("file", "name",
|
||||
fileName, Confidence.MEDIUM);
|
||||
packageName, Confidence.MEDIUM);
|
||||
}
|
||||
|
||||
if (!IGNORED_FILES.accept(f)) {
|
||||
dependency.getProductEvidence().addEvidence("file", "name",
|
||||
fileName, Confidence.HIGH);
|
||||
packageName, Confidence.HIGH);
|
||||
dependency.getVendorEvidence().addEvidence("file", "name",
|
||||
fileName, Confidence.HIGH);
|
||||
packageName, Confidence.HIGH);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,8 +26,4 @@ import java.io.FileFilter;
*/
public interface FileTypeAnalyzer extends Analyzer, FileFilter {

/**
* Resets the analyzers state.
*/
void reset();
}

@@ -51,7 +51,7 @@ import org.xml.sax.SAXException;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class HintAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
public class HintAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
@@ -82,6 +82,16 @@ public class HintAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_HINT_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* The initialize method does nothing for this Analyzer.
|
||||
@@ -89,9 +99,8 @@ public class HintAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
* @throws InitializationException thrown if there is an exception
|
||||
*/
|
||||
@Override
|
||||
public void initialize() throws InitializationException {
|
||||
public void initializeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
super.initialize();
|
||||
loadHintRules();
|
||||
} catch (HintParseException ex) {
|
||||
LOGGER.debug("Unable to parse hint file", ex);
|
||||
@@ -123,7 +132,7 @@ public class HintAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
* the dependency.
|
||||
*/
|
||||
@Override
|
||||
public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
for (HintRule hint : hints.getHintRules()) {
|
||||
boolean shouldAdd = false;
|
||||
for (Evidence given : hint.getGivenVendor()) {
|
||||
@@ -154,6 +163,9 @@ public class HintAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
for (Evidence e : hint.getAddProduct()) {
|
||||
dependency.getProductEvidence().addEvidence(e);
|
||||
}
|
||||
for (Evidence e : hint.getAddVersion()) {
|
||||
dependency.getVersionEvidence().addEvidence(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -311,14 +323,21 @@ public class HintAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
} else {
|
||||
file = new File(filePath);
|
||||
if (!file.exists()) {
|
||||
final InputStream fromClasspath = this.getClass().getClassLoader().getResourceAsStream(filePath);
|
||||
if (fromClasspath != null) {
|
||||
deleteTempFile = true;
|
||||
file = FileUtils.getTempFile("hint", "xml");
|
||||
try {
|
||||
org.apache.commons.io.FileUtils.copyInputStreamToFile(fromClasspath, file);
|
||||
} catch (IOException ex) {
|
||||
throw new HintParseException("Unable to locate suppressions file in classpath", ex);
|
||||
InputStream fromClasspath = null;
|
||||
try {
|
||||
fromClasspath = this.getClass().getClassLoader().getResourceAsStream(filePath);
|
||||
if (fromClasspath != null) {
|
||||
deleteTempFile = true;
|
||||
file = FileUtils.getTempFile("hint", "xml");
|
||||
try {
|
||||
org.apache.commons.io.FileUtils.copyInputStreamToFile(fromClasspath, file);
|
||||
} catch (IOException ex) {
|
||||
throw new HintParseException("Unable to locate hints file in classpath", ex);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
if (fromClasspath != null) {
|
||||
fromClasspath.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,10 +23,9 @@ import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.OutputStream;
|
||||
import java.io.Reader;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Enumeration;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
@@ -35,6 +34,7 @@ import java.util.Map.Entry;
|
||||
import java.util.Properties;
|
||||
import java.util.Set;
|
||||
import java.util.StringTokenizer;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.jar.Attributes;
|
||||
import java.util.jar.JarEntry;
|
||||
import java.util.jar.JarFile;
|
||||
@@ -76,7 +76,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* The count of directories created during analysis. This is used for
|
||||
* creating temporary directories.
|
||||
*/
|
||||
private static int dirCount = 0;
|
||||
private static final AtomicInteger DIR_COUNT = new AtomicInteger(0);
|
||||
/**
|
||||
* The system independent newline character.
|
||||
*/
|
||||
@@ -227,7 +227,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* file.
|
||||
*/
|
||||
@Override
|
||||
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
try {
|
||||
final List<ClassNameInformation> classNames = collectClassNames(dependency);
|
||||
final String fileName = dependency.getFileName().toLowerCase();
|
||||
@@ -243,7 +243,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
final boolean addPackagesAsEvidence = !(hasManifest && hasPOM);
|
||||
analyzePackageNames(classNames, dependency, addPackagesAsEvidence);
|
||||
} catch (IOException ex) {
|
||||
throw new AnalysisException("Exception occurred reading the JAR file.", ex);
|
||||
throw new AnalysisException("Exception occurred reading the JAR file (" + dependency.getFileName() + ").", ex);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -260,80 +260,93 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @return whether or not evidence was added to the dependency
|
||||
*/
|
||||
protected boolean analyzePOM(Dependency dependency, List<ClassNameInformation> classes, Engine engine) throws AnalysisException {
|
||||
boolean foundSomething = false;
|
||||
final JarFile jar;
|
||||
JarFile jar = null;
|
||||
List<String> pomEntries = null;
|
||||
try {
|
||||
jar = new JarFile(dependency.getActualFilePath());
|
||||
pomEntries = retrievePomListing(jar);
|
||||
} catch (IOException ex) {
|
||||
LOGGER.warn("Unable to read JarFile '{}'.", dependency.getActualFilePath());
|
||||
LOGGER.trace("", ex);
|
||||
return false;
|
||||
}
|
||||
List<String> pomEntries;
|
||||
try {
|
||||
pomEntries = retrievePomListing(jar);
|
||||
} catch (IOException ex) {
|
||||
LOGGER.warn("Unable to read Jar file entries in '{}'.", dependency.getActualFilePath());
|
||||
LOGGER.trace("", ex);
|
||||
return false;
|
||||
}
|
||||
File externalPom = null;
|
||||
if (pomEntries.isEmpty()) {
|
||||
final String pomPath = FilenameUtils.removeExtension(dependency.getActualFilePath()) + ".pom";
|
||||
externalPom = new File(pomPath);
|
||||
if (externalPom.isFile()) {
|
||||
pomEntries.add(pomPath);
|
||||
} else {
|
||||
return false;
|
||||
if (jar != null) {
|
||||
try {
|
||||
jar.close();
|
||||
} catch (IOException ex1) {
|
||||
LOGGER.trace("", ex1);
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
for (String path : pomEntries) {
|
||||
LOGGER.debug("Reading pom entry: {}", path);
|
||||
Properties pomProperties = null;
|
||||
if (pomEntries != null && pomEntries.size() <= 1) {
|
||||
try {
|
||||
if (externalPom == null) {
|
||||
String path = null;
|
||||
Properties pomProperties = null;
|
||||
File pomFile = null;
|
||||
if (pomEntries.size() == 1) {
|
||||
path = pomEntries.get(0);
|
||||
pomFile = extractPom(path, jar);
|
||||
pomProperties = retrievePomProperties(path, jar);
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
LOGGER.trace("ignore this, failed reading a non-existent pom.properties", ex);
|
||||
}
|
||||
Model pom = null;
|
||||
try {
|
||||
if (pomEntries.size() > 1) {
|
||||
//extract POM to its own directory and add it as its own dependency
|
||||
final Dependency newDependency = new Dependency();
|
||||
pom = extractPom(path, jar, newDependency);
|
||||
|
||||
final String displayPath = String.format("%s%s%s",
|
||||
dependency.getFilePath(),
|
||||
File.separator,
|
||||
path);
|
||||
final String displayName = String.format("%s%s%s",
|
||||
dependency.getFileName(),
|
||||
File.separator,
|
||||
path);
|
||||
|
||||
newDependency.setFileName(displayName);
|
||||
newDependency.setFilePath(displayPath);
|
||||
pom.processProperties(pomProperties);
|
||||
setPomEvidence(newDependency, pom, null);
|
||||
engine.getDependencies().add(newDependency);
|
||||
Collections.sort(engine.getDependencies());
|
||||
} else {
|
||||
if (externalPom == null) {
|
||||
pom = PomUtils.readPom(path, jar);
|
||||
} else {
|
||||
pom = PomUtils.readPom(externalPom);
|
||||
}
|
||||
pom.processProperties(pomProperties);
|
||||
foundSomething |= setPomEvidence(dependency, pom, classes);
|
||||
path = FilenameUtils.removeExtension(dependency.getActualFilePath()) + ".pom";
|
||||
pomFile = new File(path);
|
||||
}
|
||||
if (pomFile.isFile()) {
|
||||
final Model pom = PomUtils.readPom(pomFile);
|
||||
if (pom != null && pomProperties != null) {
|
||||
pom.processProperties(pomProperties);
|
||||
}
|
||||
if (pom != null) {
|
||||
return setPomEvidence(dependency, pom, classes);
|
||||
}
|
||||
return false;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
jar.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.trace("", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//reported possible null dereference on pomEntries is on a non-feasible path
|
||||
for (String path : pomEntries) {
|
||||
//TODO - one of these is likely the pom for the main JAR we are analyzing
|
||||
LOGGER.debug("Reading pom entry: {}", path);
|
||||
try {
|
||||
//extract POM to its own directory and add it as its own dependency
|
||||
final Properties pomProperties = retrievePomProperties(path, jar);
|
||||
final File pomFile = extractPom(path, jar);
|
||||
final Model pom = PomUtils.readPom(pomFile);
|
||||
pom.processProperties(pomProperties);
|
||||
|
||||
final String displayPath = String.format("%s%s%s",
|
||||
dependency.getFilePath(),
|
||||
File.separator,
|
||||
path);
|
||||
final String displayName = String.format("%s%s%s",
|
||||
dependency.getFileName(),
|
||||
File.separator,
|
||||
path);
|
||||
final Dependency newDependency = new Dependency();
|
||||
newDependency.setActualFilePath(pomFile.getAbsolutePath());
|
||||
newDependency.setFileName(displayName);
|
||||
newDependency.setFilePath(displayPath);
|
||||
setPomEvidence(newDependency, pom, null);
|
||||
engine.getDependencies().add(newDependency);
|
||||
} catch (AnalysisException ex) {
|
||||
LOGGER.warn("An error occurred while analyzing '{}'.", dependency.getActualFilePath());
|
||||
LOGGER.trace("", ex);
|
||||
}
|
||||
}
|
||||
return foundSomething;
try {
jar.close();
} catch (IOException ex) {
LOGGER.trace("", ex);
}
return false;
}
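The rewritten analyzePOM above opens the JarFile itself and therefore has to close it on every early return, which is what the added close() calls and the finally block are doing. If the target Java level allows it, the same guarantee can be expressed with try-with-resources; a hedged sketch of that alternative (hasPomEntry is a made-up helper, not a method of the analyzer):

    import java.io.IOException;
    import java.util.Enumeration;
    import java.util.jar.JarEntry;
    import java.util.jar.JarFile;

    static boolean hasPomEntry(String actualFilePath) {
        // JarFile is Closeable, so it is closed automatically on every exit path.
        try (JarFile jar = new JarFile(actualFilePath)) {
            final Enumeration<JarEntry> entries = jar.entries();
            while (entries.hasMoreElements()) {
                if (entries.nextElement().getName().endsWith("pom.xml")) {
                    return true;
                }
            }
            return false;
        } catch (IOException ex) {
            return false;
        }
    }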
/**
|
||||
@@ -346,7 +359,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @throws IOException thrown if there is an exception reading the
|
||||
* pom.properties
|
||||
*/
|
||||
private Properties retrievePomProperties(String path, final JarFile jar) throws IOException {
|
||||
private Properties retrievePomProperties(String path, final JarFile jar) {
|
||||
Properties pomProperties = null;
|
||||
final String propPath = path.substring(0, path.length() - 7) + "pom.properies";
|
||||
final ZipEntry propEntry = jar.getEntry(propPath);
|
||||
@@ -357,6 +370,10 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
pomProperties = new Properties();
|
||||
pomProperties.load(reader);
|
||||
LOGGER.debug("Read pom.properties: {}", propPath);
|
||||
} catch (UnsupportedEncodingException ex) {
|
||||
LOGGER.trace("UTF-8 is not supported", ex);
|
||||
} catch (IOException ex) {
|
||||
LOGGER.trace("Unable to read the POM properties", ex);
|
||||
} finally {
|
||||
if (reader != null) {
|
||||
try {
|
||||
@@ -393,64 +410,35 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the specified POM from a jar file and converts it to a Model.
|
||||
* Retrieves the specified POM from a jar.
|
||||
*
|
||||
* @param path the path to the pom.xml file within the jar file
|
||||
* @param jar the jar file to extract the pom from
|
||||
* @param dependency the dependency being analyzed
|
||||
* @return returns the POM object
|
||||
* @return returns the POM file
|
||||
* @throws AnalysisException is thrown if there is an exception extracting
|
||||
* or parsing the POM {@link org.owasp.dependencycheck.xml.pom.Model} object
|
||||
* the file
|
||||
*/
|
||||
private Model extractPom(String path, JarFile jar, Dependency dependency) throws AnalysisException {
|
||||
private File extractPom(String path, JarFile jar) throws AnalysisException {
|
||||
InputStream input = null;
|
||||
FileOutputStream fos = null;
|
||||
final File tmpDir = getNextTempDirectory();
|
||||
final File file = new File(tmpDir, "pom.xml");
|
||||
try {
|
||||
final ZipEntry entry = jar.getEntry(path);
|
||||
if (entry == null) {
|
||||
throw new AnalysisException(String.format("Pom (%s)does not exist in %s", path, jar.getName()));
|
||||
}
|
||||
input = jar.getInputStream(entry);
|
||||
fos = new FileOutputStream(file);
|
||||
IOUtils.copy(input, fos);
|
||||
dependency.setActualFilePath(file.getAbsolutePath());
|
||||
} catch (IOException ex) {
|
||||
LOGGER.warn("An error occurred reading '{}' from '{}'.", path, dependency.getFilePath());
|
||||
LOGGER.warn("An error occurred reading '{}' from '{}'.", path, jar.getName());
|
||||
LOGGER.error("", ex);
|
||||
} finally {
|
||||
closeStream(fos);
|
||||
closeStream(input);
|
||||
}
|
||||
return PomUtils.readPom(file);
|
||||
}
|
||||
|
||||
/**
|
||||
* Silently closes an input stream ignoring errors.
|
||||
*
|
||||
* @param stream an input stream to close
|
||||
*/
|
||||
private void closeStream(InputStream stream) {
|
||||
if (stream != null) {
|
||||
try {
|
||||
stream.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.trace("", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Silently closes an output stream ignoring errors.
|
||||
*
|
||||
* @param stream an output stream to close
|
||||
*/
|
||||
private void closeStream(OutputStream stream) {
|
||||
if (stream != null) {
|
||||
try {
|
||||
stream.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.trace("", ex);
|
||||
}
|
||||
FileUtils.close(fos);
|
||||
FileUtils.close(input);
|
||||
}
|
||||
return file;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -487,7 +475,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
final String originalGroupID = groupid;
|
||||
if (groupid.startsWith("org.") || groupid.startsWith("com.")) {
|
||||
if (groupid != null && (groupid.startsWith("org.") || groupid.startsWith("com."))) {
|
||||
groupid = groupid.substring(4);
|
||||
}
|
||||
|
||||
@@ -496,7 +484,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
final String originalArtifactID = artifactid;
|
||||
if (artifactid.startsWith("org.") || artifactid.startsWith("com.")) {
|
||||
if (artifactid != null && (artifactid.startsWith("org.") || artifactid.startsWith("com."))) {
|
||||
artifactid = artifactid.substring(4);
|
||||
}
|
||||
|
||||
@@ -645,7 +633,8 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @return whether evidence was identified parsing the manifest
|
||||
* @throws IOException if there is an issue reading the JAR file
|
||||
*/
|
||||
protected boolean parseManifest(Dependency dependency, List<ClassNameInformation> classInformation) throws IOException {
|
||||
protected boolean parseManifest(Dependency dependency, List<ClassNameInformation> classInformation)
|
||||
throws IOException {
|
||||
boolean foundSomething = false;
|
||||
JarFile jar = null;
|
||||
try {
|
||||
@@ -685,7 +674,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
foundSomething = true;
|
||||
versionEvidence.addEvidence(source, key, value, Confidence.HIGH);
|
||||
} else if ("specification-version".equalsIgnoreCase(key)) {
|
||||
specificationVersion = key;
|
||||
specificationVersion = value;
|
||||
} else if (key.equalsIgnoreCase(Attributes.Name.IMPLEMENTATION_VENDOR.toString())) {
|
||||
foundSomething = true;
|
||||
vendorEvidence.addEvidence(source, key, value, Confidence.HIGH);
|
||||
@@ -704,17 +693,12 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
addMatchingValues(classInformation, value, productEvidence);
|
||||
// //the following caused false positives.
|
||||
// } else if (key.equalsIgnoreCase(BUNDLE_VENDOR)) {
|
||||
// foundSomething = true;
|
||||
// vendorEvidence.addEvidence(source, key, value, Confidence.HIGH);
|
||||
// addMatchingValues(classInformation, value, vendorEvidence);
|
||||
} else if (key.equalsIgnoreCase(BUNDLE_VERSION)) {
|
||||
foundSomething = true;
|
||||
versionEvidence.addEvidence(source, key, value, Confidence.HIGH);
|
||||
} else if (key.equalsIgnoreCase(Attributes.Name.MAIN_CLASS.toString())) {
|
||||
continue;
|
||||
//skipping main class as if this has important information to add
|
||||
// it will be added during class name analysis... if other fields
|
||||
// have the information from the class name then they will get added...
|
||||
//skipping main class as if this has important information to add it will be added during class name analysis...
|
||||
} else {
|
||||
key = key.toLowerCase();
|
||||
if (!IGNORE_KEYS.contains(key)
|
||||
@@ -735,11 +719,11 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
} else if ("build-id".equals(key)) {
|
||||
int pos = value.indexOf('(');
|
||||
if (pos >= 0) {
|
||||
if (pos > 0) {
|
||||
value = value.substring(0, pos - 1);
|
||||
}
|
||||
pos = value.indexOf('[');
|
||||
if (pos >= 0) {
|
||||
if (pos > 0) {
|
||||
value = value.substring(0, pos - 1);
|
||||
}
|
||||
versionEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
|
||||
@@ -928,12 +912,15 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* Deletes any files extracted from the JAR during analysis.
|
||||
*/
|
||||
@Override
|
||||
public void close() {
|
||||
public void closeAnalyzer() {
|
||||
if (tempFileLocation != null && tempFileLocation.exists()) {
|
||||
LOGGER.debug("Attempting to delete temporary files");
|
||||
final boolean success = FileUtils.delete(tempFileLocation);
|
||||
if (!success) {
|
||||
LOGGER.warn("Failed to delete some temporary files, see the log for more details");
|
||||
if (!success && tempFileLocation.exists()) {
|
||||
final String[] l = tempFileLocation.list();
|
||||
if (l != null && l.length > 0) {
|
||||
LOGGER.warn("Failed to delete some temporary files, see the log for more details");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1011,13 +998,11 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
if (list.size() == 2) {
|
||||
addEntry(product, list.get(1));
|
||||
}
|
||||
if (list.size() == 3) {
|
||||
} else if (list.size() == 3) {
|
||||
addEntry(vendor, list.get(1));
|
||||
addEntry(product, list.get(1));
|
||||
addEntry(product, list.get(2));
|
||||
}
|
||||
if (list.size() >= 4) {
|
||||
} else if (list.size() >= 4) {
|
||||
addEntry(vendor, list.get(1));
|
||||
addEntry(vendor, list.get(2));
|
||||
addEntry(product, list.get(1));
|
||||
@@ -1218,7 +1203,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException thrown if unable to create temporary directory
*/
private File getNextTempDirectory() throws AnalysisException {
dirCount += 1;
final int dirCount = DIR_COUNT.incrementAndGet();
final File directory = new File(tempFileLocation, String.valueOf(dirCount));
//getting an exception for some directories not being able to be created; might be because the directory already exists?
if (directory.exists()) {
@@ -87,6 +87,10 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*/
|
||||
private static final String SUPPORTED_EXTENSIONS = "jar";
|
||||
|
||||
/**
|
||||
* Whether or not the Nexus analyzer should use a proxy if configured.
|
||||
*/
|
||||
private boolean useProxy;
|
||||
/**
|
||||
* The Nexus Search to be set up for this analyzer.
|
||||
*/
|
||||
@@ -144,10 +148,11 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
LOGGER.debug("Initializing Nexus Analyzer");
|
||||
LOGGER.debug("Nexus Analyzer enabled: {}", isEnabled());
|
||||
if (isEnabled()) {
|
||||
useProxy = useProxy();
|
||||
final String searchUrl = Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL);
|
||||
LOGGER.debug("Nexus Analyzer URL: {}", searchUrl);
|
||||
try {
|
||||
searcher = new NexusSearch(new URL(searchUrl));
|
||||
searcher = new NexusSearch(new URL(searchUrl), useProxy);
|
||||
if (!searcher.preflightRequest()) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("There was an issue getting Nexus status. Disabling analyzer.");
|
||||
@@ -213,7 +218,7 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @throws AnalysisException when there's an exception during analysis
|
||||
*/
|
||||
@Override
|
||||
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
if (!isEnabled()) {
|
||||
return;
|
||||
}
|
||||
@@ -245,7 +250,8 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
LOGGER.warn("Unable to download pom.xml for {} from Nexus repository; "
|
||||
+ "this could result in undetected CPE/CVEs.", dependency.getFileName());
|
||||
} finally {
|
||||
if (pomFile != null && !FileUtils.deleteQuietly(pomFile)) {
|
||||
if (pomFile != null && pomFile.exists() && !FileUtils.deleteQuietly(pomFile)) {
|
||||
LOGGER.debug("Failed to delete temporary pom file {}", pomFile.toString());
|
||||
pomFile.deleteOnExit();
|
||||
}
|
||||
}
|
||||
@@ -262,4 +268,19 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
LOGGER.debug("Could not connect to nexus repository", ioe);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if a proxy should be used.
|
||||
*
|
||||
* @return {@code true} if a proxy should be used
|
||||
*/
|
||||
public static boolean useProxy() {
|
||||
try {
|
||||
return Settings.getString(Settings.KEYS.PROXY_SERVER) != null
|
||||
&& Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY);
|
||||
} catch (InvalidSettingException ise) {
|
||||
LOGGER.warn("Failed to parse proxy settings.", ise);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
@@ -121,7 +121,7 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void analyzeFileType(Dependency dependency, Engine engine)
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
final File file = dependency.getActualFile();
|
||||
JsonReader jsonReader;
|
||||
|
||||
@@ -127,7 +127,7 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @throws AnalysisException when there's an exception during analysis
|
||||
*/
|
||||
@Override
|
||||
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
LOGGER.debug("Checking Nuspec file {}", dependency);
|
||||
try {
|
||||
final NuspecParser parser = new XPathNuspecParser();
|
||||
|
||||
@@ -28,15 +28,18 @@ import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Identifier;
|
||||
import org.owasp.dependencycheck.dependency.Vulnerability;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* NvdCveAnalyzer is a utility class that takes a project dependency and attempts to discern if there is an associated
|
||||
* CVEs. It uses the the identifiers found by other analyzers to lookup the CVE data.
|
||||
* NvdCveAnalyzer is a utility class that takes a project dependency and
|
||||
* attempts to discern if there is an associated CVEs. It uses the the
|
||||
* identifiers found by other analyzers to lookup the CVE data.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class NvdCveAnalyzer implements Analyzer {
|
||||
public class NvdCveAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
/**
|
||||
* The Logger for use throughout the class
|
||||
*/
|
||||
@@ -56,7 +59,8 @@ public class NvdCveAnalyzer implements Analyzer {
|
||||
* @throws SQLException thrown when there is a SQL Exception
|
||||
* @throws IOException thrown when there is an IO Exception
|
||||
* @throws DatabaseException thrown when there is a database exceptions
|
||||
* @throws ClassNotFoundException thrown if the h2 database driver cannot be loaded
|
||||
* @throws ClassNotFoundException thrown if the h2 database driver cannot be
|
||||
* loaded
|
||||
*/
|
||||
public void open() throws SQLException, IOException, DatabaseException, ClassNotFoundException {
|
||||
cveDB = new CveDB();
|
||||
@@ -67,7 +71,7 @@ public class NvdCveAnalyzer implements Analyzer {
|
||||
* Closes the data source.
|
||||
*/
|
||||
@Override
|
||||
public void close() {
|
||||
public void closeAnalyzer() {
|
||||
cveDB.close();
|
||||
cveDB = null;
|
||||
}
|
||||
@@ -95,14 +99,16 @@ public class NvdCveAnalyzer implements Analyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes a dependency and attempts to determine if there are any CPE identifiers for this dependency.
|
||||
* Analyzes a dependency and attempts to determine if there are any CPE
|
||||
* identifiers for this dependency.
|
||||
*
|
||||
* @param dependency The Dependency to analyze
|
||||
* @param engine The analysis engine
|
||||
* @throws AnalysisException thrown if there is an issue analyzing the dependency
|
||||
* @throws AnalysisException thrown if there is an issue analyzing the
|
||||
* dependency
|
||||
*/
|
||||
@Override
|
||||
public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
for (Identifier id : dependency.getIdentifiers()) {
|
||||
if ("cpe".equals(id.getType())) {
|
||||
try {
|
||||
@@ -148,12 +154,24 @@ public class NvdCveAnalyzer implements Analyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens the database used to gather NVD CVE data.
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @throws InitializationException is thrown if there is an issue opening the index.
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
@Override
|
||||
public void initialize() throws InitializationException {
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_NVD_CVE_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens the database used to gather NVD CVE data.
|
||||
*
|
||||
* @throws InitializationException is thrown if there is an issue opening
|
||||
* the index.
|
||||
*/
|
||||
@Override
|
||||
public void initializeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
this.open();
|
||||
} catch (SQLException ex) {
|
||||
|
||||
@@ -162,7 +162,7 @@ public class OpenSSLAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* analyzing the dependency
|
||||
*/
|
||||
@Override
|
||||
protected void analyzeFileType(Dependency dependency, Engine engine)
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
final File file = dependency.getActualFile();
|
||||
final String parentName = file.getParentFile().getName();
|
||||
|
||||
@@ -24,9 +24,9 @@ import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FilenameFilter;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import org.apache.commons.io.filefilter.NameFileFilter;
|
||||
import org.apache.commons.io.filefilter.SuffixFileFilter;
|
||||
import org.apache.commons.io.input.AutoCloseInputStream;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
@@ -45,6 +45,7 @@ import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.FileUtils;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.utils.UrlStringUtils;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
/**
|
||||
* Used to analyze a Wheel or egg distribution files, or their contents in
|
||||
@@ -76,7 +77,7 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* The count of directories created during analysis. This is used for
|
||||
* creating temporary directories.
|
||||
*/
|
||||
private static int dirCount = 0;
|
||||
private static final AtomicInteger DIR_COUNT = new AtomicInteger(0);
|
||||
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
@@ -180,7 +181,7 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void analyzeFileType(Dependency dependency, Engine engine)
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
final File actualFile = dependency.getActualFile();
|
||||
if (WHL_FILTER.accept(actualFile)) {
|
||||
@@ -228,10 +229,13 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
throw new AnalysisException(ex);
|
||||
}
|
||||
|
||||
collectWheelMetadata(
|
||||
dependency,
|
||||
getMatchingFile(getMatchingFile(temp, folderFilter),
|
||||
metadataFilter));
|
||||
File matchingFile = getMatchingFile(temp, folderFilter);
|
||||
if (matchingFile != null) {
|
||||
matchingFile = getMatchingFile(matchingFile, metadataFilter);
|
||||
if (matchingFile != null) {
|
||||
collectWheelMetadata(dependency, matchingFile);
|
||||
}
|
||||
}
|
||||
}
/**
|
||||
@@ -269,13 +273,15 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* Deletes any files extracted from the Wheel during analysis.
|
||||
*/
|
||||
@Override
|
||||
public void close() {
|
||||
public void closeAnalyzer() {
|
||||
if (tempFileLocation != null && tempFileLocation.exists()) {
|
||||
LOGGER.debug("Attempting to delete temporary files");
|
||||
final boolean success = FileUtils.delete(tempFileLocation);
|
||||
if (!success) {
|
||||
LOGGER.warn(
|
||||
"Failed to delete some temporary files, see the log for more details");
|
||||
if (!success && tempFileLocation.exists()) {
|
||||
final String[] l = tempFileLocation.list();
|
||||
if (l != null && l.length > 0) {
|
||||
LOGGER.warn("Failed to delete some temporary files, see the log for more details");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -354,13 +360,22 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
if (null == manifest) {
|
||||
LOGGER.debug("Manifest file not found.");
|
||||
} else {
|
||||
InputStream in = null;
|
||||
try {
|
||||
result.load(new AutoCloseInputStream(new BufferedInputStream(
|
||||
new FileInputStream(manifest))));
|
||||
in = new BufferedInputStream(new FileInputStream(manifest));
|
||||
result.load(in);
|
||||
} catch (MessagingException e) {
|
||||
LOGGER.warn(e.getMessage(), e);
|
||||
} catch (FileNotFoundException e) {
|
||||
LOGGER.warn(e.getMessage(), e);
|
||||
} finally {
|
||||
if (in != null) {
|
||||
try {
|
||||
in.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("failed to close input stream", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
@@ -379,7 +394,7 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
// getting an exception for some directories not being able to be
|
||||
// created; might be because the directory already exists?
|
||||
do {
|
||||
dirCount += 1;
|
||||
final int dirCount = DIR_COUNT.incrementAndGet();
|
||||
directory = new File(tempFileLocation, String.valueOf(dirCount));
|
||||
} while (directory.exists());
|
||||
if (!directory.mkdirs()) {
|
||||
|
||||
@@ -33,8 +33,6 @@ import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
@@ -173,7 +171,7 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* analyzing the dependency
|
||||
*/
|
||||
@Override
|
||||
protected void analyzeFileType(Dependency dependency, Engine engine)
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
final File file = dependency.getActualFile();
|
||||
final File parent = file.getParentFile();
|
||||
@@ -193,11 +191,7 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// copy, alter and set in case some other thread is iterating over
|
||||
final List<Dependency> dependencies = new ArrayList<Dependency>(
|
||||
engine.getDependencies());
|
||||
dependencies.remove(dependency);
|
||||
engine.setDependencies(dependencies);
|
||||
engine.getDependencies().remove(dependency);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -23,25 +23,26 @@ import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.nio.charset.Charset;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Reference;
|
||||
import org.owasp.dependencycheck.dependency.Vulnerability;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
|
||||
/**
|
||||
* Used to analyze Ruby Bundler Gemspec.lock files utilizing the 3rd party
|
||||
@@ -114,7 +115,15 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
final List<String> args = new ArrayList<String>();
|
||||
final String bundleAuditPath = Settings.getString(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH);
|
||||
args.add(null == bundleAuditPath ? "bundle-audit" : bundleAuditPath);
|
||||
File bundleAudit = null;
|
||||
if (bundleAuditPath != null) {
|
||||
bundleAudit = new File(bundleAuditPath);
|
||||
if (!bundleAudit.isFile()) {
|
||||
LOGGER.warn("Supplied `bundleAudit` path is incorrect: " + bundleAuditPath);
|
||||
bundleAudit = null;
|
||||
}
|
||||
}
|
||||
args.add(bundleAudit != null && bundleAudit.isFile() ? bundleAudit.getAbsolutePath() : "bundle-audit");
|
||||
args.add("check");
|
||||
args.add("--verbose");
|
||||
final ProcessBuilder builder = new ProcessBuilder(args);
|
||||
@@ -243,7 +252,7 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* If {@link #analyzeFileType(Dependency, Engine)} is called, then we have
|
||||
* If {@link #analyzeDependency(Dependency, Engine)} is called, then we have
|
||||
* successfully initialized, and it will be necessary to disable
|
||||
* {@link RubyGemspecAnalyzer}.
|
||||
*/
|
||||
@@ -257,7 +266,7 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @throws AnalysisException thrown if there is an analysis exception.
|
||||
*/
|
||||
@Override
|
||||
protected void analyzeFileType(Dependency dependency, Engine engine)
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
if (needToDisableGemspecAnalyzer) {
|
||||
boolean failed = true;
|
||||
@@ -279,11 +288,16 @@
}
final File parentFile = dependency.getActualFile().getParentFile();
final Process process = launchBundleAudit(parentFile);
final int exitValue;
try {
process.waitFor();
exitValue = process.waitFor();
} catch (InterruptedException ie) {
throw new AnalysisException("bundle-audit process interrupted", ie);
}
if (exitValue < 0 || exitValue > 1) {
final String msg = String.format("Unexpected exit code from bundle-audit process; exit code: %s", exitValue);
throw new AnalysisException(msg);
}
BufferedReader rdr = null;
BufferedReader errReader = null;
try {
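The hunk above starts checking the bundle-audit exit status instead of discarding it; only 0 and 1 are accepted, presumably because bundle-audit uses exit code 1 to report findings rather than a failure to run. A small standalone sketch of that pattern (the executable name and arguments mirror the diff, everything else is illustrative):

    import java.io.File;
    import java.io.IOException;
    import java.util.Arrays;

    static int runBundleAudit(File gemlockDirectory) throws IOException, InterruptedException {
        final ProcessBuilder builder = new ProcessBuilder(Arrays.asList("bundle-audit", "check", "--verbose"));
        builder.directory(gemlockDirectory);
        // The real analyzer parses stdout with BufferedReaders; inheriting the parent's
        // streams here keeps the sketch short and avoids blocking on a full pipe.
        builder.inheritIO();
        final Process process = builder.start();
        final int exitValue = process.waitFor();
        if (exitValue < 0 || exitValue > 1) {
            throw new IOException("Unexpected exit code from bundle-audit: " + exitValue);
        }
        return exitValue;
    }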
@@ -482,7 +496,9 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*/
|
||||
private Dependency createDependencyForGem(Engine engine, String parentName, String fileName, String filePath, String gem) throws IOException {
|
||||
final File gemFile = new File(Settings.getTempDirectory(), gem + "_Gemfile.lock");
|
||||
gemFile.createNewFile();
|
||||
if (!gemFile.createNewFile()) {
|
||||
throw new IOException("Unable to create temporary gem file");
|
||||
}
|
||||
final String displayFileName = String.format("%s%c%s:%s", parentName, File.separatorChar, fileName, gem);
|
||||
|
||||
FileUtils.write(gemFile, displayFileName, Charset.defaultCharset()); // unique contents to avoid dependency bundling
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2016 Bianca Jiang. All Rights Reserved.
|
||||
* Copyright (c) 2016 IBM Corporation. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
@@ -27,8 +27,9 @@ import org.owasp.dependencycheck.dependency.Dependency;
|
||||
/**
|
||||
* This analyzer accepts the fully resolved .gemspec created by the Ruby bundler
|
||||
* (http://bundler.io) for better evidence results. It also tries to resolve the
|
||||
* dependency packagePath to where the gem is actually installed. Then during {@link org.owasp.dependencycheck.analyzer.AnalysisPhase#PRE_FINDING_ANALYSIS}
|
||||
* {@link DependencyBundlingAnalyzer} will merge two .gemspec dependencies
|
||||
* dependency packagePath to where the gem is actually installed. Then during
|
||||
* the {@link org.owasp.dependencycheck.analyzer.AnalysisPhase#PRE_FINDING_ANALYSIS}
|
||||
* {@link DependencyMergingAnalyzer} will merge two .gemspec dependencies
|
||||
* together if <code>Dependency.getPackagePath()</code> are the same.
|
||||
*
|
||||
* Ruby bundler creates new .gemspec files under a folder called
|
||||
@@ -39,11 +40,11 @@ import org.owasp.dependencycheck.dependency.Dependency;
|
||||
* can't be used for evidences.
|
||||
*
|
||||
* Note this analyzer share the same
|
||||
* {@link org.owasp.dependencycheck.utils.Settings.KEYS#ANALYZER_RUBY_GEMSPEC_ENABLED} as
|
||||
* {@link RubyGemspecAnalyzer}, so it will enabled/disabled with
|
||||
* {@link org.owasp.dependencycheck.utils.Settings.KEYS#ANALYZER_RUBY_GEMSPEC_ENABLED}
|
||||
* as {@link RubyGemspecAnalyzer}, so it will enabled/disabled with
|
||||
* {@link RubyGemspecAnalyzer}.
|
||||
*
|
||||
* @author Bianca Jiang (biancajiang@gmail.com)
|
||||
* @author Bianca Jiang (https://twitter.com/biancajiang)
|
||||
*/
|
||||
@Experimental
|
||||
public class RubyBundlerAnalyzer extends RubyGemspecAnalyzer {
|
||||
@@ -93,9 +94,9 @@ public class RubyBundlerAnalyzer extends RubyGemspecAnalyzer {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void analyzeFileType(Dependency dependency, Engine engine)
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
super.analyzeFileType(dependency, engine);
|
||||
super.analyzeDependency(dependency, engine);
|
||||
|
||||
//find the corresponding gem folder for this .gemspec stub by "bundle install --deployment"
|
||||
final File gemspecFile = dependency.getActualFile();
|
||||
|
||||
@@ -130,7 +130,7 @@ public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
private static final Pattern GEMSPEC_BLOCK_INIT = Pattern.compile("Gem::Specification\\.new\\s+?do\\s+?\\|(.+?)\\|");
|
||||
|
||||
@Override
|
||||
protected void analyzeFileType(Dependency dependency, Engine engine)
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
String contents;
|
||||
try {
|
||||
@@ -217,6 +217,9 @@ public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
return name.contains(VERSION_FILE_NAME);
|
||||
}
|
||||
});
|
||||
if (matchingFiles == null) {
|
||||
return;
|
||||
}
|
||||
for (File f : matchingFiles) {
|
||||
try {
|
||||
final List<String> lines = FileUtils.readLines(f, Charset.defaultCharset());
|
||||
|
||||
@@ -0,0 +1,192 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2016 IBM Corporation. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
/**
|
||||
* This analyzer is used to analyze the SWIFT Package Manager
|
||||
* (https://swift.org/package-manager/). It collects information about a package
|
||||
* from Package.swift files.
|
||||
*
|
||||
* @author Bianca Jiang (https://twitter.com/biancajiang)
|
||||
*/
|
||||
@Experimental
|
||||
public class SwiftPackageManagerAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
private static final String ANALYZER_NAME = "SWIFT Package Manager Analyzer";
|
||||
|
||||
/**
|
||||
* The phase that this analyzer is intended to run in.
|
||||
*/
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION;
|
||||
|
||||
/**
|
||||
* The file name to scan.
|
||||
*/
|
||||
public static final String SPM_FILE_NAME = "Package.swift";
|
||||
|
||||
/**
|
||||
* Filter that detects files named "package.json".
|
||||
*/
|
||||
private static final FileFilter SPM_FILE_FILTER = FileFilterBuilder.newInstance().addFilenames(SPM_FILE_NAME).build();
|
||||
|
||||
/**
|
||||
* The capture group #1 is the block variable. e.g. "import
|
||||
* PackageDescription let package = Package( name: "Gloss" )"
|
||||
*/
|
||||
private static final Pattern SPM_BLOCK_PATTERN = Pattern.compile("let[^=]+=\\s*Package\\s*\\(\\s*([^)]*)\\s*\\)", Pattern.DOTALL);
|
||||
|
||||
/**
|
||||
* Returns the FileFilter
|
||||
*
|
||||
* @return the FileFilter
|
||||
*/
|
||||
@Override
|
||||
protected FileFilter getFileFilter() {
|
||||
return SPM_FILE_FILTER;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() {
|
||||
// NO-OP
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer.
|
||||
*
|
||||
* @return the name of the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return ANALYZER_NAME;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the phase that the analyzer is intended to run in.
|
||||
*
|
||||
* @return the phase that the analyzer is intended to run in.
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the key used in the properties file to reference the analyzer's
|
||||
* enabled property.
|
||||
*
|
||||
* @return the analyzer's enabled property setting key
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
|
||||
String contents;
|
||||
try {
|
||||
contents = FileUtils.readFileToString(dependency.getActualFile(), Charset.defaultCharset());
|
||||
} catch (IOException e) {
|
||||
throw new AnalysisException(
|
||||
"Problem occurred while reading dependency file.", e);
|
||||
}
|
||||
final Matcher matcher = SPM_BLOCK_PATTERN.matcher(contents);
|
||||
if (matcher.find()) {
|
||||
final String packageDescription = matcher.group(1);
|
||||
if (packageDescription.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
final EvidenceCollection product = dependency.getProductEvidence();
|
||||
final EvidenceCollection vendor = dependency.getVendorEvidence();
|
||||
|
||||
//SPM is currently under development for SWIFT 3. Its current metadata includes package name and dependencies.
|
||||
//Future interesting metadata: version, license, homepage, author, summary, etc.
|
||||
final String name = addStringEvidence(product, packageDescription, "name", "name", Confidence.HIGHEST);
|
||||
if (name != null && !name.isEmpty()) {
|
||||
vendor.addEvidence(SPM_FILE_NAME, "name_project", name, Confidence.HIGHEST);
|
||||
}
|
||||
}
|
||||
setPackagePath(dependency);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts evidence from the package description and adds it to the given
|
||||
* evidence collection.
|
||||
*
|
||||
* @param evidences the evidence collection to update
|
||||
* @param packageDescription the text to extract evidence from
|
||||
* @param field the name of the field being searched for
|
||||
* @param fieldPattern the field pattern within the contents to search for
|
||||
* @param confidence the confidence level of the evidence if found
|
||||
* @return the string that was added as evidence
|
||||
*/
|
||||
private String addStringEvidence(EvidenceCollection evidences,
|
||||
String packageDescription, String field, String fieldPattern, Confidence confidence) {
|
||||
String value = "";
|
||||
|
||||
final Matcher matcher = Pattern.compile(
|
||||
String.format("%s *:\\s*\"([^\"]*)", fieldPattern), Pattern.DOTALL).matcher(packageDescription);
|
||||
if (matcher.find()) {
|
||||
value = matcher.group(1);
|
||||
}
|
||||
|
||||
if (value != null) {
|
||||
value = value.trim();
|
||||
if (value.length() > 0) {
|
||||
evidences.addEvidence(SPM_FILE_NAME, field, value, confidence);
|
||||
}
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
* Sets the package path on the given dependency.
*
* @param dep the dependency to update
*/
private void setPackagePath(Dependency dep) {
final File file = new File(dep.getFilePath());
final String parent = file.getParent();
if (parent != null) {
dep.setPackagePath(parent);
}
}
}
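The two regular expressions in the new Swift analyzer above do most of the work: SPM_BLOCK_PATTERN isolates the Package(...) initializer and a per-field pattern then pulls out quoted values such as the package name. A standalone demonstration against a tiny, invented Package.swift (the sample manifest is not taken from the project):

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class SpmRegexDemo {
        public static void main(String[] args) {
            final String manifest = "import PackageDescription\n"
                    + "let package = Package(\n"
                    + "    name: \"Gloss\"\n"
                    + ")\n";
            final Pattern block = Pattern.compile("let[^=]+=\\s*Package\\s*\\(\\s*([^)]*)\\s*\\)", Pattern.DOTALL);
            final Matcher blockMatcher = block.matcher(manifest);
            if (blockMatcher.find()) {
                final String packageDescription = blockMatcher.group(1);
                // Same shape as addStringEvidence's field pattern with field = "name".
                final Matcher name = Pattern.compile("name *:\\s*\"([^\"]*)", Pattern.DOTALL)
                        .matcher(packageDescription);
                if (name.find()) {
                    System.out.println(name.group(1)); // prints: Gloss
                }
            }
        }
    }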
@@ -0,0 +1,169 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2017 Jeremy Long. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.Objects;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Evidence;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.utils.DependencyVersion;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* This analyzer attempts to filter out erroneous version numbers collected.
|
||||
* Initially, this will focus on JAR files that contain a POM version number
|
||||
* that matches the file name - if identified all other version information will
|
||||
* be removed.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class VersionFilterAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="Constaints">
|
||||
/**
|
||||
* Evidence source.
|
||||
*/
|
||||
private static final String FILE = "file";
|
||||
/**
|
||||
* Evidence source.
|
||||
*/
|
||||
private static final String POM = "pom";
|
||||
/**
|
||||
* Evidence source.
|
||||
*/
|
||||
private static final String NEXUS = "nexus";
|
||||
/**
|
||||
* Evidence source.
|
||||
*/
|
||||
private static final String CENTRAL = "central";
|
||||
/**
|
||||
* Evidence source.
|
||||
*/
|
||||
private static final String MANIFEST = "Manifest";
|
||||
/**
|
||||
* Evidence name.
|
||||
*/
|
||||
private static final String VERSION = "version";
|
||||
/**
|
||||
* Evidence name.
|
||||
*/
|
||||
private static final String IMPLEMENTATION_VERSION = "Implementation-Version";
|
||||
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
private static final String ANALYZER_NAME = "Version Filter Analyzer";
|
||||
/**
|
||||
* The phase that this analyzer is intended to run in.
|
||||
*/
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.POST_INFORMATION_COLLECTION;
|
||||
|
||||
//</editor-fold>
|
||||
//<editor-fold defaultstate="collapsed" desc="Standard implementation of Analyzer">
|
||||
/**
|
||||
* Returns the name of the analyzer.
|
||||
*
|
||||
* @return the name of the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return ANALYZER_NAME;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the phase that the analyzer is intended to run in.
|
||||
*
|
||||
* @return the phase that the analyzer is intended to run in.
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the setting key to determine if the analyzer is enabled.
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_VERSION_FILTER_ENABLED;
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* The Logger for use throughout the class
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(VersionFilterAnalyzer.class);
|
||||
|
||||
/**
|
||||
* The HintAnalyzer uses knowledge about a dependency to add additional
|
||||
* information to help in identification of identifiers or vulnerabilities.
|
||||
*
|
||||
* @param dependency The dependency being analyzed
|
||||
* @param engine The scanning engine
|
||||
* @throws AnalysisException is thrown if there is an exception analyzing
|
||||
* the dependency.
|
||||
*/
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
String fileVersion = null;
|
||||
String pomVersion = null;
|
||||
String manifestVersion = null;
|
||||
for (Evidence e : dependency.getVersionEvidence()) {
|
||||
if (FILE.equals(e.getSource()) && VERSION.equals(e.getName())) {
|
||||
fileVersion = e.getValue(Boolean.FALSE);
|
||||
} else if ((NEXUS.equals(e.getSource()) || CENTRAL.equals(e.getSource())
|
||||
|| POM.equals(e.getSource())) && VERSION.equals(e.getName())) {
|
||||
pomVersion = e.getValue(Boolean.FALSE);
|
||||
} else if (MANIFEST.equals(e.getSource()) && IMPLEMENTATION_VERSION.equals(e.getName())) {
|
||||
manifestVersion = e.getValue(Boolean.FALSE);
|
||||
}
|
||||
}
|
||||
//ensure we have at least two not null
|
||||
if (((fileVersion == null ? 0 : 1) + (pomVersion == null ? 0 : 1) + (manifestVersion == null ? 0 : 1)) > 1) {
|
||||
final DependencyVersion dvFile = new DependencyVersion(fileVersion);
|
||||
final DependencyVersion dvPom = new DependencyVersion(pomVersion);
|
||||
final DependencyVersion dvManifest = new DependencyVersion(manifestVersion);
|
||||
final boolean fileMatch = Objects.equals(dvFile, dvPom) || Objects.equals(dvFile, dvManifest);
|
||||
final boolean manifestMatch = Objects.equals(dvManifest, dvPom) || Objects.equals(dvManifest, dvFile);
|
||||
final boolean pomMatch = Objects.equals(dvPom, dvFile) || Objects.equals(dvPom, dvManifest);
|
||||
if (fileMatch || manifestMatch || pomMatch) {
|
||||
LOGGER.debug("filtering evidence from {}", dependency.getFileName());
|
||||
final EvidenceCollection versionEvidence = dependency.getVersionEvidence();
|
||||
synchronized (versionEvidence) {
|
||||
final Iterator<Evidence> itr = versionEvidence.iterator();
|
||||
while (itr.hasNext()) {
|
||||
final Evidence e = itr.next();
|
||||
if (!(pomMatch && VERSION.equals(e.getName())
|
||||
&& (NEXUS.equals(e.getSource()) || CENTRAL.equals(e.getSource()) || POM.equals(e.getSource())))
|
||||
&& !(fileMatch && VERSION.equals(e.getName()) && FILE.equals(e.getSource()))
|
||||
&& !(manifestMatch && MANIFEST.equals(e.getSource()) && IMPLEMENTATION_VERSION.equals(e.getName()))) {
|
||||
itr.remove();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
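The filtering in VersionFilterAnalyzer above only fires when at least two of the three version sources (file name, POM, manifest) reported something and at least two of them agree. A simplified sketch of that decision, using plain string equality in place of DependencyVersion's semantic comparison (an approximation, not the analyzer's exact behaviour):

    import java.util.Objects;

    static boolean shouldFilter(String fileVersion, String pomVersion, String manifestVersion) {
        // Same counting trick as the analyzer: a source contributes 1 if it reported a version.
        final int reported = (fileVersion == null ? 0 : 1)
                + (pomVersion == null ? 0 : 1)
                + (manifestVersion == null ? 0 : 1);
        if (reported < 2) {
            return false;
        }
        // Any agreeing pair is enough to treat the remaining version evidence as noise.
        return Objects.equals(fileVersion, pomVersion)
                || Objects.equals(fileVersion, manifestVersion)
                || Objects.equals(pomVersion, manifestVersion);
    }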
@@ -20,11 +20,13 @@ package org.owasp.dependencycheck.analyzer;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.xml.suppression.SuppressionRule;
|
||||
|
||||
/**
|
||||
* The suppression analyzer processes an externally defined XML document that complies with the suppressions.xsd schema.
|
||||
* Any identified Vulnerability entries within the dependencies that match will be removed.
|
||||
* The suppression analyzer processes an externally defined XML document that
|
||||
* complies with the suppressions.xsd schema. Any identified Vulnerability
|
||||
* entries within the dependencies that match will be removed.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@@ -59,10 +61,29 @@ public class VulnerabilitySuppressionAnalyzer extends AbstractSuppressionAnalyze
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_VULNERABILITY_SUPPRESSION_ENABLED;
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* Analyzes a dependency's vulnerabilities against the configured CVE
|
||||
* suppressions.
|
||||
*
|
||||
* @param dependency the dependency being analyzed
|
||||
* @param engine a reference to the engine orchestrating the analysis
|
||||
* @throws AnalysisException thrown if there is an error during analysis
|
||||
*/
|
||||
@Override
|
||||
public void analyze(final Dependency dependency, final Engine engine) throws AnalysisException {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
|
||||
if (getRules() == null || getRules().size() <= 0) {
|
||||
return;
|
||||
|
||||
@@ -24,13 +24,13 @@ import java.net.URL;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.xpath.XPath;
|
||||
import javax.xml.xpath.XPathConstants;
|
||||
import javax.xml.xpath.XPathFactory;
|
||||
import org.owasp.dependencycheck.data.nexus.MavenArtifact;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.utils.URLConnectionFactory;
|
||||
import org.owasp.dependencycheck.utils.XmlUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.w3c.dom.Document;
|
||||
@@ -51,7 +51,7 @@ public class CentralSearch {
|
||||
/**
|
||||
* Whether to use the Proxy when making requests
|
||||
*/
|
||||
private boolean useProxy;
|
||||
private final boolean useProxy;
|
||||
|
||||
/**
|
||||
* Used for logging.
|
||||
@@ -61,8 +61,8 @@ public class CentralSearch {
|
||||
/**
|
||||
* Creates a NexusSearch for the given repository URL.
|
||||
*
|
||||
* @param rootURL the URL of the repository on which searches should execute. Only parameters are added to this (so it should
|
||||
* end in /select)
|
||||
* @param rootURL the URL of the repository on which searches should
|
||||
* execute. Only parameters are added to this (so it should end in /select)
|
||||
*/
|
||||
public CentralSearch(URL rootURL) {
|
||||
this.rootURL = rootURL;
|
||||
@@ -76,18 +76,20 @@ public class CentralSearch {
|
||||
}
|
||||
|
||||
/**
|
||||
* Searches the configured Central URL for the given sha1 hash. If the artifact is found, a <code>MavenArtifact</code> is
|
||||
* populated with the GAV.
|
||||
* Searches the configured Central URL for the given sha1 hash. If the
|
||||
* artifact is found, a <code>MavenArtifact</code> is populated with the
|
||||
* GAV.
|
||||
*
|
||||
* @param sha1 the SHA-1 hash string for which to search
|
||||
* @return the populated Maven GAV.
|
||||
* @throws IOException if it's unable to connect to the specified repository or if the specified artifact is not found.
|
||||
* @throws IOException if it's unable to connect to the specified repository
|
||||
* or if the specified artifact is not found.
|
||||
*/
|
||||
public List<MavenArtifact> searchSha1(String sha1) throws IOException {
|
||||
if (null == sha1 || !sha1.matches("^[0-9A-Fa-f]{40}$")) {
|
||||
throw new IllegalArgumentException("Invalid SHA1 format");
|
||||
}
|
||||
|
||||
List<MavenArtifact> result = null;
|
||||
final URL url = new URL(rootURL + String.format("?q=1:\"%s\"&wt=xml", sha1));
|
||||
|
||||
LOGGER.debug("Searching Central url {}", url);
|
||||
@@ -108,15 +110,14 @@ public class CentralSearch {
|
||||
if (conn.getResponseCode() == 200) {
|
||||
boolean missing = false;
|
||||
try {
|
||||
final DocumentBuilder builder = DocumentBuilderFactory
|
||||
.newInstance().newDocumentBuilder();
|
||||
final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
|
||||
final Document doc = builder.parse(conn.getInputStream());
|
||||
final XPath xpath = XPathFactory.newInstance().newXPath();
|
||||
final String numFound = xpath.evaluate("/response/result/@numFound", doc);
|
||||
if ("0".equals(numFound)) {
|
||||
missing = true;
|
||||
} else {
|
||||
final List<MavenArtifact> result = new ArrayList<MavenArtifact>();
|
||||
result = new ArrayList<MavenArtifact>();
|
||||
final NodeList docs = (NodeList) xpath.evaluate("/response/result/doc", doc, XPathConstants.NODESET);
|
||||
for (int i = 0; i < docs.getLength(); i++) {
|
||||
final String g = xpath.evaluate("./str[@name='g']", docs.item(i));
|
||||
@@ -144,16 +145,12 @@ public class CentralSearch {
|
||||
useHTTPS = true;
|
||||
}
|
||||
}
|
||||
|
||||
LOGGER.trace("Version: {}", v);
|
||||
result.add(new MavenArtifact(g, a, v, jarAvailable, pomAvailable, useHTTPS));
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
} catch (Throwable e) {
|
||||
// Anything else is jacked up XML stuff that we really can't recover
|
||||
// from well
|
||||
// Anything else is jacked up XML stuff that we really can't recover from well
|
||||
throw new IOException(e.getMessage(), e);
|
||||
}
|
||||
|
||||
@@ -162,10 +159,9 @@
}
} else {
LOGGER.debug("Could not connect to Central received response code: {} {}",
conn.getResponseCode(), conn.getResponseMessage());
conn.getResponseCode(), conn.getResponseMessage());
throw new IOException("Could not connect to Central");
}

return null;
return result;
}
}
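CentralSearch above now obtains its parser from XmlUtils.buildSecureDocumentBuilder() instead of a bare DocumentBuilderFactory (NexusSearch below gets the same treatment). That utility is not shown in this compare view; a typical hardened configuration for this purpose looks roughly like the following (an assumption about intent, not the project's actual implementation):

    import javax.xml.parsers.DocumentBuilder;
    import javax.xml.parsers.DocumentBuilderFactory;
    import javax.xml.parsers.ParserConfigurationException;

    static DocumentBuilder buildSecureDocumentBuilder() throws ParserConfigurationException {
        final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        // Refuse DOCTYPE declarations and external entities so a hostile search
        // response cannot trigger XXE or external DTD fetches.
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
        factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        factory.setXIncludeAware(false);
        factory.setExpandEntityReferences(false);
        return factory.newDocumentBuilder();
    }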
@@ -209,10 +209,13 @@ public final class CpeMemoryIndex {
|
||||
|
||||
final Set<Pair<String, String>> data = cve.getVendorProductList();
|
||||
for (Pair<String, String> pair : data) {
|
||||
v.setStringValue(pair.getLeft());
|
||||
p.setStringValue(pair.getRight());
|
||||
indexWriter.addDocument(doc);
|
||||
resetFieldAnalyzer();
|
||||
//todo figure out why there are null products
|
||||
if (pair.getLeft() != null && pair.getRight() != null) {
|
||||
v.setStringValue(pair.getLeft());
|
||||
p.setStringValue(pair.getRight());
|
||||
indexWriter.addDocument(doc);
|
||||
resetFieldAnalyzer();
|
||||
}
|
||||
}
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
|
||||
@@ -22,12 +22,11 @@ import java.io.IOException;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.URL;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.xpath.XPath;
|
||||
import javax.xml.xpath.XPathFactory;
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
import org.owasp.dependencycheck.utils.URLConnectionFactory;
|
||||
import org.owasp.dependencycheck.utils.XmlUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.w3c.dom.Document;
|
||||
@@ -47,7 +46,7 @@ public class NexusSearch {
|
||||
/**
|
||||
* Whether to use the Proxy when making requests.
|
||||
*/
|
||||
private boolean useProxy;
|
||||
private final boolean useProxy;
|
||||
/**
|
||||
* Used for logging.
|
||||
*/
|
||||
@@ -56,32 +55,26 @@ public class NexusSearch {
|
||||
/**
|
||||
* Creates a NexusSearch for the given repository URL.
|
||||
*
|
||||
* @param rootURL the root URL of the repository on which searches should execute. full URL's are calculated relative to this
|
||||
* URL, so it should end with a /
|
||||
* @param rootURL the root URL of the repository on which searches should
|
||||
* execute. full URL's are calculated relative to this URL, so it should end
|
||||
* with a /
|
||||
* @param useProxy flag indicating if the proxy settings should be used
|
||||
*/
|
||||
public NexusSearch(URL rootURL) {
|
||||
public NexusSearch(URL rootURL, boolean useProxy) {
|
||||
this.rootURL = rootURL;
|
||||
try {
|
||||
if (null != Settings.getString(Settings.KEYS.PROXY_SERVER)
|
||||
&& Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY)) {
|
||||
useProxy = true;
|
||||
LOGGER.debug("Using proxy");
|
||||
} else {
|
||||
useProxy = false;
|
||||
LOGGER.debug("Not using proxy");
|
||||
}
|
||||
} catch (InvalidSettingException ise) {
|
||||
useProxy = false;
|
||||
}
|
||||
this.useProxy = useProxy;
LOGGER.debug("Using proxy: {}", useProxy);
}
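With the constructor change above, the proxy decision moves to the caller. A hedged usage sketch; the repository URL and hash below are placeholders, not values from this change set:

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;

public class NexusSearchUsageSketch {
    public static void main(String[] args) throws MalformedURLException {
        // the root URL must end with a '/' per the javadoc above
        final URL rootUrl = new URL("https://nexus.example.com/service/local/");
        final NexusSearch search = new NexusSearch(rootUrl, false);
        try {
            final MavenArtifact artifact = search.searchSha1("0123456789abcdef0123456789abcdef01234567");
            System.out.println("Found: " + artifact);
        } catch (IOException ex) {
            // thrown both when the repository is unreachable and when the hash is unknown
            System.err.println("Nexus lookup failed: " + ex.getMessage());
        }
    }
}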
|
||||
|
||||
/**
|
||||
* Searches the configured Nexus repository for the given sha1 hash. If the artifact is found, a <code>MavenArtifact</code> is
|
||||
* populated with the coordinate information.
|
||||
* Searches the configured Nexus repository for the given sha1 hash. If the
|
||||
* artifact is found, a <code>MavenArtifact</code> is populated with the
|
||||
* coordinate information.
|
||||
*
|
||||
* @param sha1 The SHA-1 hash string for which to search
|
||||
* @return the populated Maven coordinates
|
||||
* @throws IOException if it's unable to connect to the specified repository or if the specified artifact is not found.
|
||||
* @throws IOException if it's unable to connect to the specified repository
|
||||
* or if the specified artifact is not found.
|
||||
*/
|
||||
public MavenArtifact searchSha1(String sha1) throws IOException {
|
||||
if (null == sha1 || !sha1.matches("^[0-9A-Fa-f]{40}$")) {
|
||||
@@ -106,57 +99,58 @@ public class NexusSearch {
|
||||
conn.addRequestProperty("Accept", "application/xml");
|
||||
conn.connect();
|
||||
|
||||
if (conn.getResponseCode() == 200) {
|
||||
try {
|
||||
final DocumentBuilder builder = DocumentBuilderFactory
|
||||
.newInstance().newDocumentBuilder();
|
||||
final Document doc = builder.parse(conn.getInputStream());
|
||||
final XPath xpath = XPathFactory.newInstance().newXPath();
|
||||
final String groupId = xpath
|
||||
.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/groupId",
|
||||
doc);
|
||||
final String artifactId = xpath.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/artifactId",
|
||||
doc);
|
||||
final String version = xpath
|
||||
.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/version",
|
||||
doc);
|
||||
final String link = xpath
|
||||
.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/artifactLink",
|
||||
doc);
|
||||
final String pomLink = xpath
|
||||
.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/pomLink",
|
||||
doc);
|
||||
final MavenArtifact ma = new MavenArtifact(groupId, artifactId, version);
|
||||
if (link != null && !link.isEmpty()) {
|
||||
ma.setArtifactUrl(link);
|
||||
switch (conn.getResponseCode()) {
|
||||
case 200:
|
||||
try {
|
||||
final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
|
||||
final Document doc = builder.parse(conn.getInputStream());
|
||||
final XPath xpath = XPathFactory.newInstance().newXPath();
|
||||
final String groupId = xpath
|
||||
.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/groupId",
|
||||
doc);
|
||||
final String artifactId = xpath.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/artifactId",
|
||||
doc);
|
||||
final String version = xpath
|
||||
.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/version",
|
||||
doc);
|
||||
final String link = xpath
|
||||
.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/artifactLink",
|
||||
doc);
|
||||
final String pomLink = xpath
|
||||
.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/pomLink",
|
||||
doc);
|
||||
final MavenArtifact ma = new MavenArtifact(groupId, artifactId, version);
|
||||
if (link != null && !link.isEmpty()) {
|
||||
ma.setArtifactUrl(link);
|
||||
}
|
||||
if (pomLink != null && !pomLink.isEmpty()) {
|
||||
ma.setPomUrl(pomLink);
|
||||
}
|
||||
return ma;
|
||||
} catch (Throwable e) {
|
||||
// Anything else is jacked-up XML stuff that we really can't recover
|
||||
// from well
|
||||
throw new IOException(e.getMessage(), e);
|
||||
}
|
||||
if (pomLink != null && !pomLink.isEmpty()) {
|
||||
ma.setPomUrl(pomLink);
|
||||
}
|
||||
return ma;
|
||||
} catch (Throwable e) {
|
||||
// Anything else is jacked-up XML stuff that we really can't recover
|
||||
// from well
|
||||
throw new IOException(e.getMessage(), e);
|
||||
}
|
||||
} else if (conn.getResponseCode() == 404) {
|
||||
throw new FileNotFoundException("Artifact not found in Nexus");
|
||||
} else {
|
||||
LOGGER.debug("Could not connect to Nexus received response code: {} {}",
|
||||
conn.getResponseCode(), conn.getResponseMessage());
|
||||
throw new IOException("Could not connect to Nexus");
|
||||
case 404:
|
||||
throw new FileNotFoundException("Artifact not found in Nexus");
|
||||
default:
|
||||
LOGGER.debug("Could not connect to Nexus received response code: {} {}",
|
||||
conn.getResponseCode(), conn.getResponseMessage());
|
||||
throw new IOException("Could not connect to Nexus");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Do a preflight request to see if the repository is actually working.
|
||||
*
|
||||
* @return whether the repository is listening and returns the /status URL correctly
|
||||
* @return whether the repository is listening and returns the /status URL
|
||||
* correctly
|
||||
*/
|
||||
public boolean preflightRequest() {
|
||||
HttpURLConnection conn;
|
||||
@@ -169,7 +163,8 @@ public class NexusSearch {
|
||||
LOGGER.warn("Expected 200 result from Nexus, got {}", conn.getResponseCode());
|
||||
return false;
|
||||
}
|
||||
final DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
|
||||
final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
|
||||
|
||||
final Document doc = builder.parse(conn.getInputStream());
|
||||
if (!"status".equals(doc.getDocumentElement().getNodeName())) {
|
||||
LOGGER.warn("Expected root node name of status, got {}", doc.getDocumentElement().getNodeName());
|
||||
|
||||
@@ -18,10 +18,11 @@
|
||||
package org.owasp.dependencycheck.data.nuget;
|
||||
|
||||
import java.io.InputStream;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.xpath.XPath;
|
||||
import javax.xml.xpath.XPathConstants;
|
||||
import javax.xml.xpath.XPathFactory;
|
||||
import org.owasp.dependencycheck.utils.XmlUtils;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Node;
|
||||
|
||||
@@ -36,7 +37,8 @@ public class XPathNuspecParser implements NuspecParser {
|
||||
* Gets the string value of a node or null if it's not present
|
||||
*
|
||||
* @param n the node to test
|
||||
* @return the string content of the node, or null if the node itself is null
|
||||
* @return the string content of the node, or null if the node itself is
|
||||
* null
|
||||
*/
|
||||
private String getOrNull(Node n) {
|
||||
if (n != null) {
|
||||
@@ -56,7 +58,9 @@ public class XPathNuspecParser implements NuspecParser {
|
||||
@Override
|
||||
public NugetPackage parse(InputStream stream) throws NuspecParseException {
|
||||
try {
|
||||
final Document d = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(stream);
|
||||
final DocumentBuilder db = XmlUtils.buildSecureDocumentBuilder();
|
||||
final Document d = db.parse(stream);
|
||||
|
||||
final XPath xpath = XPathFactory.newInstance().newXPath();
|
||||
final NugetPackage nuspec = new NugetPackage();
|
||||
|
||||
|
||||
@@ -36,8 +36,10 @@ import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* Loads the configured database driver and returns the database connection. If the embedded H2 database is used obtaining a
|
||||
* connection will ensure the database file exists and that the appropriate table structure has been created.
|
||||
* Loads the configured database driver and returns the database connection. If
|
||||
* the embedded H2 database is used obtaining a connection will ensure the
|
||||
* database file exists and that the appropriate table structure has been
|
||||
* created.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
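A minimal sketch of the intended call pattern for this factory, assuming the static initialize/getConnection/cleanup API shown in this change set; error handling is trimmed for brevity:

import java.sql.Connection;
import java.sql.SQLException;

public class ConnectionFactoryUsageSketch {
    public static void main(String[] args) throws DatabaseException {
        // loads the driver and, for embedded H2, creates the schema on first use
        ConnectionFactory.initialize();
        Connection conn = null;
        try {
            conn = ConnectionFactory.getConnection();
            // ... run statements against the dependency-check schema ...
        } finally {
            if (conn != null) {
                try {
                    conn.close();
                } catch (SQLException ignore) {
                    // best-effort close
                }
            }
            // deregister the driver before the class loader goes away
            ConnectionFactory.cleanup();
        }
    }
}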
|
||||
@@ -87,12 +89,13 @@ public final class ConnectionFactory {
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes the connection factory. Ensuring that the appropriate drivers are loaded and that a connection can be made
|
||||
* successfully.
|
||||
* Initializes the connection factory. Ensuring that the appropriate drivers
|
||||
* are loaded and that a connection can be made successfully.
|
||||
*
|
||||
* @throws DatabaseException thrown if we are unable to connect to the database
|
||||
* @throws DatabaseException thrown if we are unable to connect to the
|
||||
* database
|
||||
*/
|
||||
public static synchronized void initialize() throws DatabaseException {
|
||||
public static void initialize() throws DatabaseException {
|
||||
//this only needs to be called once.
|
||||
if (connectionString != null) {
|
||||
return;
|
||||
@@ -188,11 +191,12 @@ public final class ConnectionFactory {
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleans up resources and unloads any registered database drivers. This needs to be called to ensure the driver is
|
||||
* unregistered prior to the finalize method being called as during shutdown the class loader used to load the driver may be
|
||||
* unloaded prior to the driver being de-registered.
|
||||
* Cleans up resources and unloads any registered database drivers. This
|
||||
* needs to be called to ensure the driver is unregistered prior to the
|
||||
* finalize method being called as during shutdown the class loader used to
|
||||
* load the driver may be unloaded prior to the driver being de-registered.
|
||||
*/
|
||||
public static synchronized void cleanup() {
|
||||
public static void cleanup() {
|
||||
if (driver != null) {
|
||||
try {
|
||||
DriverManager.deregisterDriver(driver);
|
||||
@@ -210,10 +214,12 @@ public final class ConnectionFactory {
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new database connection object per the database configuration.
|
||||
* Constructs a new database connection object per the database
|
||||
* configuration.
|
||||
*
|
||||
* @return a database connection object
|
||||
* @throws DatabaseException thrown if there is an exception loading the database connection
|
||||
* @throws DatabaseException thrown if there is an exception loading the
|
||||
* database connection
|
||||
*/
|
||||
public static Connection getConnection() throws DatabaseException {
|
||||
initialize();
|
||||
@@ -228,10 +234,12 @@ public final class ConnectionFactory {
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if the H2 database file exists. If it does not exist then the data structure will need to be created.
|
||||
* Determines if the H2 database file exists. If it does not exist then the
|
||||
* data structure will need to be created.
|
||||
*
|
||||
* @return true if the H2 database file does not exist; otherwise false
|
||||
* @throws IOException thrown if the data directory does not exist and cannot be created
|
||||
* @throws IOException thrown if the data directory does not exist and
|
||||
* cannot be created
|
||||
*/
|
||||
private static boolean h2DataFileExists() throws IOException {
|
||||
final File dir = Settings.getDataDirectory();
|
||||
@@ -241,7 +249,8 @@ public final class ConnectionFactory {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the database structure (tables and indexes) to store the CVE data.
|
||||
* Creates the database structure (tables and indexes) to store the CVE
|
||||
* data.
|
||||
*
|
||||
* @param conn the database connection
|
||||
* @throws DatabaseException thrown if there is a Database Exception
|
||||
@@ -271,14 +280,17 @@ public final class ConnectionFactory {
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the database schema by loading the upgrade script for the version specified. The intended use is that if the
|
||||
* current schema version is 2.9 then we would call updateSchema(conn, "2.9"). This would load the upgrade_2.9.sql file and
|
||||
* execute it against the database. The upgrade script must update the 'version' in the properties table.
|
||||
* Updates the database schema by loading the upgrade script for the version
|
||||
* specified. The intended use is that if the current schema version is 2.9
|
||||
* then we would call updateSchema(conn, "2.9"). This would load the
|
||||
* upgrade_2.9.sql file and execute it against the database. The upgrade
|
||||
* script must update the 'version' in the properties table.
|
||||
*
|
||||
* @param conn the database connection object
|
||||
* @param appExpectedVersion the schema version that the application expects
|
||||
* @param currentDbVersion the current schema version of the database
|
||||
* @throws DatabaseException thrown if there is an exception upgrading the database schema
|
||||
* @throws DatabaseException thrown if there is an exception upgrading the
|
||||
* database schema
|
||||
*/
|
||||
private static void updateSchema(Connection conn, DependencyVersion appExpectedVersion, DependencyVersion currentDbVersion)
|
||||
throws DatabaseException {
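The javadoc above spells out a naming convention for upgrade scripts. A tiny sketch of that mapping; the "data/" prefix is an assumption, as the exact resource location inside dependency-check is not shown in this hunk:

public final class UpgradeScriptNameSketch {
    private UpgradeScriptNameSketch() {
    }

    /** Maps a current schema version to the upgrade script that should be run. */
    static String upgradeScriptFor(String currentDbVersion) {
        return "data/upgrade_" + currentDbVersion + ".sql";
    }

    public static void main(String[] args) {
        // prints data/upgrade_2.9.sql, matching the example in the javadoc
        System.out.println(upgradeScriptFor("2.9"));
    }
}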
|
||||
@@ -340,15 +352,18 @@ public final class ConnectionFactory {
|
||||
}
|
||||
|
||||
/**
|
||||
* Counter to ensure that calls to ensureSchemaVersion does not end up in an endless loop.
|
||||
* Counter to ensure that calls to ensureSchemaVersion does not end up in an
|
||||
* endless loop.
|
||||
*/
|
||||
private static int callDepth = 0;
|
||||
|
||||
/**
|
||||
* Uses the provided connection to check the specified schema version within the database.
|
||||
* Uses the provided connection to check the specified schema version within
|
||||
* the database.
|
||||
*
|
||||
* @param conn the database connection object
|
||||
* @throws DatabaseException thrown if the schema version is not compatible with this version of dependency-check
|
||||
* @throws DatabaseException thrown if the schema version is not compatible
|
||||
* with this version of dependency-check
|
||||
*/
|
||||
private static void ensureSchemaVersion(Connection conn) throws DatabaseException {
|
||||
ResultSet rs = null;
|
||||
@@ -359,7 +374,13 @@ public final class ConnectionFactory {
|
||||
rs = ps.executeQuery();
|
||||
if (rs.next()) {
|
||||
final DependencyVersion appDbVersion = DependencyVersionUtil.parseVersion(DB_SCHEMA_VERSION);
|
||||
if (appDbVersion == null) {
|
||||
throw new DatabaseException("Invalid application database schema");
|
||||
}
|
||||
final DependencyVersion db = DependencyVersionUtil.parseVersion(rs.getString(1));
|
||||
if (db == null) {
|
||||
throw new DatabaseException("Invalid database schema");
|
||||
}
|
||||
if (appDbVersion.compareTo(db) > 0) {
|
||||
LOGGER.debug("Current Schema: {}", DB_SCHEMA_VERSION);
|
||||
LOGGER.debug("DB Schema: {}", rs.getString(1));
|
||||
|
||||
@@ -87,10 +87,12 @@ public class CveDB {
|
||||
open();
|
||||
try {
|
||||
final String databaseProductName = conn.getMetaData().getDatabaseProductName();
|
||||
batchSupported = conn.getMetaData().supportsBatchUpdates();
|
||||
LOGGER.debug("Database dialect: {}", databaseProductName);
|
||||
final Locale dbDialect = new Locale(databaseProductName);
|
||||
statementBundle = ResourceBundle.getBundle("data/dbStatements", dbDialect);
|
||||
if ("mysql".equalsIgnoreCase(databaseProductName)) {
|
||||
batchSupported = false;
|
||||
}
|
||||
} catch (SQLException se) {
|
||||
LOGGER.warn("Problem loading database specific dialect!", se);
|
||||
statementBundle = ResourceBundle.getBundle("data/dbStatements");
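The constructor above selects dialect-specific SQL by reusing Locale as a lookup key for a ResourceBundle. A sketch of that lookup, assuming per-dialect bundles such as dbStatements_mysql.properties sit next to the default dbStatements.properties:

import java.util.Locale;
import java.util.ResourceBundle;

public final class DialectBundleSketch {
    static ResourceBundle statementsFor(String databaseProductName) {
        // "MySQL" -> data/dbStatements_mysql.properties when present;
        // ResourceBundle falls back to data/dbStatements.properties otherwise
        final Locale dialect = new Locale(databaseProductName);
        return ResourceBundle.getBundle("data/dbStatements", dialect);
    }
}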
|
||||
@@ -117,7 +119,7 @@ public class CveDB {
|
||||
* @throws DatabaseException thrown if there is an error opening the
|
||||
* database connection
|
||||
*/
|
||||
public final void open() throws DatabaseException {
|
||||
public final synchronized void open() throws DatabaseException {
|
||||
if (!isOpen()) {
|
||||
conn = ConnectionFactory.getConnection();
|
||||
}
|
||||
@@ -127,7 +129,7 @@ public class CveDB {
|
||||
* Closes the DB4O database. Close should be called on this object when it
|
||||
* is done being used.
|
||||
*/
|
||||
public void close() {
|
||||
public synchronized void close() {
|
||||
if (conn != null) {
|
||||
try {
|
||||
conn.close();
|
||||
@@ -147,7 +149,7 @@ public class CveDB {
|
||||
*
|
||||
* @return whether the database connection is open or closed
|
||||
*/
|
||||
public boolean isOpen() {
|
||||
public synchronized boolean isOpen() {
|
||||
return conn != null;
|
||||
}
|
||||
|
||||
@@ -156,7 +158,7 @@ public class CveDB {
|
||||
*
|
||||
* @throws SQLException thrown if a SQL Exception occurs
|
||||
*/
|
||||
public void commit() throws SQLException {
|
||||
public synchronized void commit() throws SQLException {
|
||||
//temporary remove this as autocommit is on.
|
||||
//if (conn != null) {
|
||||
// conn.commit();
|
||||
@@ -200,7 +202,7 @@ public class CveDB {
|
||||
* analyzed
|
||||
* @return a set of vulnerable software
|
||||
*/
|
||||
public Set<VulnerableSoftware> getCPEs(String vendor, String product) {
|
||||
public synchronized Set<VulnerableSoftware> getCPEs(String vendor, String product) {
|
||||
final Set<VulnerableSoftware> cpe = new HashSet<VulnerableSoftware>();
|
||||
ResultSet rs = null;
|
||||
PreparedStatement ps = null;
|
||||
@@ -232,7 +234,7 @@ public class CveDB {
|
||||
* @throws DatabaseException thrown when there is an error retrieving the
|
||||
* data from the DB
|
||||
*/
|
||||
public Set<Pair<String, String>> getVendorProductList() throws DatabaseException {
|
||||
public synchronized Set<Pair<String, String>> getVendorProductList() throws DatabaseException {
|
||||
final Set<Pair<String, String>> data = new HashSet<Pair<String, String>>();
|
||||
ResultSet rs = null;
|
||||
PreparedStatement ps = null;
|
||||
@@ -257,7 +259,7 @@ public class CveDB {
|
||||
*
|
||||
* @return the properties from the database
|
||||
*/
|
||||
Properties getProperties() {
|
||||
synchronized Properties getProperties() {
|
||||
final Properties prop = new Properties();
|
||||
PreparedStatement ps = null;
|
||||
ResultSet rs = null;
|
||||
@@ -283,7 +285,7 @@ public class CveDB {
|
||||
* @param key the property key
|
||||
* @param value the property value
|
||||
*/
|
||||
void saveProperty(String key, String value) {
|
||||
synchronized void saveProperty(String key, String value) {
|
||||
try {
|
||||
try {
|
||||
final PreparedStatement mergeProperty = getConnection().prepareStatement(statementBundle.getString("MERGE_PROPERTY"));
|
||||
@@ -326,7 +328,7 @@ public class CveDB {
|
||||
* @return a list of Vulnerabilities
|
||||
* @throws DatabaseException thrown if there is an exception retrieving data
|
||||
*/
|
||||
public List<Vulnerability> getVulnerabilities(String cpeStr) throws DatabaseException {
|
||||
public synchronized List<Vulnerability> getVulnerabilities(String cpeStr) throws DatabaseException {
|
||||
final VulnerableSoftware cpe = new VulnerableSoftware();
|
||||
try {
|
||||
cpe.parseName(cpeStr);
|
||||
@@ -387,7 +389,7 @@ public class CveDB {
|
||||
* @return a vulnerability object
|
||||
* @throws DatabaseException if an exception occurs
|
||||
*/
|
||||
public Vulnerability getVulnerability(String cve) throws DatabaseException {
|
||||
public synchronized Vulnerability getVulnerability(String cve) throws DatabaseException {
|
||||
PreparedStatement psV = null;
|
||||
PreparedStatement psR = null;
|
||||
PreparedStatement psS = null;
|
||||
@@ -460,7 +462,7 @@ public class CveDB {
|
||||
* @param vuln the vulnerability to add to the database
|
||||
* @throws DatabaseException is thrown if the database
|
||||
*/
|
||||
public void updateVulnerability(Vulnerability vuln) throws DatabaseException {
|
||||
public synchronized void updateVulnerability(Vulnerability vuln) throws DatabaseException {
|
||||
PreparedStatement selectVulnerabilityId = null;
|
||||
PreparedStatement deleteVulnerability = null;
|
||||
PreparedStatement deleteReferences = null;
|
||||
@@ -636,7 +638,7 @@ public class CveDB {
|
||||
*
|
||||
* @return <code>true</code> if data exists; otherwise <code>false</code>
|
||||
*/
|
||||
public boolean dataExists() {
|
||||
public synchronized boolean dataExists() {
|
||||
Statement cs = null;
|
||||
ResultSet rs = null;
|
||||
try {
|
||||
@@ -658,7 +660,7 @@ public class CveDB {
|
||||
+ "If the problem persist try deleting the files in '{}' and running {} again. If the problem continues, please "
|
||||
+ "create a log file (see documentation at http://jeremylong.github.io/DependencyCheck/) and open a ticket at "
|
||||
+ "https://github.com/jeremylong/DependencyCheck/issues and include the log file.\n\n",
|
||||
dd, dd, Settings.getString(Settings.KEYS.APPLICATION_VAME));
|
||||
dd, dd, Settings.getString(Settings.KEYS.APPLICATION_NAME));
|
||||
LOGGER.debug("", ex);
|
||||
} finally {
|
||||
DBUtils.closeResultSet(rs);
|
||||
@@ -672,7 +674,7 @@ public class CveDB {
|
||||
* updates. This should be called after all updates have been completed to
|
||||
* ensure orphan entries are removed.
|
||||
*/
|
||||
public void cleanupDatabase() {
|
||||
public synchronized void cleanupDatabase() {
|
||||
PreparedStatement ps = null;
|
||||
try {
|
||||
ps = getConnection().prepareStatement(statementBundle.getString("CLEANUP_ORPHANS"));
|
||||
@@ -810,7 +812,7 @@ public class CveDB {
|
||||
*
|
||||
* Deletes unused dictionary entries from the database.
|
||||
*/
|
||||
public void deleteUnusedCpe() {
|
||||
public synchronized void deleteUnusedCpe() {
|
||||
PreparedStatement ps = null;
|
||||
try {
|
||||
ps = getConnection().prepareStatement(statementBundle.getString("DELETE_UNUSED_DICT_CPE"));
|
||||
@@ -832,7 +834,7 @@ public class CveDB {
|
||||
* @param vendor the CPE vendor
|
||||
* @param product the CPE product
|
||||
*/
|
||||
public void addCpe(String cpe, String vendor, String product) {
|
||||
public synchronized void addCpe(String cpe, String vendor, String product) {
|
||||
PreparedStatement ps = null;
|
||||
try {
|
||||
ps = getConnection().prepareStatement(statementBundle.getString("ADD_DICT_CPE"));
|
||||
|
||||
@@ -17,13 +17,13 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.data.nvdcve;
|
||||
|
||||
import java.text.DateFormat;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Date;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Properties;
|
||||
import java.util.TreeMap;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.format.DateTimeFormat;
|
||||
import org.joda.time.format.DateTimeFormatter;
|
||||
import org.owasp.dependencycheck.data.update.nvd.NvdCveInfo;
|
||||
import org.owasp.dependencycheck.data.update.exception.UpdateException;
|
||||
import org.slf4j.Logger;
|
||||
@@ -41,21 +41,24 @@ public class DatabaseProperties {
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(DatabaseProperties.class);
|
||||
/**
|
||||
* Modified key word, used as a key to store information about the modified file (i.e. the containing the last 8 days of
|
||||
* updates)..
|
||||
* Modified key word, used as a key to store information about the modified
|
||||
* file (i.e. the containing the last 8 days of updates)..
|
||||
*/
|
||||
public static final String MODIFIED = "Modified";
|
||||
/**
|
||||
* The properties file key for the last checked field - used to store the last check time of the Modified NVD CVE xml file.
|
||||
* The properties file key for the last checked field - used to store the
|
||||
* last check time of the Modified NVD CVE xml file.
|
||||
*/
|
||||
public static final String LAST_CHECKED = "NVD CVE Checked";
|
||||
/**
|
||||
* The properties file key for the last updated field - used to store the last updated time of the Modified NVD CVE xml file.
|
||||
* The properties file key for the last updated field - used to store the
|
||||
* last updated time of the Modified NVD CVE xml file.
|
||||
*/
|
||||
public static final String LAST_UPDATED = "NVD CVE Modified";
|
||||
/**
|
||||
* Stores the last updated time for each of the NVD CVE files. These timestamps should be updated if we process the modified
|
||||
* file within 7 days of the last update.
|
||||
* Stores the last updated time for each of the NVD CVE files. These
|
||||
* timestamps should be updated if we process the modified file within 7
|
||||
* days of the last update.
|
||||
*/
|
||||
public static final String LAST_UPDATED_BASE = "NVD CVE ";
|
||||
/**
|
||||
@@ -121,7 +124,8 @@ public class DatabaseProperties {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the property value for the given key. If the key is not contained in the underlying properties null is returned.
|
||||
* Returns the property value for the given key. If the key is not contained
|
||||
* in the underlying properties null is returned.
|
||||
*
|
||||
* @param key the property key
|
||||
* @return the value of the property
|
||||
@@ -131,8 +135,8 @@ public class DatabaseProperties {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the property value for the given key. If the key is not contained in the underlying properties the default value is
|
||||
* returned.
|
||||
* Returns the property value for the given key. If the key is not contained
|
||||
* in the underlying properties the default value is returned.
|
||||
*
|
||||
* @param key the property key
|
||||
* @param defaultValue the default value
|
||||
@@ -152,8 +156,9 @@ public class DatabaseProperties {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a map of the meta data from the database properties. This primarily contains timestamps of when the NVD CVE
|
||||
* information was last updated.
|
||||
* Returns a map of the meta data from the database properties. This
|
||||
* primarily contains timestamps of when the NVD CVE information was last
|
||||
* updated.
|
||||
*
|
||||
* @return a map of the database meta data
|
||||
*/
|
||||
@@ -165,9 +170,12 @@ public class DatabaseProperties {
|
||||
if (key.startsWith("NVD CVE ")) {
|
||||
try {
|
||||
final long epoch = Long.parseLong((String) entry.getValue());
|
||||
final Date date = new Date(epoch);
|
||||
final DateFormat format = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
|
||||
final String formatted = format.format(date);
|
||||
final DateTime date = new DateTime(epoch);
|
||||
final DateTimeFormatter format = DateTimeFormat.forPattern("dd/MM/yyyy HH:mm:ss");
|
||||
final String formatted = format.print(date);
|
||||
// final Date date = new Date(epoch);
|
||||
// final DateFormat format = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
|
||||
// final String formatted = format.format(date);
|
||||
map.put(key, formatted);
|
||||
} catch (Throwable ex) { //deliberately being broad in this catch clause
|
||||
LOGGER.debug("Unable to parse timestamp from DB", ex);
|
||||
|
||||
@@ -115,7 +115,6 @@ class DriverShim implements Driver {
|
||||
* @throws SQLFeatureNotSupportedException thrown if the feature is not supported
|
||||
* @see java.sql.Driver#getParentLogger()
|
||||
*/
|
||||
@Override
|
||||
public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException {
|
||||
//return driver.getParentLogger();
|
||||
Method m = null;
|
||||
|
||||
@@ -18,18 +18,12 @@
|
||||
package org.owasp.dependencycheck.data.update;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.List;
|
||||
import java.util.zip.GZIPInputStream;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import javax.xml.parsers.SAXParser;
|
||||
import javax.xml.parsers.SAXParserFactory;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import static org.owasp.dependencycheck.data.nvdcve.DatabaseProperties.LAST_CPE_UPDATE;
|
||||
import org.owasp.dependencycheck.data.update.cpe.CPEHandler;
|
||||
import org.owasp.dependencycheck.data.update.cpe.Cpe;
|
||||
@@ -37,21 +31,28 @@ import org.owasp.dependencycheck.data.update.exception.UpdateException;
|
||||
import org.owasp.dependencycheck.utils.DateUtil;
|
||||
import org.owasp.dependencycheck.utils.DownloadFailedException;
|
||||
import org.owasp.dependencycheck.utils.Downloader;
|
||||
import org.owasp.dependencycheck.utils.ExtractionUtil;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.utils.XmlUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.xml.sax.SAXException;
|
||||
|
||||
/**
|
||||
*
|
||||
* This class is currently unused and if enabled will likely not work on MySQL as the MERGE statement is used.
|
||||
* This class is currently unused and if enabled will likely not work on MySQL
|
||||
* as the MERGE statement is used.
|
||||
*
|
||||
* The CpeUpdater is designed to download the CPE data file from NIST and import the data into the database. However, as this
|
||||
* currently adds no beneficial data, compared to what is in the CPE data contained in the CVE data files, this class is not
|
||||
* currently used. The code is being kept as a future update may utilize more data from the CPE xml files.
|
||||
* The CpeUpdater is designed to download the CPE data file from NIST and import
|
||||
* the data into the database. However, as this currently adds no beneficial
|
||||
* data, compared to what is in the CPE data contained in the CVE data files,
|
||||
* this class is not currently used. The code is being kept as a future update
|
||||
* may utilize more data from the CPE XML files.
|
||||
*
|
||||
* @deprecated the CPE updater is not currently used.
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@Deprecated
|
||||
public class CpeUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
|
||||
/**
|
||||
@@ -61,6 +62,17 @@ public class CpeUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
|
||||
@Override
|
||||
public void update() throws UpdateException {
|
||||
/*
|
||||
//the following could be used if this were ever used.
|
||||
try {
|
||||
if (!Settings.getBoolean(Settings.KEYS.UPDATE_NVDCVE_ENABLED, true)) {
|
||||
return;
|
||||
}
|
||||
} catch (InvalidSettingException ex) {
|
||||
LOGGER.trace("inavlid setting UPDATE_NVDCVE_ENABLED", ex);
|
||||
}
|
||||
*/
|
||||
|
||||
try {
|
||||
openDataStores();
|
||||
if (updateNeeded()) {
|
||||
@@ -84,7 +96,8 @@ public class CpeUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
* Downloads the CPE XML file.
|
||||
*
|
||||
* @return the file reference to the CPE.xml file
|
||||
* @throws UpdateException thrown if there is an issue downloading the XML file
|
||||
* @throws UpdateException thrown if there is an issue downloading the XML
|
||||
* file
|
||||
*/
|
||||
private File downloadCpe() throws UpdateException {
|
||||
File xml;
|
||||
@@ -94,7 +107,7 @@ public class CpeUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
xml = File.createTempFile("cpe", ".xml", Settings.getTempDirectory());
|
||||
Downloader.fetchFile(url, xml);
|
||||
if (url.toExternalForm().endsWith(".xml.gz")) {
|
||||
extractGzip(xml);
|
||||
ExtractionUtil.extractGzip(xml);
|
||||
}
|
||||
|
||||
} catch (MalformedURLException ex) {
|
||||
@@ -112,12 +125,12 @@ public class CpeUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
*
|
||||
* @param xml the CPE data file
|
||||
* @return the list of CPE entries
|
||||
* @throws UpdateException thrown if there is an issue with parsing the XML file
|
||||
* @throws UpdateException thrown if there is an issue with parsing the XML
|
||||
* file
|
||||
*/
|
||||
private List<Cpe> processXML(final File xml) throws UpdateException {
|
||||
try {
|
||||
final SAXParserFactory factory = SAXParserFactory.newInstance();
|
||||
final SAXParser saxParser = factory.newSAXParser();
|
||||
final SAXParser saxParser = XmlUtils.buildSecureSaxParser();
|
||||
final CPEHandler handler = new CPEHandler();
|
||||
saxParser.parse(xml, handler);
|
||||
return handler.getData();
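XmlUtils.buildSecureSaxParser() replaces the plain SAXParserFactory here as well. A sketch of the kind of hardening such a helper typically applies (an assumption; the real implementation may differ):

import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.SAXException;

public final class SecureSaxParserSketch {
    public static SAXParser build() throws ParserConfigurationException, SAXException {
        final SAXParserFactory factory = SAXParserFactory.newInstance();
        factory.setNamespaceAware(true);
        // do not fetch external DTDs or resolve external entities
        factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
        factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        factory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
        return factory.newSAXParser();
    }
}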
|
||||
@@ -131,7 +144,8 @@ public class CpeUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks to find the last time the CPE data was refreshed and if it needs to be updated.
|
||||
* Checks to find the last time the CPE data was refreshed and if it needs
|
||||
* to be updated.
|
||||
*
|
||||
* @return true if the CPE data should be refreshed
|
||||
*/
|
||||
@@ -145,56 +159,4 @@ public class CpeUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
}
|
||||
return !DateUtil.withinDateRange(timestamp, now, days);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the file contained in a gzip archive. The extracted file is placed in the exact same path as the file specified.
|
||||
*
|
||||
* @param file the archive file
|
||||
* @throws FileNotFoundException thrown if the file does not exist
|
||||
* @throws IOException thrown if there is an error extracting the file.
|
||||
*/
|
||||
private void extractGzip(File file) throws FileNotFoundException, IOException {
|
||||
//TODO - move this to a util class as it is duplicative of (copy of) code in the DownloadTask
|
||||
final String originalPath = file.getPath();
|
||||
final File gzip = new File(originalPath + ".gz");
|
||||
if (gzip.isFile() && !gzip.delete()) {
|
||||
gzip.deleteOnExit();
|
||||
}
|
||||
if (!file.renameTo(gzip)) {
|
||||
throw new IOException("Unable to rename '" + file.getPath() + "'");
|
||||
}
|
||||
final File newfile = new File(originalPath);
|
||||
|
||||
final byte[] buffer = new byte[4096];
|
||||
|
||||
GZIPInputStream cin = null;
|
||||
FileOutputStream out = null;
|
||||
try {
|
||||
cin = new GZIPInputStream(new FileInputStream(gzip));
|
||||
out = new FileOutputStream(newfile);
|
||||
|
||||
int len;
|
||||
while ((len = cin.read(buffer)) > 0) {
|
||||
out.write(buffer, 0, len);
|
||||
}
|
||||
} finally {
|
||||
if (cin != null) {
|
||||
try {
|
||||
cin.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.trace("ignore", ex);
|
||||
}
|
||||
}
|
||||
if (out != null) {
|
||||
try {
|
||||
out.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.trace("ignore", ex);
|
||||
}
|
||||
}
|
||||
if (gzip.isFile()) {
|
||||
FileUtils.deleteQuietly(gzip);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -99,7 +99,16 @@ public class EngineVersionCheck implements CachedWebDataSource {
|
||||
@Override
|
||||
public void update() throws UpdateException {
|
||||
try {
|
||||
if (Settings.getBoolean(Settings.KEYS.AUTO_UPDATE)) {
|
||||
final boolean autoupdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE, true);
|
||||
final boolean enabled = Settings.getBoolean(Settings.KEYS.UPDATE_VERSION_CHECK_ENABLED, true);
|
||||
final String original = Settings.getString(Settings.KEYS.CVE_ORIGINAL_MODIFIED_20_URL);
|
||||
final String current = Settings.getString(Settings.KEYS.CVE_MODIFIED_20_URL);
|
||||
/**
|
||||
* Only update if auto-update is enabled, the engine check is
|
||||
* enabled, and the NVD CVE URLs have not been modified (i.e. the
|
||||
* user has not configured them to point to an internal source).
|
||||
*/
|
||||
if (enabled && autoupdate && original != null && original.equals(current)) {
|
||||
openDatabase();
|
||||
LOGGER.debug("Begin Engine Version Check");
|
||||
final DatabaseProperties properties = cveDB.getDatabaseProperties();
|
||||
@@ -118,7 +127,7 @@ public class EngineVersionCheck implements CachedWebDataSource {
|
||||
}
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.debug("Database Exception opening databases to retrieve properties", ex);
|
||||
throw new UpdateException("Error occured updating database properties.");
|
||||
throw new UpdateException("Error occurred updating database properties.");
|
||||
} catch (InvalidSettingException ex) {
|
||||
LOGGER.debug("Unable to determine if autoupdate is enabled", ex);
|
||||
} finally {
|
||||
@@ -220,11 +229,11 @@ public class EngineVersionCheck implements CachedWebDataSource {
|
||||
return releaseVersion.trim();
|
||||
}
|
||||
} catch (MalformedURLException ex) {
|
||||
LOGGER.debug("Unable to retrieve current release version of dependency-check", ex);
|
||||
LOGGER.debug("Unable to retrieve current release version of dependency-check - malformed url?");
|
||||
} catch (URLConnectionFailureException ex) {
|
||||
LOGGER.debug("Unable to retrieve current release version of dependency-check", ex);
|
||||
LOGGER.debug("Unable to retrieve current release version of dependency-check - connection failed");
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Unable to retrieve current release version of dependency-check", ex);
|
||||
LOGGER.debug("Unable to retrieve current release version of dependency-check - i/o exception");
|
||||
} finally {
|
||||
if (conn != null) {
|
||||
conn.disconnect();
|
||||
|
||||
@@ -67,6 +67,14 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
*/
|
||||
@Override
|
||||
public void update() throws UpdateException {
|
||||
try {
|
||||
if (!Settings.getBoolean(Settings.KEYS.UPDATE_NVDCVE_ENABLED, true)) {
|
||||
return;
|
||||
}
|
||||
} catch (InvalidSettingException ex) {
|
||||
LOGGER.trace("inavlid setting UPDATE_NVDCVE_ENABLED", ex);
|
||||
}
|
||||
|
||||
try {
|
||||
openDataStores();
|
||||
boolean autoUpdate = true;
|
||||
@@ -77,10 +85,10 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
}
|
||||
if (autoUpdate && checkUpdate()) {
|
||||
final UpdateableNvdCve updateable = getUpdatesNeeded();
|
||||
getProperties().save(DatabaseProperties.LAST_CHECKED, Long.toString(System.currentTimeMillis()));
|
||||
if (updateable.isUpdateNeeded()) {
|
||||
performUpdate(updateable);
|
||||
}
|
||||
getProperties().save(DatabaseProperties.LAST_CHECKED, Long.toString(System.currentTimeMillis()));
|
||||
}
|
||||
} catch (MalformedURLException ex) {
|
||||
throw new UpdateException("NVD CVE properties files contain an invalid URL, unable to update the data to use the most current data.", ex);
|
||||
@@ -156,93 +164,86 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
* @throws UpdateException is thrown if there is an error updating the
|
||||
* database
|
||||
*/
|
||||
public void performUpdate(UpdateableNvdCve updateable) throws UpdateException {
|
||||
private void performUpdate(UpdateableNvdCve updateable) throws UpdateException {
|
||||
int maxUpdates = 0;
|
||||
try {
|
||||
for (NvdCveInfo cve : updateable) {
|
||||
if (cve.getNeedsUpdate()) {
|
||||
maxUpdates += 1;
|
||||
for (NvdCveInfo cve : updateable) {
|
||||
if (cve.getNeedsUpdate()) {
|
||||
maxUpdates += 1;
|
||||
}
|
||||
}
|
||||
if (maxUpdates <= 0) {
|
||||
return;
|
||||
}
|
||||
if (maxUpdates > 3) {
|
||||
LOGGER.info("NVD CVE requires several updates; this could take a couple of minutes.");
|
||||
}
|
||||
|
||||
final int poolSize = (MAX_THREAD_POOL_SIZE < maxUpdates) ? MAX_THREAD_POOL_SIZE : maxUpdates;
|
||||
|
||||
final ExecutorService downloadExecutors = Executors.newFixedThreadPool(poolSize);
|
||||
final ExecutorService processExecutor = Executors.newSingleThreadExecutor();
|
||||
final Set<Future<Future<ProcessTask>>> downloadFutures = new HashSet<Future<Future<ProcessTask>>>(maxUpdates);
|
||||
for (NvdCveInfo cve : updateable) {
|
||||
if (cve.getNeedsUpdate()) {
|
||||
final DownloadTask call = new DownloadTask(cve, processExecutor, getCveDB(), Settings.getInstance());
|
||||
downloadFutures.add(downloadExecutors.submit(call));
|
||||
}
|
||||
}
|
||||
downloadExecutors.shutdown();
|
||||
|
||||
//next, move the future future processTasks to just future processTasks
|
||||
final Set<Future<ProcessTask>> processFutures = new HashSet<Future<ProcessTask>>(maxUpdates);
|
||||
for (Future<Future<ProcessTask>> future : downloadFutures) {
|
||||
Future<ProcessTask> task = null;
|
||||
try {
|
||||
task = future.get();
|
||||
} catch (InterruptedException ex) {
|
||||
downloadExecutors.shutdownNow();
|
||||
processExecutor.shutdownNow();
|
||||
|
||||
LOGGER.debug("Thread was interrupted during download", ex);
|
||||
throw new UpdateException("The download was interrupted", ex);
|
||||
} catch (ExecutionException ex) {
|
||||
downloadExecutors.shutdownNow();
|
||||
processExecutor.shutdownNow();
|
||||
|
||||
LOGGER.debug("Thread was interrupted during download execution", ex);
|
||||
throw new UpdateException("The execution of the download was interrupted", ex);
|
||||
}
|
||||
if (task == null) {
|
||||
downloadExecutors.shutdownNow();
|
||||
processExecutor.shutdownNow();
|
||||
LOGGER.debug("Thread was interrupted during download");
|
||||
throw new UpdateException("The download was interrupted; unable to complete the update");
|
||||
} else {
|
||||
processFutures.add(task);
|
||||
}
|
||||
}
|
||||
|
||||
for (Future<ProcessTask> future : processFutures) {
|
||||
try {
|
||||
final ProcessTask task = future.get();
|
||||
if (task.getException() != null) {
|
||||
throw task.getException();
|
||||
}
|
||||
} catch (InterruptedException ex) {
|
||||
processExecutor.shutdownNow();
|
||||
LOGGER.debug("Thread was interrupted during processing", ex);
|
||||
throw new UpdateException(ex);
|
||||
} catch (ExecutionException ex) {
|
||||
processExecutor.shutdownNow();
|
||||
LOGGER.debug("Execution Exception during process", ex);
|
||||
throw new UpdateException(ex);
|
||||
} finally {
|
||||
processExecutor.shutdown();
|
||||
}
|
||||
if (maxUpdates <= 0) {
|
||||
return;
|
||||
}
|
||||
if (maxUpdates > 3) {
|
||||
LOGGER.info("NVD CVE requires several updates; this could take a couple of minutes.");
|
||||
}
|
||||
if (maxUpdates > 0) {
|
||||
openDataStores();
|
||||
}
|
||||
}
|
||||
|
||||
final int poolSize = (MAX_THREAD_POOL_SIZE < maxUpdates) ? MAX_THREAD_POOL_SIZE : maxUpdates;
|
||||
|
||||
final ExecutorService downloadExecutors = Executors.newFixedThreadPool(poolSize);
|
||||
final ExecutorService processExecutor = Executors.newSingleThreadExecutor();
|
||||
final Set<Future<Future<ProcessTask>>> downloadFutures = new HashSet<Future<Future<ProcessTask>>>(maxUpdates);
|
||||
for (NvdCveInfo cve : updateable) {
|
||||
if (cve.getNeedsUpdate()) {
|
||||
final DownloadTask call = new DownloadTask(cve, processExecutor, getCveDB(), Settings.getInstance());
|
||||
downloadFutures.add(downloadExecutors.submit(call));
|
||||
}
|
||||
}
|
||||
downloadExecutors.shutdown();
|
||||
|
||||
//next, move the future future processTasks to just future processTasks
|
||||
final Set<Future<ProcessTask>> processFutures = new HashSet<Future<ProcessTask>>(maxUpdates);
|
||||
for (Future<Future<ProcessTask>> future : downloadFutures) {
|
||||
Future<ProcessTask> task = null;
|
||||
try {
|
||||
task = future.get();
|
||||
} catch (InterruptedException ex) {
|
||||
downloadExecutors.shutdownNow();
|
||||
processExecutor.shutdownNow();
|
||||
|
||||
LOGGER.debug("Thread was interrupted during download", ex);
|
||||
throw new UpdateException("The download was interrupted", ex);
|
||||
} catch (ExecutionException ex) {
|
||||
downloadExecutors.shutdownNow();
|
||||
processExecutor.shutdownNow();
|
||||
|
||||
LOGGER.debug("Thread was interrupted during download execution", ex);
|
||||
throw new UpdateException("The execution of the download was interrupted", ex);
|
||||
}
|
||||
if (task == null) {
|
||||
downloadExecutors.shutdownNow();
|
||||
processExecutor.shutdownNow();
|
||||
LOGGER.debug("Thread was interrupted during download");
|
||||
throw new UpdateException("The download was interrupted; unable to complete the update");
|
||||
} else {
|
||||
processFutures.add(task);
|
||||
}
|
||||
}
|
||||
|
||||
for (Future<ProcessTask> future : processFutures) {
|
||||
try {
|
||||
final ProcessTask task = future.get();
|
||||
if (task.getException() != null) {
|
||||
throw task.getException();
|
||||
}
|
||||
} catch (InterruptedException ex) {
|
||||
processExecutor.shutdownNow();
|
||||
LOGGER.debug("Thread was interrupted during processing", ex);
|
||||
throw new UpdateException(ex);
|
||||
} catch (ExecutionException ex) {
|
||||
processExecutor.shutdownNow();
|
||||
LOGGER.debug("Execution Exception during process", ex);
|
||||
throw new UpdateException(ex);
|
||||
} finally {
|
||||
processExecutor.shutdown();
|
||||
}
|
||||
}
|
||||
|
||||
if (maxUpdates >= 1) { //ensure the modified file date gets written (we may not have actually updated it)
|
||||
getProperties().save(updateable.get(MODIFIED));
|
||||
LOGGER.info("Begin database maintenance.");
|
||||
getCveDB().cleanupDatabase();
|
||||
LOGGER.info("End database maintenance.");
|
||||
}
|
||||
} finally {
|
||||
closeDataStores();
|
||||
if (maxUpdates >= 1) { //ensure the modified file date gets written (we may not have actually updated it)
|
||||
getProperties().save(updateable.get(MODIFIED));
|
||||
LOGGER.info("Begin database maintenance.");
|
||||
getCveDB().cleanupDatabase();
|
||||
LOGGER.info("End database maintenance.");
|
||||
}
|
||||
}
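The method above fans downloads out to a small pool while funnelling all processing through a single-threaded executor, which is why each download future resolves to a further ProcessTask future. A stripped-down sketch of that shape, with illustrative names only:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public final class DownloadProcessPipelineSketch {
    public static void main(String[] args) throws InterruptedException, ExecutionException {
        final ExecutorService downloaders = Executors.newFixedThreadPool(3);
        final ExecutorService processor = Executors.newSingleThreadExecutor();
        final List<Future<Future<String>>> downloads = new ArrayList<Future<Future<String>>>();
        for (int i = 0; i < 5; i++) {
            final int id = i;
            downloads.add(downloaders.submit(new Callable<Future<String>>() {
                @Override
                public Future<String> call() {
                    // the "download" is done; queue the serialized processing step
                    return processor.submit(new Callable<String>() {
                        @Override
                        public String call() {
                            return "processed " + id;
                        }
                    });
                }
            }));
        }
        downloaders.shutdown();
        for (Future<Future<String>> download : downloads) {
            // wait for the download first, then for its processing step
            System.out.println(download.get().get());
        }
        processor.shutdown();
    }
}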
|
||||
|
||||
@@ -278,12 +279,22 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
}
|
||||
if (!getProperties().isEmpty()) {
|
||||
try {
|
||||
final int startYear = Settings.getInt(Settings.KEYS.CVE_START_YEAR, 2002);
|
||||
final int endYear = Calendar.getInstance().get(Calendar.YEAR);
|
||||
boolean needsFullUpdate = false;
|
||||
for (int y = startYear; y <= endYear; y++) {
|
||||
final long val = Long.parseLong(getProperties().getProperty(DatabaseProperties.LAST_UPDATED_BASE + y, "0"));
|
||||
if (val == 0) {
|
||||
needsFullUpdate = true;
|
||||
}
|
||||
}
|
||||
|
||||
final long lastUpdated = Long.parseLong(getProperties().getProperty(DatabaseProperties.LAST_UPDATED, "0"));
|
||||
final long now = System.currentTimeMillis();
|
||||
final int days = Settings.getInt(Settings.KEYS.CVE_MODIFIED_VALID_FOR_DAYS, 7);
|
||||
if (lastUpdated == updates.getTimeStamp(MODIFIED)) {
|
||||
if (!needsFullUpdate && lastUpdated == updates.getTimeStamp(MODIFIED)) {
|
||||
updates.clear(); //we don't need to update anything.
|
||||
} else if (DateUtil.withinDateRange(lastUpdated, now, days)) {
|
||||
} else if (!needsFullUpdate && DateUtil.withinDateRange(lastUpdated, now, days)) {
|
||||
for (NvdCveInfo entry : updates) {
|
||||
if (MODIFIED.equals(entry.getId())) {
|
||||
entry.setNeedsUpdate(true);
|
||||
|
||||
@@ -20,19 +20,17 @@ package org.owasp.dependencycheck.data.update.nvd;
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URL;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.zip.GZIPInputStream;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.update.exception.UpdateException;
|
||||
import org.owasp.dependencycheck.utils.DownloadFailedException;
|
||||
import org.owasp.dependencycheck.utils.Downloader;
|
||||
import org.owasp.dependencycheck.utils.ExtractionUtil;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -55,8 +53,9 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
|
||||
* @param nvdCveInfo the NVD CVE info
|
||||
* @param processor the processor service to submit the downloaded files to
|
||||
* @param cveDB the CVE DB to use to store the vulnerability data
|
||||
* @param settings a reference to the global settings object; this is necessary so that when the thread is started the
|
||||
* dependencies have a correct reference to the global settings.
|
||||
* @param settings a reference to the global settings object; this is
|
||||
* necessary so that when the thread is started the dependencies have a
|
||||
* correct reference to the global settings.
|
||||
* @throws UpdateException thrown if temporary files could not be created
|
||||
*/
|
||||
public DownloadTask(NvdCveInfo nvdCveInfo, ExecutorService processor, CveDB cveDB, Settings settings) throws UpdateException {
|
||||
@@ -178,10 +177,10 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
|
||||
return null;
|
||||
}
|
||||
if (url1.toExternalForm().endsWith(".xml.gz") && !isXml(first)) {
|
||||
extractGzip(first);
|
||||
ExtractionUtil.extractGzip(first);
|
||||
}
|
||||
if (url2.toExternalForm().endsWith(".xml.gz") && !isXml(second)) {
|
||||
extractGzip(second);
|
||||
ExtractionUtil.extractGzip(second);
|
||||
}
|
||||
|
||||
LOGGER.info("Download Complete for NVD CVE - {} ({} ms)", nvdCveInfo.getId(),
|
||||
@@ -205,25 +204,13 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
|
||||
* Attempts to delete the files that were downloaded.
|
||||
*/
|
||||
public void cleanup() {
|
||||
boolean deleted = false;
|
||||
try {
|
||||
if (first != null && first.exists()) {
|
||||
deleted = first.delete();
|
||||
}
|
||||
} finally {
|
||||
if (first != null && (first.exists() || !deleted)) {
|
||||
first.deleteOnExit();
|
||||
}
|
||||
if (first != null && first.exists() && !first.delete()) {
LOGGER.debug("Failed to delete first temporary file {}", first.toString());
|
||||
first.deleteOnExit();
|
||||
}
|
||||
try {
|
||||
deleted = false;
|
||||
if (second != null && second.exists()) {
|
||||
deleted = second.delete();
|
||||
}
|
||||
} finally {
|
||||
if (second != null && (second.exists() || !deleted)) {
|
||||
second.deleteOnExit();
|
||||
}
|
||||
if (second != null && second.exists() && !second.delete()) {
|
||||
LOGGER.debug("Failed to delete second temporary file {}", second.toString());
|
||||
second.deleteOnExit();
|
||||
}
|
||||
}
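A small sketch of the delete-or-defer intent behind this cleanup, assuming the goal is to log only when an immediate delete fails and then fall back to deleteOnExit(); the helper name is illustrative:

import java.io.File;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public final class TempFileCleanupSketch {
    private static final Logger LOGGER = LoggerFactory.getLogger(TempFileCleanupSketch.class);

    static void deleteOrDefer(File file, String label) {
        if (file != null && file.exists() && !file.delete()) {
            // deletion failed right now; log it and ask the JVM to retry at exit
            LOGGER.debug("Failed to delete {} temporary file {}", label, file);
            file.deleteOnExit();
        }
    }
}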
|
||||
|
||||
@@ -266,55 +253,4 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the file contained in a gzip archive. The extracted file is placed in the exact same path as the file specified.
|
||||
*
|
||||
* @param file the archive file
|
||||
* @throws FileNotFoundException thrown if the file does not exist
|
||||
* @throws IOException thrown if there is an error extracting the file.
|
||||
*/
|
||||
private void extractGzip(File file) throws FileNotFoundException, IOException {
|
||||
final String originalPath = file.getPath();
|
||||
final File gzip = new File(originalPath + ".gz");
|
||||
if (gzip.isFile() && !gzip.delete()) {
|
||||
gzip.deleteOnExit();
|
||||
}
|
||||
if (!file.renameTo(gzip)) {
|
||||
throw new IOException("Unable to rename '" + file.getPath() + "'");
|
||||
}
|
||||
final File newfile = new File(originalPath);
|
||||
|
||||
final byte[] buffer = new byte[4096];
|
||||
|
||||
GZIPInputStream cin = null;
|
||||
FileOutputStream out = null;
|
||||
try {
|
||||
cin = new GZIPInputStream(new FileInputStream(gzip));
|
||||
out = new FileOutputStream(newfile);
|
||||
|
||||
int len;
|
||||
while ((len = cin.read(buffer)) > 0) {
|
||||
out.write(buffer, 0, len);
|
||||
}
|
||||
} finally {
|
||||
if (cin != null) {
|
||||
try {
|
||||
cin.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.trace("ignore", ex);
|
||||
}
|
||||
}
|
||||
if (out != null) {
|
||||
try {
|
||||
out.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.trace("ignore", ex);
|
||||
}
|
||||
}
|
||||
if (gzip.isFile()) {
|
||||
FileUtils.deleteQuietly(gzip);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,19 +26,20 @@ import java.util.Map;
|
||||
import java.util.concurrent.Callable;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import javax.xml.parsers.SAXParser;
|
||||
import javax.xml.parsers.SAXParserFactory;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
|
||||
import org.owasp.dependencycheck.data.update.exception.UpdateException;
|
||||
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.utils.XmlUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.xml.sax.SAXException;
|
||||
|
||||
/**
|
||||
* A callable task that will process a given set of NVD CVE xml files and update the Cve Database accordingly.
|
||||
* A callable task that will process a given set of NVD CVE xml files and update
|
||||
* the Cve Database accordingly.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@@ -91,9 +92,11 @@ public class ProcessTask implements Callable<ProcessTask> {
|
||||
* Constructs a new ProcessTask used to process an NVD CVE update.
|
||||
*
|
||||
* @param cveDB the data store object
|
||||
* @param filePair the download task that contains the URL references to download
|
||||
* @param settings a reference to the global settings object; this is necessary so that when the thread is started the
|
||||
* dependencies have a correct reference to the global settings.
|
||||
* @param filePair the download task that contains the URL references to
|
||||
* download
|
||||
* @param settings a reference to the global settings object; this is
|
||||
* necessary so that when the thread is started the dependencies have a
|
||||
* correct reference to the global settings.
|
||||
*/
|
||||
public ProcessTask(final CveDB cveDB, final DownloadTask filePair, Settings settings) {
|
||||
this.cveDB = cveDB;
|
||||
@@ -106,8 +109,8 @@ public class ProcessTask implements Callable<ProcessTask> {
|
||||
* Implements the callable interface.
|
||||
*
|
||||
* @return this object
|
||||
* @throws Exception thrown if there is an exception; note that any UpdateExceptions are simply added to the tasks exception
|
||||
* collection
|
||||
* @throws Exception thrown if there is an exception; note that any
|
||||
* UpdateExceptions are simply added to the tasks exception collection
|
||||
*/
|
||||
@Override
|
||||
public ProcessTask call() throws Exception {
|
||||
@@ -127,18 +130,19 @@ public class ProcessTask implements Callable<ProcessTask> {
|
||||
*
|
||||
* @param file the file containing the NVD CVE XML
|
||||
* @param oldVersion contains the file containing the NVD CVE XML 1.2
|
||||
* @throws ParserConfigurationException is thrown if there is a parser configuration exception
|
||||
* @throws ParserConfigurationException is thrown if there is a parser
|
||||
* configuration exception
|
||||
* @throws SAXException is thrown if there is a SAXException
|
||||
* @throws IOException is thrown if there is a IO Exception
|
||||
* @throws SQLException is thrown if there is a SQL exception
|
||||
* @throws DatabaseException is thrown if there is a database exception
|
||||
* @throws ClassNotFoundException thrown if the h2 database driver cannot be loaded
|
||||
* @throws ClassNotFoundException thrown if the h2 database driver cannot be
|
||||
* loaded
|
||||
*/
|
||||
protected void importXML(File file, File oldVersion) throws ParserConfigurationException,
|
||||
SAXException, IOException, SQLException, DatabaseException, ClassNotFoundException {
|
||||
|
||||
final SAXParserFactory factory = SAXParserFactory.newInstance();
|
||||
final SAXParser saxParser = factory.newSAXParser();
|
||||
final SAXParser saxParser = XmlUtils.buildSecureSaxParser();
|
||||
|
||||
final NvdCve12Handler cve12Handler = new NvdCve12Handler();
|
||||
saxParser.parse(oldVersion, cve12Handler);
|
||||
@@ -153,7 +157,8 @@ public class ProcessTask implements Callable<ProcessTask> {
|
||||
/**
|
||||
* Processes the NVD CVE XML file and imports the data into the DB.
|
||||
*
|
||||
* @throws UpdateException thrown if there is an error loading the data into the database
|
||||
* @throws UpdateException thrown if there is an error loading the data into
|
||||
* the database
|
||||
*/
|
||||
private void processFiles() throws UpdateException {
|
||||
LOGGER.info("Processing Started for NVD CVE - {}", filePair.getNvdCveInfo().getId());
|
||||
@@ -180,6 +185,6 @@ public class ProcessTask implements Callable<ProcessTask> {
|
||||
filePair.cleanup();
|
||||
}
|
||||
LOGGER.info("Processing Complete for NVD CVE - {} ({} ms)", filePair.getNvdCveInfo().getId(),
|
||||
System.currentTimeMillis() - startProcessing);
|
||||
System.currentTimeMillis() - startProcessing);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -53,9 +53,9 @@ public class Evidence implements Serializable, Comparable<Evidence> {
|
||||
/**
|
||||
* Creates a new Evidence objects.
|
||||
*
|
||||
* @param source the source of the evidence.
|
||||
* @param name the name of the evidence.
|
||||
* @param value the value of the evidence.
|
||||
* @param source the source of the evidence.
|
||||
* @param name the name of the evidence.
|
||||
* @param value the value of the evidence.
|
||||
* @param confidence the confidence of the evidence.
|
||||
*/
|
||||
public Evidence(String source, String name, String value, Confidence confidence) {
|
||||
@@ -127,9 +127,11 @@ public class Evidence implements Serializable, Comparable<Evidence> {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the value of value. If setUsed is set to false this call to get will not mark the evidence as used.
|
||||
* Get the value of value. If setUsed is set to false this call to get will
|
||||
* not mark the evidence as used.
|
||||
*
|
||||
* @param setUsed whether or not this call to getValue should cause the used flag to be updated
|
||||
* @param setUsed whether or not this call to getValue should cause the used
|
||||
* flag to be updated
|
||||
* @return the value of value
|
||||
*/
|
||||
public String getValue(Boolean setUsed) {
|
||||
@@ -200,11 +202,11 @@ public class Evidence implements Serializable, Comparable<Evidence> {
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return new HashCodeBuilder(MAGIC_HASH_INIT_VALUE, MAGIC_HASH_MULTIPLIER)
|
||||
.append(StringUtils.lowerCase(name))
|
||||
.append(StringUtils.lowerCase(source))
|
||||
.append(StringUtils.lowerCase(value))
|
||||
.append(confidence)
|
||||
.toHashCode();
|
||||
.append(StringUtils.lowerCase(name))
|
||||
.append(StringUtils.lowerCase(source))
|
||||
.append(StringUtils.lowerCase(value))
|
||||
.append(confidence)
|
||||
.toHashCode();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -213,6 +215,7 @@ public class Evidence implements Serializable, Comparable<Evidence> {
|
||||
* @param that an object to check the equality of.
|
||||
* @return whether the two objects are equal.
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public boolean equals(Object that) {
|
||||
if (this == that) {
|
||||
@@ -223,6 +226,8 @@ public class Evidence implements Serializable, Comparable<Evidence> {
|
||||
}
|
||||
final Evidence e = (Evidence) that;
|
||||
|
||||
//TODO the call to ObjectUtils.equals needs to be replaced when we
|
||||
//stop supporting Jenkins 1.6 requirement.
|
||||
return StringUtils.equalsIgnoreCase(name, e.name)
|
||||
&& StringUtils.equalsIgnoreCase(source, e.source)
|
||||
&& StringUtils.equalsIgnoreCase(value, e.value)
|
||||
@@ -235,6 +240,7 @@ public class Evidence implements Serializable, Comparable<Evidence> {
|
||||
* @param o the evidence being compared
|
||||
* @return an integer indicating the ordering of the two objects
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public int compareTo(Evidence o) {
|
||||
if (o == null) {
|
||||
@@ -243,6 +249,8 @@ public class Evidence implements Serializable, Comparable<Evidence> {
|
||||
if (StringUtils.equalsIgnoreCase(source, o.source)) {
|
||||
if (StringUtils.equalsIgnoreCase(name, o.name)) {
|
||||
if (StringUtils.equalsIgnoreCase(value, o.value)) {
|
||||
//TODO the call to ObjectUtils.equals needs to be replaced when we
|
||||
//stop supporting Jenkins 1.6 requirement.
|
||||
if (ObjectUtils.equals(confidence, o.confidence)) {
|
||||
return 0; //they are equal
|
||||
} else {
|
||||
@@ -260,10 +268,11 @@ public class Evidence implements Serializable, Comparable<Evidence> {
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper around {@link java.lang.String#compareToIgnoreCase(java.lang.String) String.compareToIgnoreCase} with an
|
||||
* exhaustive, possibly duplicative, check against nulls.
|
||||
* Wrapper around
|
||||
* {@link java.lang.String#compareToIgnoreCase(java.lang.String) String.compareToIgnoreCase}
|
||||
* with an exhaustive, possibly duplicative, check against nulls.
|
||||
*
|
||||
* @param me the value to be compared
|
||||
* @param me the value to be compared
|
||||
* @param other the other value to be compared
|
||||
* @return true if the values are equal; otherwise false
|
||||
*/
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
package org.owasp.dependencycheck.dependency;
|
||||
|
||||
import java.io.Serializable;
|
||||
import org.apache.commons.lang3.builder.CompareToBuilder;
|
||||
|
||||
/**
|
||||
* An external reference for a vulnerability. This contains a name, URL, and a
|
||||
@@ -141,18 +142,10 @@ public class Reference implements Serializable, Comparable<Reference> {
*/
@Override
public int compareTo(Reference o) {
if (source.equals(o.source)) {
if (name.equals(o.name)) {
if (url.equals(o.url)) {
return 0; //they are equal
} else {
return url.compareTo(o.url);
}
} else {
return name.compareTo(o.name);
}
} else {
return source.compareTo(o.source);
}
return new CompareToBuilder()
.append(source, o.source)
.append(name, o.name)
.append(url, o.url)
.toComparison();
}
}

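Swapping the nested if/else chain for commons-lang3's CompareToBuilder keeps the source/name/url ordering in one expression and, unlike the direct compareTo calls it replaces, tolerates null fields (CompareToBuilder orders null before non-null). A small stand-alone illustration with made-up values:

    import org.apache.commons.lang3.builder.CompareToBuilder;

    public class ReferenceOrderingDemo {
        public static void main(String[] args) {
            // Compares field by field, stopping at the first difference.
            final int order = new CompareToBuilder()
                    .append("NVD", "NVD")                      // source vs. source
                    .append("CVE-2015-0001", "CVE-2015-0002")  // name vs. name
                    .append("https://example.org/a", "https://example.org/b")
                    .toComparison();
            System.out.println(order < 0); // true: the first reference sorts before the second
        }
    }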
@@ -21,6 +21,7 @@ import java.io.Serializable;
|
||||
import java.util.Set;
|
||||
import java.util.SortedSet;
|
||||
import java.util.TreeSet;
|
||||
import org.apache.commons.lang3.builder.CompareToBuilder;
|
||||
|
||||
/**
|
||||
* Contains the information about a vulnerability.
|
||||
@@ -161,7 +162,8 @@ public class Vulnerability implements Serializable, Comparable<Vulnerability> {
|
||||
* Adds an entry for vulnerable software.
|
||||
*
|
||||
* @param cpe string representation of a cpe
|
||||
* @param previousVersion the previous version (previousVersion - cpe would be considered vulnerable)
|
||||
* @param previousVersion the previous version (previousVersion - cpe would
|
||||
* be considered vulnerable)
|
||||
* @return if the add succeeded
|
||||
*/
|
||||
public boolean addVulnerableSoftware(String cpe, String previousVersion) {
|
||||
@@ -390,28 +392,32 @@ public class Vulnerability implements Serializable, Comparable<Vulnerability> {
|
||||
sb.append(this.name);
|
||||
sb.append("\nReferences:\n");
|
||||
for (Reference reference : this.references) {
|
||||
sb.append("=> ");
|
||||
sb.append(reference);
|
||||
sb.append("\n");
|
||||
sb.append("=> ");
|
||||
sb.append(reference);
|
||||
sb.append("\n");
|
||||
}
|
||||
sb.append("\nSoftware:\n");
|
||||
for (VulnerableSoftware software : this.vulnerableSoftware) {
|
||||
sb.append("=> ");
|
||||
sb.append(software);
|
||||
sb.append("\n");
|
||||
sb.append("=> ");
|
||||
sb.append(software);
|
||||
sb.append("\n");
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Compares two vulnerabilities.
|
||||
*
|
||||
* @param v a vulnerability to be compared
|
||||
* @return a negative integer, zero, or a positive integer as this object is less than, equal to, or greater than
|
||||
* the specified vulnerability
|
||||
* @return a negative integer, zero, or a positive integer as this object is
|
||||
* less than, equal to, or greater than the specified vulnerability
|
||||
*/
|
||||
@Override
|
||||
public int compareTo(Vulnerability v) {
|
||||
return v.getName().compareTo(this.getName());
|
||||
return new CompareToBuilder()
|
||||
.append(this.name, v.name)
|
||||
.toComparison();
|
||||
//return v.getName().compareTo(this.getName());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -427,8 +433,8 @@ public class Vulnerability implements Serializable, Comparable<Vulnerability> {
|
||||
* Sets the CPE that caused this vulnerability to be flagged.
|
||||
*
|
||||
* @param cpeId a CPE identifier
|
||||
* @param previous a flag indicating whether or not all previous versions were affected (any non-null value is
|
||||
* considered true)
|
||||
* @param previous a flag indicating whether or not all previous versions
|
||||
* were affected (any non-null value is considered true)
|
||||
*/
|
||||
public void setMatchedCPE(String cpeId, String previous) {
|
||||
matchedCPE = cpeId;
|
||||
|
||||
@@ -226,14 +226,24 @@ public class VulnerableSoftware extends IndexEntry implements Serializable, Comp

/**
* Determines if the string passed in is a positive integer.
* To be counted as a positive integer, the string must only contain 0-9
* and must not have any leading zeros (though "0" is a valid positive
* integer).
*
* @param str the string to test
* @return true if the string only contains 0-9, otherwise false.
*/
private static boolean isPositiveInteger(final String str) {
static boolean isPositiveInteger(final String str) {
if (str == null || str.isEmpty()) {
return false;
}

// numbers with leading zeros should not be treated as numbers
// (e.g. when comparing "01" <-> "1")
if (str.charAt(0) == '0' && str.length() > 1) {
return false;
}

for (int i = 0; i < str.length(); i++) {
final char c = str.charAt(i);
if (c < '0' || c > '9') {

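The tail of this hunk is truncated above; restating the documented contract as a small self-contained demo (not the project's own test code) makes the leading-zero rule explicit:

    public class PositiveIntegerCheckDemo {

        // Re-statement of the contract described in the javadoc above, for illustration only.
        static boolean isPositiveInteger(final String str) {
            if (str == null || str.isEmpty()) {
                return false;
            }
            // numbers with leading zeros are rejected (e.g. "01"), but "0" alone is accepted
            if (str.charAt(0) == '0' && str.length() > 1) {
                return false;
            }
            for (int i = 0; i < str.length(); i++) {
                final char c = str.charAt(i);
                if (c < '0' || c > '9') {
                    return false;
                }
            }
            return true;
        }

        public static void main(String[] args) {
            System.out.println(isPositiveInteger("0"));   // true
            System.out.println(isPositiveInteger("42"));  // true
            System.out.println(isPositiveInteger("01"));  // false - leading zero
            System.out.println(isPositiveInteger("4a"));  // false - non-digit
            System.out.println(isPositiveInteger(""));    // false - empty
        }
    }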
@@ -25,7 +25,7 @@ import java.util.List;
|
||||
/**
|
||||
* A collection of several exceptions.
|
||||
*
|
||||
* @author Jeremy Lomg
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class ExceptionCollection extends Exception {
|
||||
|
||||
@@ -54,7 +54,7 @@ public class ExceptionCollection extends Exception {
|
||||
* Instantiates a new exception collection.
|
||||
*
|
||||
* @param exceptions a list of exceptions
|
||||
* @param fatal indicates if the exception that occurred is fatal - meaning
|
||||
* @param fatal indicates if any of the exceptions that occurred is fatal - meaning
|
||||
* that no analysis was performed.
|
||||
*/
|
||||
public ExceptionCollection(List<Throwable> exceptions, boolean fatal) {
|
||||
@@ -68,7 +68,7 @@ public class ExceptionCollection extends Exception {
|
||||
*
|
||||
* @param msg the exception message
|
||||
* @param exceptions a list of exceptions
|
||||
* @param fatal indicates if the exception that occurred is fatal - meaning
|
||||
* @param fatal indicates if any of the exceptions that occurred is fatal - meaning
|
||||
* that no analysis was performed.
|
||||
*/
|
||||
public ExceptionCollection(String msg, List<Throwable> exceptions, boolean fatal) {
|
||||
@@ -90,7 +90,8 @@ public class ExceptionCollection extends Exception {
|
||||
this.exceptions.add(exceptions);
|
||||
this.fatal = fatal;
|
||||
}
|
||||
/**
|
||||
|
||||
/**
|
||||
* Instantiates a new exception collection.
|
||||
*
|
||||
* @param msg the exception message
|
||||
@@ -180,7 +181,7 @@ public class ExceptionCollection extends Exception {
|
||||
*/
|
||||
@Override
|
||||
public void printStackTrace(PrintWriter s) {
|
||||
s.println("Multiple Exceptions Occured");
|
||||
s.println("Multiple Exceptions Occurred");
|
||||
super.printStackTrace(s);
|
||||
for (Throwable t : this.exceptions) {
|
||||
s.println("Next Exception:");
|
||||
@@ -195,7 +196,7 @@ public class ExceptionCollection extends Exception {
|
||||
*/
|
||||
@Override
|
||||
public void printStackTrace(PrintStream s) {
|
||||
s.println("Multiple Exceptions Occured");
|
||||
s.println("Multiple Exceptions Occurred");
|
||||
super.printStackTrace(s);
|
||||
for (Throwable t : this.exceptions) {
|
||||
s.println("Next Exception:");
|
||||
@@ -204,11 +205,23 @@ public class ExceptionCollection extends Exception {
|
||||
}
|
||||
|
||||
/**
* Prints the stack trace to standard error.
* Returns the error message, including the message from all contained
* exceptions.
*
* @return the error message
*/
@Override
public void printStackTrace() {
this.printStackTrace(System.err);
public String getMessage() {
final StringBuilder sb = new StringBuilder();
final String msg = super.getMessage();
if (msg == null || msg.isEmpty()) {
sb.append("One or more exceptions occurred during analysis:");
} else {
sb.append(msg);
}
for (Throwable t : this.exceptions) {
sb.append("\n\t").append(t.getMessage());
}
return sb.toString();
}

}

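With getMessage() overridden, a report or log line now carries every collected cause instead of just the wrapper's own message. A hypothetical usage sketch (the package is assumed; the constructor is the one shown earlier in this diff):

    import java.util.Arrays;
    import org.owasp.dependencycheck.exception.ExceptionCollection;

    public class ExceptionCollectionDemo {
        public static void main(String[] args) {
            final ExceptionCollection collected = new ExceptionCollection(
                    Arrays.<Throwable>asList(
                            new RuntimeException("analyzer A failed"),
                            new RuntimeException("analyzer B failed")),
                    false);
            // Prints the default summary line followed by one indented line per contained exception.
            System.out.println(collected.getMessage());
        }
    }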
@@ -27,14 +27,14 @@ import java.io.InputStreamReader;
|
||||
import java.io.OutputStream;
|
||||
import java.io.OutputStreamWriter;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.text.DateFormat;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import org.apache.velocity.VelocityContext;
|
||||
import org.apache.velocity.app.VelocityEngine;
|
||||
import org.apache.velocity.context.Context;
|
||||
import org.apache.velocity.runtime.RuntimeConstants;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.format.DateTimeFormat;
|
||||
import org.joda.time.format.DateTimeFormatter;
|
||||
import org.owasp.dependencycheck.analyzer.Analyzer;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
@@ -103,14 +103,18 @@ public class ReportGenerator {
|
||||
context = createContext();
|
||||
|
||||
velocityEngine.init();
|
||||
|
||||
final DateFormat dateFormat = new SimpleDateFormat("MMM d, yyyy 'at' HH:mm:ss z");
|
||||
final DateFormat dateFormatXML = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ");
|
||||
final Date d = new Date();
|
||||
final String scanDate = dateFormat.format(d);
|
||||
final String scanDateXML = dateFormatXML.format(d);
|
||||
final EscapeTool enc = new EscapeTool();
|
||||

final DateTime dt = new DateTime();
final DateTimeFormatter dateFormat = DateTimeFormat.forPattern("MMM d, yyyy 'at' HH:mm:ss z");
final DateTimeFormatter dateFormatXML = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ");

// final Date d = new Date();
// final DateFormat dateFormat = new SimpleDateFormat("MMM d, yyyy 'at' HH:mm:ss z");
// final DateFormat dateFormatXML = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ");
final String scanDate = dateFormat.print(dt);
final String scanDateXML = dateFormatXML.print(dt);

context.put("applicationName", applicationName);
context.put("dependencies", dependencies);
context.put("analyzers", analyzers);
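Moving from SimpleDateFormat to Joda-Time's DateTimeFormat gives formatters that are immutable and thread-safe, which matters if report generation ever runs concurrently. A stand-alone sketch of the same two patterns used above:

    import org.joda.time.DateTime;
    import org.joda.time.format.DateTimeFormat;
    import org.joda.time.format.DateTimeFormatter;

    public class ScanDateDemo {
        public static void main(String[] args) {
            final DateTime now = new DateTime();
            // Same patterns as the report generator; the formatters can safely be shared between threads.
            final DateTimeFormatter display = DateTimeFormat.forPattern("MMM d, yyyy 'at' HH:mm:ss z");
            final DateTimeFormatter xml = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ");
            System.out.println(display.print(now));
            System.out.println(xml.print(now));
        }
    }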
@@ -167,7 +171,8 @@ public class ReportGenerator {
|
||||
*
|
||||
* @param outputDir the path where the reports should be written
|
||||
* @param format the format the report should be written in
|
||||
* @throws ReportException is thrown if there is an error writing out the reports
|
||||
* @throws ReportException is thrown if there is an error writing out the
|
||||
* reports
|
||||
*/
|
||||
public void generateReports(String outputDir, Format format) throws ReportException {
|
||||
if (format == Format.XML || format == Format.ALL) {
|
||||
|
||||
@@ -26,14 +26,15 @@ import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Simple object to track the parts of a version number. The parts are contained in a List such that version 1.2.3 will
|
||||
* be stored as: <code>versionParts[0] = 1;
|
||||
* Simple object to track the parts of a version number. The parts are contained
|
||||
* in a List such that version 1.2.3 will be stored as: <code>versionParts[0] = 1;
|
||||
* versionParts[1] = 2;
|
||||
* versionParts[2] = 3;
|
||||
* </code></p>
|
||||
* <p>
|
||||
* Note, the parser contained in this class expects the version numbers to be separated by periods. If a different
|
||||
* separator is used the parser will likely fail.</p>
|
||||
* Note, the parser contained in this class expects the version numbers to be
|
||||
* separated by periods. If a different separator is used the parser will likely
|
||||
* fail.</p>
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@@ -47,8 +48,9 @@ public class DependencyVersion implements Iterable<String>, Comparable<Dependenc
|
||||
|
||||
/**
|
||||
* Constructor for a DependencyVersion that will parse a version string.
|
||||
* <b>Note</b>, this should only be used when the version passed in is already known to be a well formatted version
|
||||
* number. Otherwise, DependencyVersionUtil.parseVersion() should be used instead.
|
||||
* <b>Note</b>, this should only be used when the version passed in is
|
||||
* already known to be a well formatted version number. Otherwise,
|
||||
* DependencyVersionUtil.parseVersion() should be used instead.
|
||||
*
|
||||
* @param version the well formatted version number to parse
|
||||
*/
|
||||
@@ -57,8 +59,9 @@ public class DependencyVersion implements Iterable<String>, Comparable<Dependenc
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a version string into its sub parts: major, minor, revision, build, etc. <b>Note</b>, this should only be
|
||||
* used to parse something that is already known to be a version number.
|
||||
* Parses a version string into its sub parts: major, minor, revision,
|
||||
* build, etc. <b>Note</b>, this should only be used to parse something that
|
||||
* is already known to be a version number.
|
||||
*
|
||||
* @param version the version string to parse
|
||||
*/
|
||||
@@ -133,26 +136,33 @@ public class DependencyVersion implements Iterable<String>, Comparable<Dependenc
return false;
}
final DependencyVersion other = (DependencyVersion) obj;
final int max = (this.versionParts.size() < other.versionParts.size())
final int minVersionMatchLength = (this.versionParts.size() < other.versionParts.size())
? this.versionParts.size() : other.versionParts.size();
final int maxVersionMatchLength = (this.versionParts.size() > other.versionParts.size())
? this.versionParts.size() : other.versionParts.size();

if (minVersionMatchLength == 1 && maxVersionMatchLength >= 3) {
return false;
}

//TODO steal better version of code from compareTo
for (int i = 0; i < max; i++) {
for (int i = 0; i < minVersionMatchLength; i++) {
final String thisPart = this.versionParts.get(i);
final String otherPart = other.versionParts.get(i);
if (!thisPart.equals(otherPart)) {
return false;
}
}
if (this.versionParts.size() > max) {
for (int i = max; i < this.versionParts.size(); i++) {
if (this.versionParts.size() > minVersionMatchLength) {
for (int i = minVersionMatchLength; i < this.versionParts.size(); i++) {
if (!"0".equals(this.versionParts.get(i))) {
return false;
}
}
}

if (other.versionParts.size() > max) {
for (int i = max; i < other.versionParts.size(); i++) {
if (other.versionParts.size() > minVersionMatchLength) {
for (int i = minVersionMatchLength; i < other.versionParts.size(); i++) {
if (!"0".equals(other.versionParts.get(i))) {
return false;
}
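The reworked equality check compares only the shared prefix of the two part lists, refuses to match a one-part version against a version with three or more parts, and then requires every leftover part to be "0". A small illustration of those semantics (values invented; the class and constructor are the ones in this diff, package assumed from the surrounding utils classes):

    import org.owasp.dependencycheck.utils.DependencyVersion;

    public class VersionEqualsDemo {
        public static void main(String[] args) {
            // trailing zeros are ignored, so these compare as the same version
            System.out.println(new DependencyVersion("2.0").equals(new DependencyVersion("2.0.0"))); // true
            // a single part against three or more parts is considered too ambiguous to match
            System.out.println(new DependencyVersion("2").equals(new DependencyVersion("2.0.0")));   // false
            // any non-zero leftover part breaks equality
            System.out.println(new DependencyVersion("2.0").equals(new DependencyVersion("2.0.1"))); // false
        }
    }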
@@ -180,8 +190,9 @@ public class DependencyVersion implements Iterable<String>, Comparable<Dependenc
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if the three most major major version parts are identical. For instances, if version 1.2.3.4 was
|
||||
* compared to 1.2.3 this function would return true.
|
||||
* Determines if the three most major major version parts are identical. For
|
||||
* instances, if version 1.2.3.4 was compared to 1.2.3 this function would
|
||||
* return true.
|
||||
*
|
||||
* @param version the version number to compare
|
||||
* @return true if the first three major parts of the version are identical
|
||||
|
||||
@@ -24,7 +24,8 @@ import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* A utility class to extract version numbers from file names (or other strings containing version numbers.</p>
|
||||
* A utility class to extract version numbers from file names (or other strings
|
||||
* containing version numbers.</p>
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@@ -35,11 +36,19 @@ public final class DependencyVersionUtil {
|
||||
*/
|
||||
private static final Pattern RX_VERSION = Pattern.compile("\\d+(\\.\\d{1,6})+(\\.?([_-](release|beta|alpha|\\d+)|[a-zA-Z_-]{1,3}\\d{0,8}))?");
|
||||
/**
|
||||
* Regular expression to extract a single version number without periods. This is a last ditch effort just to check in case we
|
||||
* are missing a version number using the previous regex.
|
||||
* Regular expression to extract a single version number without periods.
|
||||
* This is a last ditch effort just to check in case we are missing a
|
||||
* version number using the previous regex.
|
||||
*/
|
||||
private static final Pattern RX_SINGLE_VERSION = Pattern.compile("\\d+(\\.?([_-](release|beta|alpha)|[a-zA-Z_-]{1,3}\\d{1,8}))?");
|
||||
|
||||
/**
|
||||
* Regular expression to extract the part before the version numbers if
|
||||
* there are any based on RX_VERSION. In most cases, this part represents a
|
||||
* more accurate name.
|
||||
*/
|
||||
private static final Pattern RX_PRE_VERSION = Pattern.compile("^(.+)[_-](\\d+\\.\\d{1,6})+");
|
||||
|
||||
/**
|
||||
* Private constructor for utility class.
|
||||
*/
|
||||
@@ -48,7 +57,8 @@ public final class DependencyVersionUtil {
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* A utility class to extract version numbers from file names (or other strings containing version numbers.</p>
|
||||
* A utility class to extract version numbers from file names (or other
|
||||
* strings containing version numbers.</p>
|
||||
* <pre>
|
||||
* Example:
|
||||
* Give the file name: library-name-1.4.1r2-release.jar
|
||||
@@ -95,4 +105,30 @@ public final class DependencyVersionUtil {
}
return new DependencyVersion(version);
}

/**
* <p>
* A utility class to extract the part before version numbers from file
* names (or other strings containing version numbers. In most cases, this
* part represents a more accurate name than the full file name.</p>
* <pre>
* Example:
* Give the file name: library-name-1.4.1r2-release.jar
* This function would return: library-name</pre>
*
* @param text the text being analyzed
* @return the part before the version numbers if any, otherwise return the
* text itself.
*/
public static String parsePreVersion(String text) {
if (parseVersion(text) == null) {
return text;
}

final Matcher matcher = RX_PRE_VERSION.matcher(text);
if (matcher.find()) {
return matcher.group(1);
}
return text;
}
}

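A quick usage sketch for the new helper next to the existing parseVersion (assumed public static, as it is used elsewhere in the codebase); the file name comes from the javadoc example and the printed version string is approximate:

    import org.owasp.dependencycheck.utils.DependencyVersion;
    import org.owasp.dependencycheck.utils.DependencyVersionUtil;

    public class VersionExtractionDemo {
        public static void main(String[] args) {
            final String fileName = "library-name-1.4.1r2-release.jar";
            // the version portion of the file name, matched by RX_VERSION
            final DependencyVersion version = DependencyVersionUtil.parseVersion(fileName);
            // the part before the version, matched by RX_PRE_VERSION
            final String name = DependencyVersionUtil.parsePreVersion(fileName);
            System.out.println(version); // something along the lines of 1.4.1r2
            System.out.println(name);    // library-name
        }
    }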
@@ -18,13 +18,13 @@
|
||||
package org.owasp.dependencycheck.utils;
|
||||
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.Closeable;
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.FilenameFilter;
|
||||
import java.io.IOException;
|
||||
import java.util.zip.GZIPInputStream;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipInputStream;
|
||||
|
||||
@@ -61,20 +61,24 @@ public final class ExtractionUtil {
|
||||
*
|
||||
* @param archive an archive file such as a WAR or EAR
|
||||
* @param extractTo a directory to extract the contents to
|
||||
* @throws ExtractionException thrown if an exception occurs while extracting the files
|
||||
* @throws ExtractionException thrown if an exception occurs while
|
||||
* extracting the files
|
||||
*/
|
||||
public static void extractFiles(File archive, File extractTo) throws ExtractionException {
|
||||
extractFiles(archive, extractTo, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the contents of an archive into the specified directory. The files are only extracted if they are supported by the
|
||||
* analyzers loaded into the specified engine. If the engine is specified as null then all files are extracted.
|
||||
* Extracts the contents of an archive into the specified directory. The
|
||||
* files are only extracted if they are supported by the analyzers loaded
|
||||
* into the specified engine. If the engine is specified as null then all
|
||||
* files are extracted.
|
||||
*
|
||||
* @param archive an archive file such as a WAR or EAR
|
||||
* @param extractTo a directory to extract the contents to
|
||||
* @param engine the scanning engine
|
||||
* @throws ExtractionException thrown if there is an error extracting the files
|
||||
* @throws ExtractionException thrown if there is an error extracting the
|
||||
* files
|
||||
*/
|
||||
public static void extractFiles(File archive, File extractTo, Engine engine) throws ExtractionException {
|
||||
if (archive == null || extractTo == null) {
|
||||
@@ -116,7 +120,7 @@ public final class ExtractionUtil {
|
||||
final String msg = String.format("IO Exception while parsing file '%s'.", file.getName());
|
||||
throw new ExtractionException(msg, ex);
|
||||
} finally {
|
||||
closeStream(fos);
|
||||
FileUtils.close(fos);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -126,7 +130,7 @@ public final class ExtractionUtil {
|
||||
LOGGER.debug("", ex);
|
||||
throw new ExtractionException(msg, ex);
|
||||
} finally {
|
||||
closeStream(zis);
|
||||
FileUtils.close(zis);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -172,7 +176,8 @@ public final class ExtractionUtil {
|
||||
* @param input the archive to extract files from
|
||||
* @param destination the location to write the files too
|
||||
* @param filter determines which files get extracted
|
||||
* @throws ArchiveExtractionException thrown if there is an exception extracting files from the archive
|
||||
* @throws ArchiveExtractionException thrown if there is an exception
|
||||
* extracting files from the archive
|
||||
*/
|
||||
private static void extractArchive(ArchiveInputStream input,
|
||||
File destination, FilenameFilter filter)
|
||||
@@ -197,18 +202,20 @@ public final class ExtractionUtil {
|
||||
} catch (Throwable ex) {
|
||||
throw new ArchiveExtractionException(ex);
|
||||
} finally {
|
||||
closeStream(input);
|
||||
FileUtils.close(input);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts a file from an archive (input stream) and correctly builds the directory structure.
|
||||
* Extracts a file from an archive (input stream) and correctly builds the
|
||||
* directory structure.
|
||||
*
|
||||
* @param input the archive input stream
|
||||
* @param destination where to write the file
|
||||
* @param filter the file filter to apply to the files being extracted
|
||||
* @param entry the entry from the archive to extract
|
||||
* @throws ExtractionException thrown if there is an error reading from the archive stream
|
||||
* @throws ExtractionException thrown if there is an error reading from the
|
||||
* archive stream
|
||||
*/
|
||||
private static void extractFile(ArchiveInputStream input, File destination,
|
||||
FilenameFilter filter, ArchiveEntry entry) throws ExtractionException {
|
||||
@@ -233,31 +240,18 @@ public final class ExtractionUtil {
|
||||
file.getName());
|
||||
throw new ExtractionException(msg, ex);
|
||||
} finally {
|
||||
closeStream(fos);
|
||||
FileUtils.close(fos);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the stream.
|
||||
*
|
||||
* @param stream the stream to close
|
||||
*/
|
||||
private static void closeStream(Closeable stream) {
|
||||
if (stream != null) {
|
||||
try {
|
||||
stream.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.trace("", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensures the parent path is correctly created on disk so that the file can be extracted to the correct location.
|
||||
* Ensures the parent path is correctly created on disk so that the file can
|
||||
* be extracted to the correct location.
|
||||
*
|
||||
* @param file the file path
|
||||
* @throws ExtractionException thrown if the parent paths could not be created
|
||||
* @throws ExtractionException thrown if the parent paths could not be
|
||||
* created
|
||||
*/
|
||||
private static void createParentFile(final File file)
|
||||
throws ExtractionException {
|
||||
@@ -269,4 +263,58 @@ public final class ExtractionUtil {
|
||||
throw new ExtractionException(msg);
|
||||
}
|
||||
}
|
||||
|
||||
/**
* Extracts the file contained in a gzip archive. The extracted file is
* placed in the exact same path as the file specified.
*
* @param file the archive file
* @throws FileNotFoundException thrown if the file does not exist
* @throws IOException thrown if there is an error extracting the file.
*/
public static void extractGzip(File file) throws FileNotFoundException, IOException {
final String originalPath = file.getPath();
final File gzip = new File(originalPath + ".gz");
if (gzip.isFile() && !gzip.delete()) {
LOGGER.debug("Failed to delete initial temporary file when extracting 'gz' {}", gzip.toString());
gzip.deleteOnExit();
}
if (!file.renameTo(gzip)) {
throw new IOException("Unable to rename '" + file.getPath() + "'");
}
final File newfile = new File(originalPath);

final byte[] buffer = new byte[4096];

GZIPInputStream cin = null;
FileOutputStream out = null;
try {
cin = new GZIPInputStream(new FileInputStream(gzip));
out = new FileOutputStream(newfile);

int len;
while ((len = cin.read(buffer)) > 0) {
out.write(buffer, 0, len);
}
} finally {
if (cin != null) {
try {
cin.close();
} catch (IOException ex) {
LOGGER.trace("ignore", ex);
}
}
if (out != null) {
try {
out.close();
} catch (IOException ex) {
LOGGER.trace("ignore", ex);
}
}
if (gzip.isFile() && !org.apache.commons.io.FileUtils.deleteQuietly(gzip)) {
LOGGER.debug("Failed to delete temporary file when extracting 'gz' {}", gzip.toString());
gzip.deleteOnExit();
}
}
}
}

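A short usage sketch for the new helper (the file name is invented): extractGzip decompresses in place, so callers keep working with the original path afterwards.

    import java.io.File;
    import java.io.IOException;
    import org.owasp.dependencycheck.utils.ExtractionUtil;

    public class GzipExtractDemo {
        public static void main(String[] args) throws IOException {
            // hypothetical downloaded file whose bytes are gzip-compressed
            final File download = new File("nvdcve-2.0-Modified.xml");
            ExtractionUtil.extractGzip(download);
            // the same path now holds the decompressed XML
            System.out.println("Extracted to " + download.getPath());
        }
    }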
@@ -17,6 +17,7 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.xml.hints;
|
||||
|
||||
import org.owasp.dependencycheck.utils.XmlUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.xml.sax.ErrorHandler;
|
||||
@@ -35,33 +36,6 @@ public class HintErrorHandler implements ErrorHandler {
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(HintErrorHandler.class);
|
||||
|
||||
/**
|
||||
* Builds a prettier exception message.
|
||||
*
|
||||
* @param ex the SAXParseException
|
||||
* @return an easier to read exception message
|
||||
*/
|
||||
private String getPrettyParseExceptionInfo(SAXParseException ex) {
|
||||
|
||||
final StringBuilder sb = new StringBuilder();
|
||||
|
||||
if (ex.getSystemId() != null) {
|
||||
sb.append("systemId=").append(ex.getSystemId()).append(", ");
|
||||
}
|
||||
if (ex.getPublicId() != null) {
|
||||
sb.append("publicId=").append(ex.getPublicId()).append(", ");
|
||||
}
|
||||
if (ex.getLineNumber() > 0) {
|
||||
sb.append("Line=").append(ex.getLineNumber());
|
||||
}
|
||||
if (ex.getColumnNumber() > 0) {
|
||||
sb.append(", Column=").append(ex.getColumnNumber());
|
||||
}
|
||||
sb.append(": ").append(ex.getMessage());
|
||||
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Logs warnings.
|
||||
*
|
||||
@@ -81,7 +55,7 @@ public class HintErrorHandler implements ErrorHandler {
|
||||
*/
|
||||
@Override
|
||||
public void error(SAXParseException ex) throws SAXException {
|
||||
throw new SAXException(getPrettyParseExceptionInfo(ex));
|
||||
throw new SAXException(XmlUtils.getPrettyParseExceptionInfo(ex));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -92,6 +66,6 @@ public class HintErrorHandler implements ErrorHandler {
*/
@Override
public void fatalError(SAXParseException ex) throws SAXException {
throw new SAXException(getPrettyParseExceptionInfo(ex));
throw new SAXException(XmlUtils.getPrettyParseExceptionInfo(ex));
}
}

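Both error handlers now delegate message formatting to XmlUtils.getPrettyParseExceptionInfo. The shared helper itself is not shown in this diff, but judging from the private method removed above it is presumably close to this sketch:

    import org.xml.sax.SAXParseException;

    // Sketch of the centralized helper, reconstructed from the removed private method.
    public final class XmlUtilsSketch {

        private XmlUtilsSketch() {
        }

        public static String getPrettyParseExceptionInfo(SAXParseException ex) {
            final StringBuilder sb = new StringBuilder();
            if (ex.getSystemId() != null) {
                sb.append("systemId=").append(ex.getSystemId()).append(", ");
            }
            if (ex.getPublicId() != null) {
                sb.append("publicId=").append(ex.getPublicId()).append(", ");
            }
            if (ex.getLineNumber() > 0) {
                sb.append("Line=").append(ex.getLineNumber());
            }
            if (ex.getColumnNumber() > 0) {
                sb.append(", Column=").append(ex.getColumnNumber());
            }
            sb.append(": ").append(ex.getMessage());
            return sb.toString();
        }
    }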
@@ -62,9 +62,17 @@ public class HintHandler extends DefaultHandler {
|
||||
*/
|
||||
private static final String DUPLICATE = "duplicate";
|
||||
/**
|
||||
* Attribute name.
|
||||
* Attribute value.
|
||||
*/
|
||||
private static final String VENDOR = "vendor";
|
||||
/**
|
||||
* Attribute value.
|
||||
*/
|
||||
private static final String PRODUCT = "product";
|
||||
/**
|
||||
* Attribute value.
|
||||
*/
|
||||
private static final String VERSION = "version";
|
||||
/**
|
||||
* Attribute name.
|
||||
*/
|
||||
@@ -168,16 +176,25 @@ public class HintHandler extends DefaultHandler {
|
||||
attr.getValue(VALUE),
|
||||
Confidence.valueOf(attr.getValue(CONFIDENCE)));
|
||||
}
|
||||
} else if (inAddNode) {
|
||||
rule.addAddProduct(attr.getValue(SOURCE),
|
||||
attr.getValue(NAME),
|
||||
attr.getValue(VALUE),
|
||||
Confidence.valueOf(attr.getValue(CONFIDENCE)));
|
||||
} else {
|
||||
rule.addGivenProduct(attr.getValue(SOURCE),
|
||||
attr.getValue(NAME),
|
||||
attr.getValue(VALUE),
|
||||
Confidence.valueOf(attr.getValue(CONFIDENCE)));
|
||||
} else if (PRODUCT.equals(hintType)) {
|
||||
if (inAddNode) {
|
||||
rule.addAddProduct(attr.getValue(SOURCE),
|
||||
attr.getValue(NAME),
|
||||
attr.getValue(VALUE),
|
||||
Confidence.valueOf(attr.getValue(CONFIDENCE)));
|
||||
} else {
|
||||
rule.addGivenProduct(attr.getValue(SOURCE),
|
||||
attr.getValue(NAME),
|
||||
attr.getValue(VALUE),
|
||||
Confidence.valueOf(attr.getValue(CONFIDENCE)));
|
||||
}
|
||||
} else if (VERSION.equals(hintType)) {
|
||||
if (inAddNode) {
|
||||
rule.addAddVersion(attr.getValue(SOURCE),
|
||||
attr.getValue(NAME),
|
||||
attr.getValue(VALUE),
|
||||
Confidence.valueOf(attr.getValue(CONFIDENCE)));
|
||||
}
|
||||
}
|
||||
} else if (FILE_NAME.equals(qName)) {
|
||||
final PropertyType pt = new PropertyType();
|
||||
|
||||
@@ -26,7 +26,7 @@ import java.io.InputStreamReader;
|
||||
import java.io.Reader;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import javax.xml.parsers.SAXParser;
|
||||
import javax.xml.parsers.SAXParserFactory;
|
||||
import org.owasp.dependencycheck.utils.XmlUtils;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -64,7 +64,7 @@ public class HintParser {
|
||||
/**
|
||||
* The schema for the hint XML files.
|
||||
*/
|
||||
private static final String HINT_SCHEMA = "schema/dependency-hint.1.0.xsd";
|
||||
private static final String HINT_SCHEMA = "schema/dependency-hint.1.1.xsd";
|
||||
|
||||
/**
|
||||
* Parses the given XML file and returns a list of the hints contained.
|
||||
@@ -104,15 +104,11 @@ public class HintParser {
|
||||
* @throws SAXException thrown if the XML cannot be parsed
|
||||
*/
|
||||
public Hints parseHints(InputStream inputStream) throws HintParseException, SAXException {
|
||||
InputStream schemaStream = null;
|
||||
try {
|
||||
final InputStream schemaStream = this.getClass().getClassLoader().getResourceAsStream(HINT_SCHEMA);
|
||||
schemaStream = this.getClass().getClassLoader().getResourceAsStream(HINT_SCHEMA);
|
||||
final HintHandler handler = new HintHandler();
|
||||
final SAXParserFactory factory = SAXParserFactory.newInstance();
|
||||
factory.setNamespaceAware(true);
|
||||
factory.setValidating(true);
|
||||
final SAXParser saxParser = factory.newSAXParser();
|
||||
saxParser.setProperty(HintParser.JAXP_SCHEMA_LANGUAGE, HintParser.W3C_XML_SCHEMA);
|
||||
saxParser.setProperty(HintParser.JAXP_SCHEMA_SOURCE, new InputSource(schemaStream));
|
||||
final SAXParser saxParser = XmlUtils.buildSecureSaxParser(schemaStream);
|
||||
final XMLReader xmlReader = saxParser.getXMLReader();
|
||||
xmlReader.setErrorHandler(new HintErrorHandler());
|
||||
xmlReader.setContentHandler(handler);
|
||||
@@ -141,6 +137,14 @@ public class HintParser {
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
throw new HintParseException(ex);
|
||||
} finally {
|
||||
if (schemaStream != null) {
|
||||
try {
|
||||
schemaStream.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Error closing hint file stream", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -85,6 +85,15 @@ public class HintRule {
|
||||
*/
|
||||
private final List<Evidence> givenVendor = new ArrayList<Evidence>();
|
||||
|
||||
/**
|
||||
* The list of product evidence to add.
|
||||
*/
|
||||
private final List<Evidence> addProduct = new ArrayList<Evidence>();
|
||||
/**
|
||||
* The list of version evidence to add.
|
||||
*/
|
||||
private final List<Evidence> addVersion = new ArrayList<Evidence>();
|
||||
|
||||
/**
|
||||
* Adds a given vendors to the list of evidence to matched.
|
||||
*
|
||||
@@ -106,11 +115,6 @@ public class HintRule {
|
||||
return givenVendor;
|
||||
}
|
||||
|
||||
/**
|
||||
* The list of product evidence to add.
|
||||
*/
|
||||
private final List<Evidence> addProduct = new ArrayList<Evidence>();
|
||||
|
||||
/**
|
||||
* Adds a given product to the list of evidence to add when matched.
|
||||
*
|
||||
@@ -132,6 +136,27 @@ public class HintRule {
|
||||
return addProduct;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a given version to the list of evidence to add when matched.
|
||||
*
|
||||
* @param source the source of the evidence
|
||||
* @param name the name of the evidence
|
||||
* @param value the value of the evidence
|
||||
* @param confidence the confidence of the evidence
|
||||
*/
|
||||
public void addAddVersion(String source, String name, String value, Confidence confidence) {
|
||||
addVersion.add(new Evidence(source, name, value, confidence));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the value of addVersion.
|
||||
*
|
||||
* @return the value of addVersion
|
||||
*/
|
||||
public List<Evidence> getAddVersion() {
|
||||
return addVersion;
|
||||
}
|
||||
|
||||
/**
|
||||
* The list of vendor hints to add.
|
||||
*/
|
||||
|
||||
@@ -348,7 +348,7 @@ public class Model {
|
||||
* Utility class that can provide values from a Properties object to a
|
||||
* StrSubstitutor.
|
||||
*/
|
||||
private static class PropertyLookup extends StrLookup {
|
||||
private static class PropertyLookup extends StrLookup<String> {
|
||||
|
||||
/**
|
||||
* Reference to the properties to lookup.
|
||||
|
||||
@@ -26,7 +26,7 @@ import java.io.InputStreamReader;
|
||||
import java.io.Reader;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import javax.xml.parsers.SAXParser;
|
||||
import javax.xml.parsers.SAXParserFactory;
|
||||
import org.owasp.dependencycheck.utils.XmlUtils;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -47,10 +47,12 @@ public class PomParser {
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(PomParser.class);
|
||||
|
||||
/**
|
||||
* Parses the given xml file and returns a Model object containing only the fields dependency-check requires.
|
||||
* Parses the given xml file and returns a Model object containing only the
|
||||
* fields dependency-check requires.
|
||||
*
|
||||
* @param file a pom.xml
|
||||
* @return a Model object containing only the fields dependency-check requires
|
||||
* @return a Model object containing only the fields dependency-check
|
||||
* requires
|
||||
* @throws PomParseException thrown if the xml file cannot be parsed
|
||||
*/
|
||||
public Model parse(File file) throws PomParseException {
|
||||
@@ -73,7 +75,8 @@ public class PomParser {
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the given XML file and returns a Model object containing only the fields dependency-check requires.
|
||||
* Parses the given XML file and returns a Model object containing only the
|
||||
* fields dependency-check requires.
|
||||
*
|
||||
* @param inputStream an InputStream containing suppression rues
|
||||
* @return a list of suppression rules
|
||||
@@ -82,19 +85,12 @@ public class PomParser {
|
||||
public Model parse(InputStream inputStream) throws PomParseException {
|
||||
try {
|
||||
final PomHandler handler = new PomHandler();
|
||||
final SAXParserFactory factory = SAXParserFactory.newInstance();
|
||||
// factory.setNamespaceAware(true);
|
||||
// factory.setValidating(true);
|
||||
final SAXParser saxParser = factory.newSAXParser();
|
||||
final SAXParser saxParser = XmlUtils.buildSecureSaxParser();
|
||||
final XMLReader xmlReader = saxParser.getXMLReader();
|
||||
xmlReader.setContentHandler(handler);
|
||||
|
||||
final Reader reader = new InputStreamReader(inputStream, "UTF-8");
|
||||
final InputSource in = new InputSource(reader);
|
||||
//in.setEncoding("UTF-8");
|
||||
|
||||
xmlReader.parse(in);
|
||||
|
||||
return handler.getModel();
|
||||
} catch (ParserConfigurationException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
|
||||
@@ -48,13 +48,17 @@ public final class PomUtils {
|
||||
*
|
||||
* @param file the pom.xml file
|
||||
* @return returns a
|
||||
* @throws AnalysisException is thrown if there is an exception extracting or parsing the POM {@link Model} object
|
||||
* @throws AnalysisException is thrown if there is an exception extracting
|
||||
* or parsing the POM {@link Model} object
|
||||
*/
|
||||
public static Model readPom(File file) throws AnalysisException {
|
||||
Model model = null;
|
||||
try {
|
||||
final PomParser parser = new PomParser();
|
||||
model = parser.parse(file);
|
||||
final Model model = parser.parse(file);
|
||||
if (model == null) {
|
||||
throw new AnalysisException(String.format("Unable to parse pom '%s'", file.getPath()));
|
||||
}
|
||||
return model;
|
||||
} catch (PomParseException ex) {
|
||||
LOGGER.warn("Unable to parse pom '{}'", file.getPath());
|
||||
LOGGER.debug("", ex);
|
||||
@@ -68,7 +72,6 @@ public final class PomUtils {
|
||||
LOGGER.debug("", ex);
|
||||
throw new AnalysisException(ex);
|
||||
}
|
||||
return model;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -77,7 +80,8 @@ public final class PomUtils {
|
||||
* @param path the path to the pom.xml file within the jar file
|
||||
* @param jar the jar file to extract the pom from
|
||||
* @return returns a
|
||||
* @throws AnalysisException is thrown if there is an exception extracting or parsing the POM {@link Model} object
|
||||
* @throws AnalysisException is thrown if there is an exception extracting
|
||||
* or parsing the POM {@link Model} object
|
||||
*/
|
||||
public static Model readPom(String path, JarFile jar) throws AnalysisException {
|
||||
final ZipEntry entry = jar.getEntry(path);
|
||||
@@ -86,7 +90,9 @@ public final class PomUtils {
|
||||
try {
|
||||
final PomParser parser = new PomParser();
|
||||
model = parser.parse(jar.getInputStream(entry));
|
||||
LOGGER.debug("Read POM {}", path);
|
||||
if (model == null) {
|
||||
throw new AnalysisException(String.format("Unable to parse pom '%s/%s'", jar.getName(), path));
|
||||
}
|
||||
} catch (SecurityException ex) {
|
||||
LOGGER.warn("Unable to parse pom '{}' in jar '{}'; invalid signature", path, jar.getName());
|
||||
LOGGER.debug("", ex);
|
||||
@@ -105,11 +111,13 @@ public final class PomUtils {
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads in the pom file and adds elements as evidence to the given dependency.
|
||||
* Reads in the pom file and adds elements as evidence to the given
|
||||
* dependency.
|
||||
*
|
||||
* @param dependency the dependency being analyzed
|
||||
* @param pomFile the pom file to read
|
||||
* @throws AnalysisException is thrown if there is an exception parsing the pom
|
||||
* @throws AnalysisException is thrown if there is an exception parsing the
|
||||
* pom
|
||||
*/
|
||||
public static void analyzePOM(Dependency dependency, File pomFile) throws AnalysisException {
|
||||
final Model pom = PomUtils.readPom(pomFile);
|
||||
|
||||
@@ -17,8 +17,7 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.xml.suppression;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.owasp.dependencycheck.utils.XmlUtils;
|
||||
import org.xml.sax.ErrorHandler;
|
||||
import org.xml.sax.SAXException;
|
||||
import org.xml.sax.SAXParseException;
|
||||
@@ -33,34 +32,7 @@ public class SuppressionErrorHandler implements ErrorHandler {
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(SuppressionErrorHandler.class);
|
||||
|
||||
/**
|
||||
* Builds a prettier exception message.
|
||||
*
|
||||
* @param ex the SAXParseException
|
||||
* @return an easier to read exception message
|
||||
*/
|
||||
private String getPrettyParseExceptionInfo(SAXParseException ex) {
|
||||
|
||||
final StringBuilder sb = new StringBuilder();
|
||||
|
||||
if (ex.getSystemId() != null) {
|
||||
sb.append("systemId=").append(ex.getSystemId()).append(", ");
|
||||
}
|
||||
if (ex.getPublicId() != null) {
|
||||
sb.append("publicId=").append(ex.getPublicId()).append(", ");
|
||||
}
|
||||
if (ex.getLineNumber() > 0) {
|
||||
sb.append("Line=").append(ex.getLineNumber());
|
||||
}
|
||||
if (ex.getColumnNumber() > 0) {
|
||||
sb.append(", Column=").append(ex.getColumnNumber());
|
||||
}
|
||||
sb.append(": ").append(ex.getMessage());
|
||||
|
||||
return sb.toString();
|
||||
}
|
||||
//private static final Logger LOGGER = LoggerFactory.getLogger(SuppressionErrorHandler.class);
|
||||
|
||||
/**
|
||||
* Logs warnings.
|
||||
@@ -81,7 +53,7 @@ public class SuppressionErrorHandler implements ErrorHandler {
|
||||
*/
|
||||
@Override
|
||||
public void error(SAXParseException ex) throws SAXException {
|
||||
throw new SAXException(getPrettyParseExceptionInfo(ex));
|
||||
throw new SAXException(XmlUtils.getPrettyParseExceptionInfo(ex));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -92,6 +64,6 @@ public class SuppressionErrorHandler implements ErrorHandler {
|
||||
*/
|
||||
@Override
|
||||
public void fatalError(SAXParseException ex) throws SAXException {
|
||||
throw new SAXException(getPrettyParseExceptionInfo(ex));
|
||||
throw new SAXException(XmlUtils.getPrettyParseExceptionInfo(ex));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -27,7 +27,7 @@ import java.io.Reader;
|
||||
import java.util.List;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import javax.xml.parsers.SAXParser;
|
||||
import javax.xml.parsers.SAXParserFactory;
|
||||
import org.owasp.dependencycheck.utils.XmlUtils;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -46,25 +46,10 @@ public class SuppressionParser {
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(SuppressionParser.class);
|
||||
/**
|
||||
* JAXP Schema Language. Source:
|
||||
* http://docs.oracle.com/javase/tutorial/jaxp/sax/validation.html
|
||||
*/
|
||||
public static final String JAXP_SCHEMA_LANGUAGE = "http://java.sun.com/xml/jaxp/properties/schemaLanguage";
|
||||
/**
|
||||
* W3C XML Schema. Source:
|
||||
* http://docs.oracle.com/javase/tutorial/jaxp/sax/validation.html
|
||||
*/
|
||||
public static final String W3C_XML_SCHEMA = "http://www.w3.org/2001/XMLSchema";
|
||||
/**
|
||||
* JAXP Schema Source. Source:
|
||||
* http://docs.oracle.com/javase/tutorial/jaxp/sax/validation.html
|
||||
*/
|
||||
public static final String JAXP_SCHEMA_SOURCE = "http://java.sun.com/xml/jaxp/properties/schemaSource";
|
||||
/**
|
||||
* The suppression schema file location.
|
||||
*/
|
||||
private static final String SUPPRESSION_SCHEMA = "schema/dependency-suppression.1.1.xsd";
|
||||
public static final String SUPPRESSION_SCHEMA = "schema/dependency-suppression.1.1.xsd";
|
||||
/**
|
||||
* The old suppression schema file location.
|
||||
*/
|
||||
@@ -99,7 +84,11 @@ public class SuppressionParser {
|
||||
} catch (FileNotFoundException ex1) {
|
||||
throw new SuppressionParseException(ex);
|
||||
}
|
||||
return parseOldSuppressionRules(fis);
|
||||
try {
|
||||
return parseSuppressionRules(fis, OLD_SUPPRESSION_SCHEMA);
|
||||
} catch (SAXException ex1) {
|
||||
throw new SuppressionParseException(ex);
|
||||
}
|
||||
} finally {
|
||||
if (fis != null) {
|
||||
try {
|
||||
@@ -121,25 +110,31 @@ public class SuppressionParser {
|
||||
* @throws SAXException thrown if the XML cannot be parsed
|
||||
*/
|
||||
public List<SuppressionRule> parseSuppressionRules(InputStream inputStream) throws SuppressionParseException, SAXException {
|
||||
return parseSuppressionRules(inputStream, SUPPRESSION_SCHEMA);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the given XML stream and returns a list of the suppression rules
|
||||
* contained.
|
||||
*
|
||||
* @param inputStream an InputStream containing suppression rules
|
||||
* @param schema the schema used to validate the XML stream
|
||||
* @return a list of suppression rules
|
||||
* @throws SuppressionParseException thrown if the XML cannot be parsed
|
||||
* @throws SAXException thrown if the XML cannot be parsed
|
||||
*/
|
||||
private List<SuppressionRule> parseSuppressionRules(InputStream inputStream, String schema) throws SuppressionParseException, SAXException {
|
||||
InputStream schemaStream = null;
|
||||
try {
|
||||
final InputStream schemaStream = this.getClass().getClassLoader().getResourceAsStream(SUPPRESSION_SCHEMA);
|
||||
schemaStream = this.getClass().getClassLoader().getResourceAsStream(schema);
|
||||
final SuppressionHandler handler = new SuppressionHandler();
|
||||
final SAXParserFactory factory = SAXParserFactory.newInstance();
|
||||
factory.setNamespaceAware(true);
|
||||
factory.setValidating(true);
|
||||
final SAXParser saxParser = factory.newSAXParser();
|
||||
saxParser.setProperty(SuppressionParser.JAXP_SCHEMA_LANGUAGE, SuppressionParser.W3C_XML_SCHEMA);
|
||||
saxParser.setProperty(SuppressionParser.JAXP_SCHEMA_SOURCE, new InputSource(schemaStream));
|
||||
final SAXParser saxParser = XmlUtils.buildSecureSaxParser(schemaStream);
|
||||
final XMLReader xmlReader = saxParser.getXMLReader();
|
||||
xmlReader.setErrorHandler(new SuppressionErrorHandler());
|
||||
xmlReader.setContentHandler(handler);
|
||||
|
||||
final Reader reader = new InputStreamReader(inputStream, "UTF-8");
|
||||
final InputSource in = new InputSource(reader);
|
||||
//in.setEncoding("UTF-8");
|
||||
|
||||
xmlReader.parse(in);
|
||||
|
||||
return handler.getSuppressionRules();
|
||||
} catch (ParserConfigurationException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
@@ -157,49 +152,14 @@ public class SuppressionParser {
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
throw new SuppressionParseException(ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the given XML stream and returns a list of the suppression rules
|
||||
* contained.
|
||||
*
|
||||
* @param inputStream an InputStream containing suppression rues
|
||||
* @return a list of suppression rules
|
||||
* @throws SuppressionParseException if the XML cannot be parsed
|
||||
*/
|
||||
private List<SuppressionRule> parseOldSuppressionRules(InputStream inputStream) throws SuppressionParseException {
|
||||
try {
|
||||
final InputStream schemaStream = this.getClass().getClassLoader().getResourceAsStream(OLD_SUPPRESSION_SCHEMA);
|
||||
final SuppressionHandler handler = new SuppressionHandler();
|
||||
final SAXParserFactory factory = SAXParserFactory.newInstance();
|
||||
factory.setNamespaceAware(true);
|
||||
factory.setValidating(true);
|
||||
final SAXParser saxParser = factory.newSAXParser();
|
||||
saxParser.setProperty(SuppressionParser.JAXP_SCHEMA_LANGUAGE, SuppressionParser.W3C_XML_SCHEMA);
|
||||
saxParser.setProperty(SuppressionParser.JAXP_SCHEMA_SOURCE, new InputSource(schemaStream));
|
||||
final XMLReader xmlReader = saxParser.getXMLReader();
|
||||
xmlReader.setErrorHandler(new SuppressionErrorHandler());
|
||||
xmlReader.setContentHandler(handler);
|
||||
|
||||
final Reader reader = new InputStreamReader(inputStream, "UTF-8");
|
||||
final InputSource in = new InputSource(reader);
|
||||
|
||||
xmlReader.parse(in);
|
||||
|
||||
return handler.getSuppressionRules();
|
||||
} catch (ParserConfigurationException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
throw new SuppressionParseException(ex);
|
||||
} catch (SAXException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
throw new SuppressionParseException(ex);
|
||||
} catch (FileNotFoundException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
throw new SuppressionParseException(ex);
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
throw new SuppressionParseException(ex);
|
||||
} finally {
|
||||
if (schemaStream != null) {
|
||||
try {
|
||||
schemaStream.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Error closing suppression file stream", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Binary file not shown.
@@ -6,6 +6,7 @@ org.owasp.dependencycheck.analyzer.CPEAnalyzer
|
||||
org.owasp.dependencycheck.analyzer.FalsePositiveAnalyzer
org.owasp.dependencycheck.analyzer.CpeSuppressionAnalyzer
org.owasp.dependencycheck.analyzer.DependencyBundlingAnalyzer
org.owasp.dependencycheck.analyzer.DependencyMergingAnalyzer
org.owasp.dependencycheck.analyzer.NvdCveAnalyzer
org.owasp.dependencycheck.analyzer.VulnerabilitySuppressionAnalyzer
org.owasp.dependencycheck.analyzer.CentralAnalyzer
@@ -22,3 +23,6 @@ org.owasp.dependencycheck.analyzer.RubyGemspecAnalyzer
org.owasp.dependencycheck.analyzer.RubyBundlerAnalyzer
org.owasp.dependencycheck.analyzer.RubyBundleAuditAnalyzer
org.owasp.dependencycheck.analyzer.ComposerLockAnalyzer
org.owasp.dependencycheck.analyzer.CocoaPodsAnalyzer
org.owasp.dependencycheck.analyzer.SwiftPackageManagerAnalyzer
org.owasp.dependencycheck.analyzer.VersionFilterAnalyzer

@@ -1,75 +1,120 @@
<?xml version="1.0" encoding="UTF-8"?>
<hints xmlns="https://jeremylong.github.io/DependencyCheck/dependency-hint.1.0.xsd">
<hint>
<given>
<evidence type="product" source="Manifest" name="Implementation-Title" value="Spring Framework" confidence="HIGH"/>
<evidence type="product" source="Manifest" name="Implementation-Title" value="org.springframework.core" confidence="HIGH"/>
<evidence type="product" source="Manifest" name="Implementation-Title" value="spring-core" confidence="HIGH"/>
</given>
<add>
<evidence type="product" source="hint analyzer" name="product" value="springsource_spring_framework" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="SpringSource" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="vmware" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="pivotal" confidence="HIGH"/>
</add>
</hint>
<hint>
<given>
<evidence type="product" source="jar" name="package name" value="springframework" confidence="LOW"/>
<fileName contains="spring"/>
</given>
<add>
<evidence type="product" source="hint analyzer" name="product" value="springsource_spring_framework" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="SpringSource" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="vmware" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="pivotal" confidence="HIGH"/>
</add>
</hint>
<hint>
<given>
<evidence type="product" source="jar" name="package name" value="springframework" confidence="LOW"/>
</given>
<add>
<evidence type="product" source="hint analyzer" name="product" value="springsource_spring_framework" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="vmware" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="pivotal" confidence="HIGH"/>
</add>
</hint>
<hint>
<given>
<evidence type="product" source="Manifest" name="Bundle-Name" value="Spring Security Core" confidence="MEDIUM"/>
<evidence type="product" source="pom" name="artifactid" value="spring-security-core" confidence="HIGH"/>
</given>
<add>
<evidence type="product" source="hint analyzer" name="product" value="springsource_spring_framework" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="SpringSource" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="vmware" confidence="HIGH"/>
</add>
</hint>
<hint>
<given>
<evidence type="vendor" source="composer.lock" name="vendor" value="symfony" confidence="HIGHEST"/>
</given>
<add>
<evidence type="vendor" source="hint analyzer" name="vendor" value="sensiolabs" confidence="HIGHEST"/>
</add>
</hint>
<hint>
<given>
<evidence type="vendor" source="composer.lock" name="vendor" value="zendframework" confidence="HIGHEST"/>
</given>
<add>
<evidence type="vendor" source="hint analyzer" name="vendor" value="zend" confidence="HIGHEST"/>
</add>
</hint>
<hint>
<given>
<evidence type="product" source="composer.lock" name="product" value="zendframework" confidence="HIGHEST"/>
</given>
<add>
<evidence type="vendor" source="hint analyzer" name="vendor" value="zend_framework" confidence="HIGHEST"/>
</add>
</hint>
<vendorDuplicatingHint value="sun" duplicate="oracle"/>
<vendorDuplicatingHint value="oracle" duplicate="sun"/>
<hints xmlns="https://jeremylong.github.io/DependencyCheck/dependency-hint.1.1.xsd">
<hint>
<given>
<evidence type="product" source="Manifest" name="Implementation-Title" value="Spring Framework" confidence="HIGH"/>
<evidence type="product" source="Manifest" name="Implementation-Title" value="org.springframework.core" confidence="HIGH"/>
<evidence type="product" source="Manifest" name="Implementation-Title" value="spring-core" confidence="HIGH"/>
</given>
<add>
<evidence type="product" source="hint analyzer" name="product" value="springsource_spring_framework" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="SpringSource" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="vmware" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="pivotal software" confidence="HIGH"/>
</add>
</hint>
<hint>
<given>
<evidence type="product" source="jar" name="package name" value="springframework" confidence="LOW"/>
<fileName contains="spring"/>
</given>
<add>
<evidence type="product" source="hint analyzer" name="product" value="springsource_spring_framework" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="SpringSource" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="vmware" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="pivotal software" confidence="HIGH"/>
</add>
</hint>
<hint>
<given>
<evidence type="product" source="jar" name="package name" value="springframework" confidence="LOW"/>
</given>
<add>
<evidence type="product" source="hint analyzer" name="product" value="springsource_spring_framework" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="vmware" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="pivotal software" confidence="HIGH"/>
</add>
</hint>
<hint>
<given>
<evidence type="product" source="Manifest" name="Bundle-Name" value="Spring Security Core" confidence="MEDIUM"/>
<evidence type="product" source="pom" name="artifactid" value="spring-security-core" confidence="HIGH"/>
</given>
<add>
<evidence type="product" source="hint analyzer" name="product" value="springsource_spring_framework" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="SpringSource" confidence="HIGH"/>
<evidence type="vendor" source="hint analyzer" name="vendor" value="vmware" confidence="HIGH"/>
</add>
</hint>
<hint>
<given>
<evidence type="vendor" source="composer.lock" name="vendor" value="symfony" confidence="HIGHEST"/>
</given>
<add>
<evidence type="vendor" source="hint analyzer" name="vendor" value="sensiolabs" confidence="HIGHEST"/>
</add>
</hint>
<hint>
<given>
<evidence type="vendor" source="composer.lock" name="vendor" value="zendframework" confidence="HIGHEST"/>
</given>
<add>
<evidence type="vendor" source="hint analyzer" name="vendor" value="zend" confidence="HIGHEST"/>
</add>
</hint>
<hint>
<given>
<evidence type="product" source="composer.lock" name="product" value="zendframework" confidence="HIGHEST"/>
</given>
<add>
<evidence type="vendor" source="hint analyzer" name="vendor" value="zend_framework" confidence="HIGHEST"/>
</add>
</hint>

<!-- begin hack for temporary patch of issue #534-->
<hint>
<given>
<fileName regex="true" contains=".*hibernate-validator-5\.0\..*"/>
</given>
<add>
<evidence type="version" source="hint" name="version" value="5.0" confidence="HIGHEST"/>
</add>
</hint>
<hint>
<given>
<fileName regex="true" contains=".*hibernate-validator-5\.1\.[01].*"/>
</given>
<add>
<evidence type="version" source="hint" name="version" value="5.1" confidence="HIGHEST"/>
</add>
</hint>
<hint>
<given>
<fileName regex="true" contains=".*hibernate-validator-4\.1\..*"/>
</given>
<add>
<evidence type="version" source="hint" name="version" value="4.1.0" confidence="HIGHEST"/>
</add>
</hint>
<hint>
<given>
<fileName regex="true" contains=".*hibernate-validator-4\.2\.0.*"/>
</given>
<add>
<evidence type="version" source="hint" name="version" value="4.2.0" confidence="HIGHEST"/>
</add>
</hint>
<hint>
<given>
<fileName regex="true" contains=".*hibernate-validator-4\.3\.[01]\..*"/>
</given>
<add>
<evidence type="version" source="hint" name="version" value="4.3.0" confidence="HIGHEST"/>
</add>
</hint>
<!-- end hack for temporary patch of issue #534-->

<vendorDuplicatingHint value="sun" duplicate="oracle"/>
<vendorDuplicatingHint value="oracle" duplicate="sun"/>
</hints>
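
Note (editor): each hint above pairs a given block (evidence that must already be attached to a dependency) with an add block (evidence the hint analyzer appends). A minimal user-supplied hints file against the same 1.1 schema might look like the following sketch; the vendor and product values here are illustrative only and do not come from the project files:

<?xml version="1.0" encoding="UTF-8"?>
<hints xmlns="https://jeremylong.github.io/DependencyCheck/dependency-hint.1.1.xsd">
    <hint>
        <given>
            <evidence type="product" source="pom" name="artifactid" value="example-library" confidence="HIGH"/>
        </given>
        <add>
            <evidence type="vendor" source="hint analyzer" name="vendor" value="example_vendor" confidence="HIGHEST"/>
        </add>
    </hint>
    <vendorDuplicatingHint value="sun" duplicate="oracle"/>
</hints>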
@@ -8,6 +8,8 @@
<cpe>cpe:/a:mod_security:mod_security</cpe>
<cpe>cpe:/a:springsource:spring_framework</cpe>
<cpe>cpe:/a:vmware:springsource_spring_framework</cpe>
<cpe>cpe:/a:pivotal:spring_framework</cpe>
<cpe>cpe:/a:pivotal_software:spring_framework</cpe>
</suppress>
<suppress base="true">
<notes><![CDATA[
@@ -18,6 +20,7 @@
<cpe>cpe:/a:springsource:spring_framework</cpe>
<cpe>cpe:/a:vmware:springsource_spring_framework</cpe>
<cpe>cpe:/a:pivotal:spring_framework</cpe>
<cpe>cpe:/a:pivotal_software:spring_framework</cpe>
</suppress>
<suppress base="true">
<notes><![CDATA[
@@ -333,6 +336,8 @@
<filePath regex="true">.*\.(jar|ear|war|pom)</filePath>
<cpe>cpe:/a:pam:pam</cpe>
<cpe>cpe:/a:pam_ssh:pam_ssh</cpe>
<cpe>cpe:/a:sun:linux</cpe>
<cpe>cpe:/a:sun:sunos</cpe>
</suppress>
<suppress base="true">
<notes><![CDATA[
@@ -410,7 +415,7 @@
<notes><![CDATA[
Aether false positive.
]]></notes>
<gav regex="true">org.eclipse.aether:aether.*</gav>
<gav regex="true">org\.eclipse\.aether:aether.*</gav>
<cpe>cpe:/a:eclipse:eclipse_ide</cpe>
</suppress>
<suppress base="true">
@@ -420,4 +425,80 @@
<filePath regex="true">.*\.(jar|ear|war|pom)</filePath>
<cpe>cpe:/a:services_project:services</cpe>
</suppress>
<suppress base="true">
<notes><![CDATA[
jenkins-client false positives
]]></notes>
<gav regex="true">com\.offbytwo\.jenkins:jenkins-client:.*</gav>
<cpe>cpe:/a:jenkins:jenkins</cpe>
</suppress>
<suppress base="true">
<notes><![CDATA[
xstream false positives
]]></notes>
<gav regex="true">^(?!com.thoughtworks).*xstream.*$</gav>
<cpe>cpe:/a:x-stream:xstream</cpe>
</suppress>
<suppress base="true">
<notes><![CDATA[
false positive per issue #582
]]></notes>
<gav regex="true">^org\.glassfish\.jersey\.ext:jersey-proxy-client:.*$</gav>
<cpe>cpe:/a:oracle:oracle_client</cpe>
</suppress>
<suppress base="true">
<notes><![CDATA[
file name: smiley-http-proxy-servlet-1.7.jar
]]></notes>
<gav regex="true">^org\.mitre\.dsmiley\.httpproxy:smiley-http-proxy-servlet:.*$</gav>
<cpe>cpe:/a:shttp:shttp</cpe>
</suppress>
<suppress base="true">
<notes><![CDATA[
This CVE is disputed by the vendor and is not considered an issue.
]]></notes>
<filePath regex="true">.*</filePath>
<cve>CVE-2007-6059</cve>
</suppress>
<suppress base="true">
<notes><![CDATA[
file name: jackson-core-2.6.5.jar
]]></notes>
<gav regex="true">com\.fasterxml\.jackson\.core:jackson.*</gav>
<cve>CVE-2016-3720</cve>
</suppress>
<suppress base="true">
<notes><![CDATA[
file name: jackson-core-2.6.5.jar
]]></notes>
<gav regex="true">com\.fasterxml\.jackson\.dataformat:jackson(?!\-dataformat\-xml).*</gav>
<cve>CVE-2016-3720</cve>
</suppress>
<suppress base="true">
<notes><![CDATA[
False positives per issue #642
]]></notes>
<gav regex="true">^org\.springframework\.boot:spring-boot.*$</gav>
<cpe>cpe:/a:pivotal_software:spring_framework</cpe>
<cpe>cpe:/a:pivotal:spring_framework</cpe>
<cpe>cpe:/a:vmware:springsource_spring_framework</cpe>
</suppress>
<suppress base="true">
<notes><![CDATA[
False positives per issue #642
]]></notes>
<gav regex="true">^org\.springframework:spring-context:.*$</gav>
<cpe>cpe:/a:context_project:context</cpe>
</suppress>

<suppress base="true">
<notes><![CDATA[
Node.js false positives per issues #512 and #510
]]></notes>
<filePath regex="true">.*package\.json$</filePath>
<cpe>cpe:/a:file_project:file</cpe>
<cpe>cpe:/a:file:file</cpe>
<cpe>cpe:/a:shim:shim</cpe>
<cpe>cpe:/a:shim_project:shim</cpe>
</suppress>
</suppressions>
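
Note (editor): the suppression entries above follow one pattern: a match selector (gav, filePath, or sha1) plus the cpe or cve identifiers to suppress for whatever the selector matches. A hypothetical entry a user might write for their own project could look like this sketch; the GAV and CPE values are invented for illustration:

<suppress>
    <notes><![CDATA[
    false positive on an internal artifact
    ]]></notes>
    <gav regex="true">^com\.example:example-app:.*$</gav>
    <cpe>cpe:/a:example:example</cpe>
</suppress>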

@@ -4,7 +4,7 @@ autoupdate=true
max.download.threads=3

# the url to obtain the current engine version from
engine.version.url=http://jeremylong.github.io/DependencyCheck/current.txt
engine.version.url=https://jeremylong.github.io/DependencyCheck/current.txt

#temp.directory defaults to System.getProperty("java.io.tmpdir")
#temp.directory=[path to temp directory]
@@ -53,6 +53,8 @@ cve.startyear=2002
# the path to the modified nvd cve xml file.
cve.url-1.2.modified=https://nvd.nist.gov/download/nvdcve-Modified.xml.gz
#cve.url-1.2.modified=http://nvd.nist.gov/download/nvdcve-modified.xml
#the original URL and modified URL should be the same; this is used to detect if we are using an internal NVD CVE copy
cve.url-2.0.original=https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-Modified.xml.gz
cve.url-2.0.modified=https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-Modified.xml.gz
#cve.url-2.0.modified=http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-modified.xml
cve.url-1.2.base=https://nvd.nist.gov/download/nvdcve-%d.xml.gz
@@ -62,7 +64,7 @@ cve.url-2.0.base=https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml.gz
cve.cpe.startswith.filter=cpe:/a:

cpe.validfordays=30
cpe.url=http://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.3.xml.gz
cpe.url=https://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.3.xml.gz

# the URL for searching Nexus for SHA-1 hashes and whether it's enabled
analyzer.nexus.enabled=true
@@ -73,13 +75,14 @@ analyzer.nexus.proxy=true

# the URL for searching search.maven.org for SHA-1 and whether it's enabled
analyzer.central.enabled=true
analyzer.central.url=http://search.maven.org/solrsearch/select
analyzer.central.url=https://search.maven.org/solrsearch/select

# the number of nested archives that will be searched.
archive.scan.depth=3

# use HEAD (default) or GET as HTTP request method for query timestamp
downloader.quick.query.timestamp=true
downloader.tls.protocols=TLSv1,TLSv1.1,TLSv1.2,TLSv1.3

analyzer.experimental.enabled=false
analyzer.jar.enabled=true
@@ -96,6 +99,19 @@ analyzer.nuspec.enabled=true
analyzer.openssl.enabled=true
analyzer.central.enabled=true
analyzer.nexus.enabled=false
analyzer.cocoapods.enabled=true
analyzer.swift.package.manager.enabled=true
#whether the nexus analyzer uses the proxy
analyzer.nexus.proxy=true

analyzer.cpe.enabled=true
analyzer.cpesuppression.enabled=true
analyzer.dependencybundling.enabled=true
analyzer.dependencymerging.enabled=true
analyzer.falsepositive.enabled=true
analyzer.filename.enabled=true
analyzer.hint.enabled=true
analyzer.nvdcve.enabled=true
analyzer.vulnerabilitysuppression.enabled=true
updater.nvdcve.enabled=true
updater.versioncheck.enabled=true
analyzer.versionfilter.enabled=true

@@ -0,0 +1,82 @@
<?xml version="1.0" encoding="UTF-8"?>
<xs:schema id="hints"
xmlns:xs="http://www.w3.org/2001/XMLSchema"
elementFormDefault="qualified"
targetNamespace="https://jeremylong.github.io/DependencyCheck/dependency-hint.1.1.xsd"
xmlns:dc="https://jeremylong.github.io/DependencyCheck/dependency-hint.1.1.xsd">

<xs:simpleType name="givenType">
<xs:restriction base="xs:string">
<xs:enumeration value="vendor"/>
<xs:enumeration value="product"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="addType">
<xs:restriction base="xs:string">
<xs:enumeration value="vendor"/>
<xs:enumeration value="product"/>
<xs:enumeration value="version"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="confidence">
<xs:restriction base="xs:string">
<xs:enumeration value="HIGHEST"/>
<xs:enumeration value="HIGH"/>
<xs:enumeration value="MEDIUM"/>
<xs:enumeration value="LOW"/>
</xs:restriction>
</xs:simpleType>
<xs:complexType name="givenEvidence">
<xs:attribute name="type" use="required" type="dc:givenType"/>
<xs:attribute name="source" use="required" type="xs:string"/>
<xs:attribute name="name" use="required" type="xs:string"/>
<xs:attribute name="value" use="required" type="xs:string"/>
<xs:attribute name="confidence" use="required" type="dc:confidence"/>
</xs:complexType>
<xs:complexType name="addEvidence">
<xs:attribute name="type" use="required" type="dc:addType"/>
<xs:attribute name="source" use="required" type="xs:string"/>
<xs:attribute name="name" use="required" type="xs:string"/>
<xs:attribute name="value" use="required" type="xs:string"/>
<xs:attribute name="confidence" use="required" type="dc:confidence"/>
</xs:complexType>
<xs:complexType name="fileName">
<xs:attribute name="contains" use="required" type="xs:string"/>
<xs:attribute name="regex" use="optional" type="xs:boolean" default="false"/>
<xs:attribute name="caseSensitive" use="optional" type="xs:boolean" default="false"/>
</xs:complexType>
<xs:complexType name="given">
<xs:choice minOccurs="1" maxOccurs="unbounded">
<xs:element name="evidence" type="dc:givenEvidence"/>
<xs:element name="fileName" type="dc:fileName"/>
</xs:choice>
</xs:complexType>
<xs:complexType name="add">
<xs:sequence minOccurs="1" maxOccurs="unbounded">
<xs:element name="evidence" type="dc:addEvidence"/>
</xs:sequence>
</xs:complexType>
<xs:complexType name="hint">
<xs:sequence minOccurs="1" maxOccurs="1">
<xs:element name="given" type="dc:given"/>
<xs:element name="add" type="dc:add"/>
</xs:sequence>
</xs:complexType>
<xs:complexType name="duplicatingHint">
<xs:attribute name="value" use="required" type="xs:string"/>
<xs:attribute name="duplicate" use="required" type="xs:string"/>
</xs:complexType>

<xs:element name="hints">
<xs:complexType>
<xs:sequence minOccurs="0" maxOccurs="unbounded">
<xs:sequence minOccurs="0" maxOccurs="unbounded">
<xs:element name="hint" type="dc:hint"/>
</xs:sequence>
<xs:sequence minOccurs="0" maxOccurs="unbounded">
<xs:element name="vendorDuplicatingHint" type="dc:duplicatingHint"/>
</xs:sequence>
</xs:sequence>
</xs:complexType>
</xs:element>
</xs:schema>

@@ -83,16 +83,68 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
xml += $("#modal-text").text().replace(/\n/g,'\n ');
xml += '\n</suppressions>';
$('#modal-text').text(xml).focus().select();
$('#modal-add-header').toggleClass('active');
});
});
function copyText(name, sha1, type, val) {
function suppressSwitchTo(switchTo) {
$('#modal-suppress-change-to-sha1').toggleClass('active');
$('#modal-suppress-change-to-gav').toggleClass('active');
setCopyText($('#suppress-name').val(),
switchTo,
$('#suppress-'+switchTo).val(),
$('#suppress-type').val(),
$('#suppress-val').val());
}
function copyText(name, sha1, gav, type, val) {
$('#suppress-name').val(name);
$('#suppress-type').val(type);
$('#suppress-val').val(val);
$('#suppress-sha1').val(sha1);
$('#suppress-gav').val(gav);
if (gav=='') {
if ($('#modal-suppress-change-to-gav').hasClass('active')) {
$('#modal-suppress-change-to-gav').toggleClass('active');
}
if ($('#modal-suppress-change-to-sha1').hasClass('active')) {
$('#modal-suppress-change-to-sha1').toggleClass('active');
}
setCopyText(name, 'sha1', sha1, type, val);
} else {
if ($('#modal-suppress-change-to-gav').hasClass('active')) {
$('#modal-suppress-change-to-gav').toggleClass('active');
}
if (!$('#modal-suppress-change-to-sha1').hasClass('active')) {
$('#modal-suppress-change-to-sha1').toggleClass('active');
}
setCopyText(name, 'gav', gav, type, val);
}
}
function setCopyText(name, matchType, matchValue, suppressType, suppressVal) {
xml = '<suppress>\n';
xml += ' <notes><!'+'[CDATA[\n file name: ' + name + '\n ]]'+'></notes>\n';
xml += ' <sha1>' + sha1 + '</sha1>\n';
xml += ' <'+type+'>' + val + '</'+type+'>\n';
if (matchType=='gav') {
v = matchValue.match(/^[^:]+:[^:]+:/);
if (v && v[0]) {
xml += ' <'+matchType+' regex="true">^' + v[0].replace(/\./g,'\\.') + '.*$</'+matchType+'>\n';
} else {
xml += ' <'+matchType+'>' + matchValue + '</'+matchType+'>\n';
}
} else {
xml += ' <'+matchType+'>' + matchValue + '</'+matchType+'>\n';
}
if (suppressType=='cpe') {
v = suppressVal.match(/^cpe:\/a:[^:]+:[^:]+/);
if (v && v[0]) {
xml += ' <'+suppressType+'>' + v[0] + '</'+suppressType+'>\n';
} else {
xml += ' <'+suppressType+'>' + suppressVal + '</'+suppressType+'>\n';
}
} else {
xml += ' <'+suppressType+'>' + suppressVal + '</'+suppressType+'>\n';
}
xml += '</suppress>';
$('#modal-text').text(xml);
$('#modal-content,#modal-background').toggleClass('active');
$('#modal-content,#modal-background').addClass('active');
$('#modal-text').focus();
$('#modal-text').select();
}
@@ -150,6 +202,12 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
#modal-text:focus {
outline: none;
}
.suppresstype {
display: none;
}
.suppresstype.active {
display: block;
}
.suppressedLabel {
cursor: default;
padding:1px;
@@ -504,6 +562,11 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<div id="modal-background"></div>
<div id="modal-content">
<div>Press CTR-C to copy XML <a href="http://jeremylong.github.io/DependencyCheck/general/suppression.html" class="infolink" target="_blank" title="Help with suppressing false positives">[help]</a></div>
<button onclick="suppressSwitchTo('gav')" id="modal-suppress-change-to-gav" class="modal-button suppresstype" title="Supress by Maven Group Artifact Version">Suppress By GAV</button>
<button onclick="suppressSwitchTo('sha1')" id="modal-suppress-change-to-sha1" class="modal-button suppresstype" title="Supress by SHA1 hash">Suppress By SHA1</button><br/>
<input type="hidden" id="suppress-name"/>
<input type="hidden" id="suppress-type"/><input type="hidden" id="suppress-val"/>
<input type="hidden" id="suppress-sha1"/><input type="hidden" id="suppress-gav"/>
<textarea id="modal-text" cols="50" rows="10" readonly></textarea><br/>
<button id="modal-add-header" title="Add the parent XML nodes to create the complete XML file that can be used to suppress this finding" class="modal-button">Complete XML Doc</button><button id="modal-close" class="modal-button-right">Close</button>
</div>
@@ -515,6 +578,10 @@ the reporting provided constitutes acceptance for use in an AS IS condition, and
implied or otherwise, with regard to the analysis or its use. Any use of the tool and the reporting provided
is at the user’s risk. In no event shall the copyright holder or OWASP be held liable for any damages whatsoever
arising out of or in connection with the use of this tool, the analysis performed, or the resulting report.</p>
<h3><a href="http://jeremylong.github.io/DependencyCheck/general/thereport.html" target="_bank">How to read the report</a> |
<a href="http://jeremylong.github.io/DependencyCheck/general/suppression.html" target="_bank">Suppressing false positives</a> |
Getting Help: <a href="https://groups.google.com/forum/#!forum/dependency-check" target="_blank">google group</a> |
<a href="https://github.com/jeremylong/DependencyCheck/issues" target="_blank">github issues</a></h3>
]]#
<h2 class="">Project: $enc.html($applicationName)</h2>
<div class="">
@@ -542,7 +609,7 @@ arising out of or in connection with the use of this tool, the analysis performe
<li><i>dependency-check version</i>: $version</li>
<li><i>Report Generated On</i>: $scanDate</li>
<li><i>Dependencies Scanned</i>: $depCount ($dependencies.size() unique)</li>
<li><i>Vulnerable Dependencies</i>: $vulnDepCount</li>
<li><i>Vulnerable Dependencies</i>: <span id="vulnerableCount">$vulnDepCount</span></li>
<li><i>Vulnerabilities Found</i>: $vulnCount</li>
<li><i>Vulnerabilities Suppressed</i>: $vulnSuppressedCount</li>
<li class="scaninfo">...</li>
@@ -577,8 +644,8 @@ arising out of or in connection with the use of this tool, the analysis performe
#end
<td data-sort-value="$sortValue">
#set($sortValue="")
#set($cpeSort=0)
#foreach($id in $dependency.getIdentifiers())
#set($cpeSort=0)
#if ($id.type=="maven")
#if ($mavenlink=="" || !$mavenlink.url)
#set($mavenlink=$id)
@@ -725,6 +792,12 @@ arising out of or in connection with the use of this tool, the analysis performe
<ul><li><b>None</b></li></ul>
#else ## ($dependency.getIdentifiers().size()>0)
<ul>
#set($suppressGav='')
#foreach($id in $dependency.getIdentifiers())
#if ($id.type=="maven")
#set($suppressGav=$id.value)
#end
#end
#foreach($id in $dependency.getIdentifiers())
#if( $id.url )
##yes, we are HTML Encoding the href. this is okay. We can't URL encode as we have to trust the analyzer here...
@@ -737,7 +810,7 @@ arising out of or in connection with the use of this tool, the analysis performe
#end
#if ($id.type=="cpe")
##yes, we are HTML Encoding into JavaScript... the escape utils don't have a JS Encode and I haven't written one yet
<button class="copybutton" title="Generate Suppression XML for this CPE for this file" onclick="copyText('$enc.html($dependency.FileNameForJavaScript)', '$enc.html($dependency.Sha1sum)', 'cpe', '$enc.html($id.value)')">suppress</button>
<button class="copybutton" title="Generate Suppression XML for this CPE for this file" onclick="copyText('$enc.html($dependency.FileNameForJavaScript)', '$enc.html($dependency.Sha1sum)', '$enc.html($suppressGav)', 'cpe', '$enc.html($id.value)')">suppress</button>
#end
#if ($id.description)
<br/>$enc.html($id.description)
@@ -753,7 +826,7 @@ arising out of or in connection with the use of this tool, the analysis performe
<div id="content$cnt" class="subsectioncontent standardsubsection">
#foreach($vuln in $dependency.getVulnerabilities())
#set($vsctr=$vsctr+1)
<p><b><a target="_blank" href="http://web.nvd.nist.gov/view/vuln/detail?vulnId=$enc.url($vuln.name)">$enc.html($vuln.name)</a></b> <button class="copybutton" title="Generate Suppression XML for this CCE for this file" onclick="copyText('$enc.html($dependency.FileNameForJavaScript)', '$enc.html($dependency.Sha1sum)', 'cve', '$enc.html($vuln.name)')">suppress</button></p>
<p><b><a target="_blank" href="http://web.nvd.nist.gov/view/vuln/detail?vulnId=$enc.url($vuln.name)">$enc.html($vuln.name)</a></b> <button class="copybutton" title="Generate Suppression XML for this CCE for this file" onclick="copyText('$enc.html($dependency.FileNameForJavaScript)', '$enc.html($dependency.Sha1sum)', '$enc.html($suppressGav)', 'cve', '$enc.html($vuln.name)')">suppress</button></p>
<p>Severity:
#if ($vuln.cvssScore<4.0)
Low

@@ -177,6 +177,11 @@ the reporting provided constitutes acceptance for use in an AS IS condition, and
implied or otherwise, with regard to the analysis or its use. Any use of the tool and the reporting provided
is at the user’s risk. In no event shall the copyright holder or OWASP be held liable for any damages whatsoever
arising out of or in connection with the use of this tool, the analysis performed, or the resulting report.</p>
<h3>About The Vulnerability Report | Getting Help: <a href="https://groups.google.com/forum/#!forum/dependency-check" target="_blank">google group</a> |
<a href="https://github.com/jeremylong/DependencyCheck/issues" target="_blank">github issues</a></h3>
<p>This report is intended to be a quick summary of findings. It is highly recommended that you use the full HTML
report to determine if any <a href="http://jeremylong.github.io/DependencyCheck/general/suppression.html">false positives</a>
have been reported. Additionally, the HTML report provides many features not found in the vulnerability report.</p>
]]#
<h2 class="sectionheader white">Vulnerability Report for $enc.html($applicationName)</h2>
<div class="sectioncontent">Report Generated On: $scanDate<br/><br/>
@@ -190,7 +195,7 @@ arising out of or in connection with the use of this tool, the analysis performe
#end
#end
Dependencies Scanned: $depCount<br/>
Vulnerable Dependencies: $vulnCount<br/><br/>
Vulnerable Dependencies: <span id="volnCount">$vulnCount</span><br/><br/>
<h2>Vulnerable Dependencies</h2>
#set($cnt=0)
<table id="vulnTable" class="lined">
@@ -222,10 +227,10 @@ arising out of or in connection with the use of this tool, the analysis performe
($vuln.cvssScore)
<td>#set($cnt=$cnt+1)
#if($dependency.getRelatedDependencies().size()>0)<span id="header$cnt" class="expandable collapsedList">#end
$enc.html($dependency.DisplayFileName)
<span title="$enc.html($dependency.FilePath)">$enc.html($dependency.DisplayFileName)</span>
#if($dependency.getRelatedDependencies().size()>0) </span><div id="content$cnt" class="hidden">#end
#foreach($related in $dependency.getRelatedDependencies())
$enc.html($related.DisplayFileName)<br/>
<span title="$enc.html($related.FilePath)">$enc.html($related.DisplayFileName)</span><br/>
#end
#if($dependency.getRelatedDependencies().size()>0)</div#end
</td>

@@ -0,0 +1,101 @@
package org.owasp.dependencycheck;

import mockit.Expectations;
import mockit.Mocked;
import mockit.Verifications;
import org.junit.Test;
import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
import org.owasp.dependencycheck.analyzer.HintAnalyzer;
import org.owasp.dependencycheck.dependency.Dependency;

import java.io.File;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.owasp.dependencycheck.utils.Settings;

public class AnalysisTaskTest extends BaseTest {

@Mocked
FileTypeAnalyzer fileTypeAnalyzer;

@Mocked
Dependency dependency;

@Mocked
Engine engine;

@Test
public void shouldAnalyzeReturnsTrueForNonFileTypeAnalyzers() {
AnalysisTask instance = new AnalysisTask(new HintAnalyzer(), null, null, null, null);
boolean shouldAnalyze = instance.shouldAnalyze();
assertTrue(shouldAnalyze);
}

@Test
public void shouldAnalyzeReturnsTrueIfTheFileTypeAnalyzersAcceptsTheDependency() {
final File dependencyFile = new File("");
new Expectations() {{
dependency.getActualFile();
result = dependencyFile;

fileTypeAnalyzer.accept(dependencyFile);
result = true;
}};

AnalysisTask analysisTask = new AnalysisTask(fileTypeAnalyzer, dependency, null, null, Settings.getInstance());

boolean shouldAnalyze = analysisTask.shouldAnalyze();
assertTrue(shouldAnalyze);
}

@Test
public void shouldAnalyzeReturnsFalseIfTheFileTypeAnalyzerDoesNotAcceptTheDependency() {
final File dependencyFile = new File("");
new Expectations() {{
dependency.getActualFile();
result = dependencyFile;

fileTypeAnalyzer.accept(dependencyFile);
result = false;
}};

AnalysisTask analysisTask = new AnalysisTask(fileTypeAnalyzer, dependency, null, null, Settings.getInstance());

boolean shouldAnalyze = analysisTask.shouldAnalyze();
assertFalse(shouldAnalyze);
}

@Test
public void taskAnalyzes() throws Exception {
final AnalysisTask analysisTask = new AnalysisTask(fileTypeAnalyzer, dependency, engine, null, Settings.getInstance());
new Expectations(analysisTask) {{
analysisTask.shouldAnalyze();
result = true;
}};

analysisTask.call();

new Verifications() {{
fileTypeAnalyzer.analyze(dependency, engine);
times = 1;
}};
}

@Test
public void taskDoesNothingIfItShouldNotAnalyze() throws Exception {
final AnalysisTask analysisTask = new AnalysisTask(fileTypeAnalyzer, dependency, engine, null, Settings.getInstance());
new Expectations(analysisTask) {{
analysisTask.shouldAnalyze();
result = false;
}};

analysisTask.call();

new Verifications() {{
fileTypeAnalyzer.analyze(dependency, engine);
times = 0;
}};
}
}
@@ -65,7 +65,7 @@ public abstract class BaseDBTestCase extends BaseTest {
FileInputStream fis = null;
ZipInputStream zin = null;
try {
File path = new File(BaseDBTestCase.class.getClassLoader().getResource("data.zip").getPath());
File path = new File(BaseDBTestCase.class.getClassLoader().getResource("data.zip").toURI().getPath());
fis = new FileInputStream(path);
zin = new ZipInputStream(new BufferedInputStream(fis));
ZipEntry entry;

@@ -17,6 +17,8 @@ package org.owasp.dependencycheck;

import java.io.File;
import java.io.InputStream;
import java.net.URISyntaxException;

import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.BeforeClass;
@@ -69,8 +71,12 @@ public class BaseTest {
* @return the resource as an File
*/
public static File getResourceAsFile(Object o, String resource) {
File f = new File(o.getClass().getClassLoader().getResource(resource).getPath());
Assume.assumeTrue(String.format("%n%n[SEVERE] Unable to load resource for test case: %s%n%n", resource), f.exists());
return f;
try{
File f = new File(o.getClass().getClassLoader().getResource(resource).toURI().getPath());
Assume.assumeTrue(String.format("%n%n[SEVERE] Unable to load resource for test case: %s%n%n", resource), f.exists());
return f;
}catch (URISyntaxException e){
throw new UnsupportedOperationException(e);
}
}
}

@@ -0,0 +1,96 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2016 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck;

import mockit.Expectations;
import mockit.Mocked;
import org.junit.Test;
import org.owasp.dependencycheck.analyzer.Analyzer;
import org.owasp.dependencycheck.analyzer.JarAnalyzer;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.ExceptionCollection;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
* @author Jeremy Long
*/
public class EngineTest extends BaseDBTestCase {

@Mocked
Analyzer analyzer;

@Mocked
AnalysisTask analysisTask;

/**
* Test of scanFile method, of class Engine.
*/
@Test
public void testScanFile() throws DatabaseException {
Engine instance = new Engine();
instance.addFileTypeAnalyzer(new JarAnalyzer());
File file = BaseTest.getResourceAsFile(this, "dwr.jar");
Dependency dwr = instance.scanFile(file);
file = BaseTest.getResourceAsFile(this, "org.mortbay.jmx.jar");
instance.scanFile(file);
assertEquals(2, instance.getDependencies().size());

file = BaseTest.getResourceAsFile(this, "dwr.jar");
Dependency secondDwr = instance.scanFile(file);

assertEquals(2, instance.getDependencies().size());
assertTrue(dwr == secondDwr);
}

@Test(expected = ExceptionCollection.class)
public void exceptionDuringAnalysisTaskExecutionIsFatal() throws DatabaseException, ExceptionCollection {
final ExecutorService executorService = Executors.newFixedThreadPool(3);
final Engine instance = new Engine();
final List<Throwable> exceptions = new ArrayList<Throwable>();

new Expectations() {{
analysisTask.call();
result = new IllegalStateException("Analysis task execution threw an exception");
}};

final List<AnalysisTask> failingAnalysisTask = new ArrayList<AnalysisTask>();
failingAnalysisTask.add(analysisTask);

new Expectations(instance) {{
instance.getExecutorService(analyzer);
result = executorService;

instance.getAnalysisTasks(analyzer, exceptions);
result = failingAnalysisTask;
}};

instance.executeAnalysisTasks(analyzer, exceptions);

assertTrue(executorService.isShutdown());
}
}
@@ -104,7 +104,7 @@ public class AbstractSuppressionAnalyzerTest extends BaseTest {
public class AbstractSuppressionAnalyzerImpl extends AbstractSuppressionAnalyzer {

@Override
public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}

@@ -117,6 +117,11 @@ public class AbstractSuppressionAnalyzerTest extends BaseTest {
public AnalysisPhase getAnalysisPhase() {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}

@Override
protected String getAnalyzerEnabledSettingKey() {
return "unknown";
}
}

}

@@ -41,7 +41,7 @@ public class ArchiveAnalyzerTest extends BaseTest {
}

/**
* Test of analyzeFileType method, of class ArchiveAnalyzer.
* Test of analyzeDependency method, of class ArchiveAnalyzer.
*/
@Test
public void testZippableExtensions() throws Exception {

@@ -24,6 +24,7 @@ import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.junit.Assume;
import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeNotNull;
import org.junit.Before;
import org.junit.Test;
import org.owasp.dependencycheck.BaseTest;
@@ -61,6 +62,7 @@ public class AssemblyAnalyzerTest extends BaseTest {
analyzer = new AssemblyAnalyzer();
analyzer.accept(new File("test.dll")); // trick into "thinking it is active"
analyzer.initialize();
Assume.assumeTrue("Mono is not installed, skipping tests.", analyzer.buildArgumentList() == null);
} catch (Exception e) {
if (e.getMessage().contains("Could not execute .NET AssemblyAnalyzer")) {
LOGGER.warn("Exception setting up AssemblyAnalyzer. Tests will be incomplete");
@@ -81,7 +83,7 @@ public class AssemblyAnalyzerTest extends BaseTest {

@Test
public void testAnalysis() throws Exception {
//File f = new File(AssemblyAnalyzerTest.class.getClassLoader().getResource("GrokAssembly.exe").getPath());
assumeNotNull(analyzer.buildArgumentList());
File f = BaseTest.getResourceAsFile(this, "GrokAssembly.exe");
Dependency d = new Dependency(f);
analyzer.analyze(d, null);
@@ -104,7 +106,7 @@ public class AssemblyAnalyzerTest extends BaseTest {

@Test
public void testLog4Net() throws Exception {
//File f = new File(AssemblyAnalyzerTest.class.getClassLoader().getResource("log4net.dll").getPath());
assumeNotNull(analyzer.buildArgumentList());
File f = BaseTest.getResourceAsFile(this, "log4net.dll");

Dependency d = new Dependency(f);
@@ -116,9 +118,10 @@ public class AssemblyAnalyzerTest extends BaseTest {

@Test
public void testNonexistent() {
assumeNotNull(analyzer.buildArgumentList());

// Tweak the log level so the warning doesn't show in the console
String oldProp = System.getProperty(LOG_KEY, "info");
//File f = new File(AssemblyAnalyzerTest.class.getClassLoader().getResource("log4net.dll").getPath());
File f = BaseTest.getResourceAsFile(this, "log4net.dll");
File test = new File(f.getParent(), "nonexistent.dll");
Dependency d = new Dependency(test);