Mirror of https://github.com/ysoftdevs/DependencyCheck.git, synced 2026-01-14 07:43:40 +01:00
Compare commits
164 Commits
| Author | SHA1 | Date |
|---|---|---|
| | e0b549e427 | |
| | 75207169e3 | |
| | e07f568237 | |
| | e2cd99d40d | |
| | 27f2682a98 | |
| | 34a2110e9a | |
| | 96ba51db4f | |
| | 9c6053a60a | |
| | 358367ef9e | |
| | a12bc44ecd | |
| | 773ac019f8 | |
| | e751b7b814 | |
| | 824aa23b9b | |
| | b7b97960a6 | |
| | 40f0e907e1 | |
| | 5ff0dc885d | |
| | e70a0ee238 | |
| | 9338697079 | |
| | 4018a4e1de | |
| | e8788dd2a4 | |
| | e70c2f2b05 | |
| | 5ed0583039 | |
| | f76d7295f9 | |
| | 6e280c4958 | |
| | 48b4ef1944 | |
| | 9150df964f | |
| | b2237394e1 | |
| | b3a0f7ad26 | |
| | 782ba42abc | |
| | 74b93ce602 | |
| | e907c40f17 | |
| | 13a9dedb1e | |
| | b37698f245 | |
| | d30d000346 | |
| | 446239a5bd | |
| | ac25aa795b | |
| | f117a9ded0 | |
| | 947d38ccd2 | |
| | 23f7996db8 | |
| | 9fdff51f26 | |
| | 9b43bf004a | |
| | 5d73faa1f0 | |
| | 9e70279b31 | |
| | 9e671d1065 | |
| | 7e2c4af0b3 | |
| | 11f9092a65 | |
| | 6017e5c217 | |
| | b2149ff4b9 | |
| | 1a5177c576 | |
| | 7020c9931a | |
| | 9bc43e2e8e | |
| | 26a4e7451e | |
| | 3470d33bdc | |
| | 51c96894b4 | |
| | 7fc2be6a0a | |
| | 110c97bc15 | |
| | 8d51d8fa1f | |
| | 4b02a567e0 | |
| | 5a939ec108 | |
| | d9c4480627 | |
| | 9388340e23 | |
| | 2285d2ef4b | |
| | f84aea0040 | |
| | 452969cc92 | |
| | 128a600f18 | |
| | 7dd9a52e78 | |
| | ff341b7228 | |
| | 92a8b4ca85 | |
| | 384199b28d | |
| | 44edcabe15 | |
| | 1a5e9884fc | |
| | cda81315d2 | |
| | d7100e54d1 | |
| | 989caead9c | |
| | a9d3b627f1 | |
| | 99a1606df1 | |
| | 6326513c63 | |
| | f6cfae595a | |
| | 0794efcf41 | |
| | b9ea82f2c1 | |
| | 8b705b3370 | |
| | c684607a4d | |
| | b00833c2de | |
| | 0ca6bc6ab6 | |
| | 60faddff9b | |
| | b35da8ad4b | |
| | 79887c148a | |
| | 1ae3457ee6 | |
| | d2154c9d29 | |
| | 40ede24a99 | |
| | 5960ba919d | |
| | f6aaaa8815 | |
| | 6f1b20c936 | |
| | 7734a50427 | |
| | aef118d375 | |
| | 22cae71999 | |
| | 29d127303c | |
| | 5574f1c24f | |
| | 9457744571 | |
| | 19243c479c | |
| | e868ce8328 | |
| | ffa846c05a | |
| | dde1791476 | |
| | 45438a7f06 | |
| | c980e77ea3 | |
| | 176d3ddefa | |
| | 98d783d448 | |
| | bcd6634d8a | |
| | 0b260cef2a | |
| | 6a68abbd67 | |
| | 9fcf23c802 | |
| | 5c2c08e051 | |
| | 1f254997e1 | |
| | 4f95af0864 | |
| | 6ff39be9d2 | |
| | 6cf5a47971 | |
| | 56da53c700 | |
| | 7091e10795 | |
| | 34765c5741 | |
| | 36c139872a | |
| | 1e77cec677 | |
| | e95e3fb2d0 | |
| | 39c2234e38 | |
| | f4fff5d9cb | |
| | 659785f972 | |
| | 85c04f6e3e | |
| | bef117cbe8 | |
| | 46dd7cf86e | |
| | 9ed5a97267 | |
| | cc2da70db2 | |
| | cedd93e774 | |
| | 632e1692eb | |
| | 4861592d2a | |
| | 22e6d4edf3 | |
| | e9bd7ff72f | |
| | e7228fb489 | |
| | 96c03a68f2 | |
| | 4f6f248421 | |
| | a8f14c86fd | |
| | 36de3d1e25 | |
| | 48bc4570e1 | |
| | 94b272dbae | |
| | c093edf459 | |
| | 0164feffcc | |
| | 8cd377b99f | |
| | 74282c8ac5 | |
| | d2158e5e44 | |
| | 9ea16ad1d1 | |
| | 45941adb71 | |
| | c4d662fd2b | |
| | d9ce3cda66 | |
| | 2fa8507d69 | |
| | 00d4ee47de | |
| | 413c71eb0a | |
| | 2b761279e4 | |
| | 1e7bbfa7c1 | |
| | dc7245ff6e | |
| | ffaf7b40e9 | |
| | 99355d993a | |
| | d25f6e813c | |
| | 043f8e0523 | |
| | 5fcf2a2623 | |
| | f1422adf75 | |
| | c2b1742582 | |
.gitignore (vendored, 3 changes)
@@ -26,4 +26,5 @@ _site/**
.LCKpom.xml~
#coverity
/cov-int/
/dependency-check-core/nbproject/
cov-scan.bat
Dockerfile (new file, 14 lines)
@@ -0,0 +1,14 @@
FROM java:8

MAINTAINER Timo Pagel <dependencycheckmaintainer@timo-pagel.de>

RUN wget -O /tmp/current.txt http://jeremylong.github.io/DependencyCheck/current.txt && current=$(cat /tmp/current.txt) && wget https://dl.bintray.com/jeremy-long/owasp/dependency-check-$current-release.zip && unzip dependency-check-$current-release.zip && mv dependency-check /usr/share/

RUN useradd -ms /bin/bash dockeruser && chown -R dockeruser:dockeruser /usr/share/dependency-check && mkdir /report && chown -R dockeruser:dockeruser /report
USER dockeruser

VOLUME "/src /usr/share/dependency-check/data /report"

WORKDIR /report

ENTRYPOINT ["/usr/share/dependency-check/bin/dependency-check.sh", "--scan", "/src"]
README.md (34 changes)
@@ -1,4 +1,5 @@
[](https://travis-ci.org/jeremylong/DependencyCheck) [](https://www.apache.org/licenses/LICENSE-2.0.txt)
[](https://travis-ci.org/jeremylong/DependencyCheck) [](https://www.apache.org/licenses/LICENSE-2.0.txt) [](https://scan.coverity.com/projects/dependencycheck)

Dependency-Check
================

@@ -96,6 +97,37 @@ On Windows

Then load the resulting 'DependencyCheck-Report.html' into your favorite browser.

### Docker

In the following example it is assumed that the source to be checked is in the current directory. A persistent data directory and a persistent report directory are used so that the container can be destroyed after each run while making sure that you always use the newest version.
```
# After the first run, feel free to change the owner of the directories to the owner of the created files and the permissions to 744
DATA_DIRECTORY=$HOME/OWASP-Dependency-Check/data
REPORT_DIRECTORY=/$HOME/OWASP-Dependency-Check/reports

if [ ! -d $DATA_DIRECTORY ]; then
  echo "Initially creating persistent directories"
  mkdir -p $DATA_DIRECTORY
  chmod -R 777 $DATA_DIRECTORY

  mkdir -p $REPORT_DIRECTORY
  chmod -R 777 $REPORT_DIRECTORY
fi

docker pull owasp/dependency-check # Make sure it is the latest version

docker run --rm \
  --volume $(pwd):/src \
  --volume $DATA_DIRECTORY:/usr/share/dependency-check/data \
  --volume $REPORT_DIRECTORY:/report \
  --name dependency-check \
  dc \
  --suppression "/src/security/dependency-check-suppression.xml"\
  --format "ALL" \
  --project "My OWASP Dependency Check Project" \
```


Mailing List
------------

@@ -20,7 +20,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
    <parent>
        <groupId>org.owasp</groupId>
        <artifactId>dependency-check-parent</artifactId>
        <version>1.4.2</version>
        <version>1.4.4</version>
    </parent>

    <artifactId>dependency-check-ant</artifactId>
@@ -24,16 +24,21 @@ import org.slf4j.helpers.MarkerIgnoringBase;
import org.slf4j.helpers.MessageFormatter;

/**
 * An instance of {@link org.slf4j.Logger} which simply calls the log method on the delegate Ant task.
 * An instance of {@link org.slf4j.Logger} which simply calls the log method on
 * the delegate Ant task.
 *
 * @author colezlaw
 */
public class AntLoggerAdapter extends MarkerIgnoringBase {

    /**
     * serialization UID.
     */
    private static final long serialVersionUID = -1337;
    /**
     * A reference to the Ant task used for logging.
     */
    private Task task;
    private transient Task task;

    /**
     * Constructs an Ant Logger Adapter.
@@ -346,6 +346,28 @@ public class Check extends Update {
    public void setSuppressionFile(String suppressionFile) {
        this.suppressionFile = suppressionFile;
    }
    /**
     * The path to the suppression file.
     */
    private String hintsFile;

    /**
     * Get the value of hintsFile.
     *
     * @return the value of hintsFile
     */
    public String getHintsFile() {
        return hintsFile;
    }

    /**
     * Set the value of hintsFile.
     *
     * @param hintsFile new value of hintsFile
     */
    public void setHintsFile(String hintsFile) {
        this.hintsFile = hintsFile;
    }
    /**
     * flag indicating whether or not to show a summary of findings.
     */
@@ -904,6 +926,7 @@ public class Check extends Update {
        super.populateSettings();
        Settings.setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate);
        Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
        Settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);
        Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, enableExperimental);
        Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_JAR_ENABLED, jarAnalyzerEnabled);
        Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, pyDistributionAnalyzerEnabled);
@@ -2,7 +2,7 @@ Configuration
====================
The dependency-check-purge task deletes the local copy of the NVD. This task
should rarely be used, if ever. This is included as a convenience method in
the rare circumstance that the local H2 database because corrupt.
the rare circumstance that the local H2 database becomes corrupt.

```xml
<target name="dependency-check-purge" description="Dependency-Check purge">
@@ -39,6 +39,7 @@ projectName | The name of the project being scanned.
reportFormat | The report format to be generated (HTML, XML, VULN, ALL). This configuration option has no effect if using this within the Site plugin unless the externalReport is set to true. | HTML
reportOutputDirectory | The location to write the report(s). Note, this is not used if generating the report as part of a `mvn site` build | 'target'
suppressionFile | The file path to the XML suppression file \- used to suppress [false positives](../general/suppression.html) |
hintsFile | The file path to the XML hints file \- used to resolve [false negatives](../general/hints.html) |
proxyServer | The Proxy Server; see the [proxy configuration](../data/proxy.html) page for more information. |
proxyPort | The Proxy Port. |
proxyUsername | Defines the proxy user name. |
@@ -20,7 +20,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
    <parent>
        <groupId>org.owasp</groupId>
        <artifactId>dependency-check-parent</artifactId>
        <version>1.4.2</version>
        <version>1.4.4</version>
    </parent>

    <artifactId>dependency-check-cli</artifactId>
@@ -19,6 +19,7 @@ package org.owasp.dependencycheck;
|
||||
|
||||
import ch.qos.logback.classic.LoggerContext;
|
||||
import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
|
||||
import ch.qos.logback.classic.spi.ILoggingEvent;
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
@@ -158,8 +159,13 @@ public class App {
|
||||
exitCode = -4;
|
||||
}
|
||||
try {
|
||||
runScan(cli.getReportDirectory(), cli.getReportFormat(), cli.getProjectName(), cli.getScanFiles(),
|
||||
cli.getExcludeList(), cli.getSymLinkDepth());
|
||||
final String[] scanFiles = cli.getScanFiles();
|
||||
if (scanFiles != null) {
|
||||
runScan(cli.getReportDirectory(), cli.getReportFormat(), cli.getProjectName(), scanFiles,
|
||||
cli.getExcludeList(), cli.getSymLinkDepth());
|
||||
} else {
|
||||
LOGGER.error("No scan files configured");
|
||||
}
|
||||
} catch (InvalidScanPathException ex) {
|
||||
LOGGER.error("An invalid scan path was detected; unable to scan '//*' paths");
|
||||
exitCode = -10;
|
||||
@@ -172,7 +178,7 @@ public class App {
|
||||
} catch (ExceptionCollection ex) {
|
||||
if (ex.isFatal()) {
|
||||
exitCode = -13;
|
||||
LOGGER.error("One or more fatal errors occured");
|
||||
LOGGER.error("One or more fatal errors occurred");
|
||||
} else {
|
||||
exitCode = -14;
|
||||
}
|
||||
@@ -293,7 +299,7 @@ public class App {
|
||||
throw ex;
|
||||
}
|
||||
}
|
||||
if (exCol != null && exCol.getExceptions().size()>0) {
|
||||
if (exCol != null && exCol.getExceptions().size() > 0) {
|
||||
throw exCol;
|
||||
}
|
||||
} finally {
|
||||
@@ -301,7 +307,7 @@ public class App {
|
||||
engine.cleanup();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -342,6 +348,7 @@ public class App {
|
||||
final String dataDirectory = cli.getDataDirectory();
|
||||
final File propertiesFile = cli.getPropertiesFile();
|
||||
final String suppressionFile = cli.getSuppressionFile();
|
||||
final String hintsFile = cli.getHintsFile();
|
||||
final String nexusUrl = cli.getNexusUrl();
|
||||
final String databaseDriverName = cli.getDatabaseDriverName();
|
||||
final String databaseDriverPath = cli.getDatabaseDriverPath();
|
||||
@@ -389,6 +396,7 @@ public class App {
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPass);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);
|
||||
Settings.setIntIfNotNull(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours);
|
||||
|
||||
//File Type Analyzer Settings
|
||||
@@ -440,7 +448,7 @@ public class App {
|
||||
encoder.setPattern("%d %C:%L%n%-5level - %msg%n");
|
||||
encoder.setContext(context);
|
||||
encoder.start();
|
||||
final FileAppender fa = new FileAppender();
|
||||
final FileAppender<ILoggingEvent> fa = new FileAppender<ILoggingEvent>();
|
||||
fa.setAppend(true);
|
||||
fa.setEncoder(encoder);
|
||||
fa.setContext(context);
|
||||
|
||||
@@ -196,6 +196,10 @@ public final class CliParser {
|
||||
isValid = false;
|
||||
final String msg = String.format("Invalid '%s' argument: '%s'%nUnable to scan paths that start with '//'.", argumentName, path);
|
||||
throw new FileNotFoundException(msg);
|
||||
} else if ((path.endsWith("/*") && !path.endsWith("**/*")) || (path.endsWith("\\*") && path.endsWith("**\\*"))) {
|
||||
final String msg = String.format("Possibly incorrect path '%s' from argument '%s' because it ends with a slash star; "
|
||||
+ "dependency-check uses ant-style paths", path, argumentName);
|
||||
LOGGER.warn(msg);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -273,6 +277,10 @@ public final class CliParser {
|
||||
.desc("The file path to the suppression XML file.")
|
||||
.build();
|
||||
|
||||
final Option hintsFile = Option.builder().argName("file").hasArg().longOpt(ARGUMENT.HINTS_FILE)
|
||||
.desc("The file path to the hints XML file.")
|
||||
.build();
|
||||
|
||||
final Option cveValidForHours = Option.builder().argName("hours").hasArg().longOpt(ARGUMENT.CVE_VALID_FOR_HOURS)
|
||||
.desc("The number of hours to wait before checking for new updates from the NVD.")
|
||||
.build();
|
||||
@@ -301,6 +309,7 @@ public final class CliParser {
|
||||
.addOption(props)
|
||||
.addOption(verboseLog)
|
||||
.addOption(suppressionFile)
|
||||
.addOption(hintsFile)
|
||||
.addOption(cveValidForHours)
|
||||
.addOption(experimentalEnabled);
|
||||
}
|
||||
@@ -958,6 +967,15 @@ public final class CliParser {
|
||||
return line.getOptionValue(ARGUMENT.SUPPRESSION_FILE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the path to the hints file.
|
||||
*
|
||||
* @return the path to the hints file
|
||||
*/
|
||||
public String getHintsFile() {
|
||||
return line.getOptionValue(ARGUMENT.HINTS_FILE);
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Prints the manifest information to standard output.</p>
|
||||
@@ -966,7 +984,7 @@ public final class CliParser {
|
||||
*/
|
||||
public void printVersionInfo() {
|
||||
final String version = String.format("%s version %s",
|
||||
Settings.getString(Settings.KEYS.APPLICATION_VAME, "dependency-check"),
|
||||
Settings.getString(Settings.KEYS.APPLICATION_NAME, "dependency-check"),
|
||||
Settings.getString(Settings.KEYS.APPLICATION_VERSION, "Unknown"));
|
||||
System.out.println(version);
|
||||
}
|
||||
@@ -1269,9 +1287,14 @@ public final class CliParser {
|
||||
*/
|
||||
public static final String SUPPRESSION_FILE = "suppression";
|
||||
/**
|
||||
* The CLI argument name for setting the location of the suppression
|
||||
* The CLI argument name for setting the location of the hint
|
||||
* file.
|
||||
*/
|
||||
public static final String HINTS_FILE = "hints";
|
||||
/**
|
||||
* The CLI argument name for setting the number of hours to wait before
|
||||
* checking for new updates from the NVD.
|
||||
*/
|
||||
public static final String CVE_VALID_FOR_HOURS = "cveValidForHours";
|
||||
/**
|
||||
* Disables the Jar Analyzer.
|
||||
|
||||
@@ -9,10 +9,7 @@ Installation & Usage
|
||||
====================
|
||||
Download the dependency-check command line tool [here](http://dl.bintray.com/jeremy-long/owasp/dependency-check-${project.version}-release.zip).
|
||||
Extract the zip file to a location on your computer and put the 'bin' directory into the
|
||||
path environment variable. On \*nix systems you will likely need to make the shell
|
||||
script executable:
|
||||
|
||||
$ chmod +777 dependency-check.sh
|
||||
path environment variable.
|
||||
|
||||
#set( $H = '#' )
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<parent>
|
||||
<groupId>org.owasp</groupId>
|
||||
<artifactId>dependency-check-parent</artifactId>
|
||||
<version>1.4.2</version>
|
||||
<version>1.4.4</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>dependency-check-core</artifactId>
|
||||
|
||||
@@ -0,0 +1,119 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2016 Stefan Neuhaus. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck;
|
||||
|
||||
import org.owasp.dependencycheck.analyzer.Analyzer;
|
||||
import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.concurrent.Callable;
|
||||
|
||||
/**
|
||||
* Task to support parallelism of dependency-check analysis.
|
||||
* Analyses a single {@link Dependency} by a specific {@link Analyzer}.
|
||||
*
|
||||
* @author Stefan Neuhaus
|
||||
*/
|
||||
class AnalysisTask implements Callable<Void> {
|
||||
|
||||
/**
|
||||
* Instance of the logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(AnalysisTask.class);
|
||||
|
||||
/**
|
||||
* A reference to the analyzer.
|
||||
*/
|
||||
private final Analyzer analyzer;
|
||||
/**
|
||||
* The dependency to analyze.
|
||||
*/
|
||||
private final Dependency dependency;
|
||||
/**
|
||||
* A reference to the dependency-check engine.
|
||||
*/
|
||||
private final Engine engine;
|
||||
/**
|
||||
* The list of exceptions that may occur during analysis.
|
||||
*/
|
||||
private final List<Throwable> exceptions;
|
||||
|
||||
/**
|
||||
* Creates a new analysis task.
|
||||
*
|
||||
* @param analyzer a reference of the analyzer to execute
|
||||
* @param dependency the dependency to analyze
|
||||
* @param engine the dependency-check engine
|
||||
* @param exceptions exceptions that occur during analysis will be added to
|
||||
* this collection of exceptions
|
||||
*/
|
||||
AnalysisTask(Analyzer analyzer, Dependency dependency, Engine engine, List<Throwable> exceptions) {
|
||||
this.analyzer = analyzer;
|
||||
this.dependency = dependency;
|
||||
this.engine = engine;
|
||||
this.exceptions = exceptions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes the analysis task.
|
||||
*
|
||||
* @return null
|
||||
* @throws Exception thrown if unable to execute the analysis task
|
||||
*/
|
||||
@Override
|
||||
public Void call() {
|
||||
Settings.initialize();
|
||||
|
||||
if (shouldAnalyze()) {
|
||||
LOGGER.debug("Begin Analysis of '{}' ({})", dependency.getActualFilePath(), analyzer.getName());
|
||||
try {
|
||||
analyzer.analyze(dependency, engine);
|
||||
} catch (AnalysisException ex) {
|
||||
LOGGER.warn("An error occurred while analyzing '{}' ({}).", dependency.getActualFilePath(), analyzer.getName());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
} catch (Throwable ex) {
|
||||
LOGGER.warn("An unexpected error occurred during analysis of '{}' ({}): {}",
|
||||
dependency.getActualFilePath(), analyzer.getName(), ex.getMessage());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if the analyzer can analyze the given dependency.
|
||||
*
|
||||
* @return whether or not the analyzer can analyze the dependency
|
||||
*/
|
||||
boolean shouldAnalyze() {
|
||||
if (analyzer instanceof FileTypeAnalyzer) {
|
||||
final FileTypeAnalyzer fileTypeAnalyzer = (FileTypeAnalyzer) analyzer;
|
||||
return fileTypeAnalyzer.accept(dependency.getActualFile());
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
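The new AnalysisTask above is a plain Callable<Void> that records failures into a shared, synchronized exception list; the Engine changes further down drive batches of such tasks through ExecutorService.invokeAll with a timeout. Below is a small, self-contained sketch of that execution pattern using a placeholder task body and the same four-threads-per-core pool sizing that Engine.getExecutorService() uses; it is illustrative only, not code from this change set.

```java
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class InvokeAllSketch {

    public static void main(String[] args) throws InterruptedException {
        // Exceptions are collected in a synchronized list, as analyzeDependencies() does in this diff.
        final List<Throwable> exceptions = Collections.synchronizedList(new ArrayList<Throwable>());

        // Each task mirrors AnalysisTask: do the work, record failures, return null.
        final List<Callable<Void>> tasks = new ArrayList<Callable<Void>>();
        for (int i = 0; i < 10; i++) {
            final int id = i;
            tasks.add(new Callable<Void>() {
                @Override
                public Void call() {
                    try {
                        System.out.println("analyzing dependency " + id);
                    } catch (RuntimeException ex) {
                        exceptions.add(ex);
                    }
                    return null;
                }
            });
        }

        // Pool sized like Engine.getExecutorService(): 4 threads per available processor.
        final int threads = 4 * Runtime.getRuntime().availableProcessors();
        final ExecutorService executor = Executors.newFixedThreadPool(threads);
        try {
            // invokeAll with a timeout, then inspect each Future for failure or cancellation,
            // the same control flow as executeAnalysisTasks() later in this diff.
            final List<Future<Void>> results = executor.invokeAll(tasks, 10, TimeUnit.MINUTES);
            for (Future<Void> result : results) {
                try {
                    result.get();
                } catch (ExecutionException e) {
                    exceptions.add(e.getCause());
                } catch (CancellationException e) {
                    exceptions.add(e);
                }
            }
        } finally {
            executor.shutdown();
        }
        System.out.println("collected " + exceptions.size() + " exception(s)");
    }
}
```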
@@ -21,7 +21,6 @@ import org.owasp.dependencycheck.analyzer.AnalysisPhase;
|
||||
import org.owasp.dependencycheck.analyzer.Analyzer;
|
||||
import org.owasp.dependencycheck.analyzer.AnalyzerService;
|
||||
import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.ConnectionFactory;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
@@ -29,9 +28,9 @@ import org.owasp.dependencycheck.data.update.CachedWebDataSource;
|
||||
import org.owasp.dependencycheck.data.update.UpdateService;
|
||||
import org.owasp.dependencycheck.data.update.exception.UpdateException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.exception.NoDataException;
|
||||
import org.owasp.dependencycheck.exception.ExceptionCollection;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.exception.NoDataException;
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
@@ -41,12 +40,19 @@ import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CancellationException;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* Scans files, directories, etc. for Dependencies. Analyzers are loaded and
|
||||
@@ -61,7 +67,7 @@ public class Engine implements FileFilter {
|
||||
/**
|
||||
* The list of dependencies.
|
||||
*/
|
||||
private List<Dependency> dependencies = new ArrayList<Dependency>();
|
||||
private final List<Dependency> dependencies = Collections.synchronizedList(new ArrayList<Dependency>());
|
||||
/**
|
||||
* A Map of analyzers grouped by Analysis phase.
|
||||
*/
|
||||
@@ -156,9 +162,14 @@ public class Engine implements FileFilter {
    }

    /**
     * Get the dependencies identified.
     * Get the dependencies identified. The returned list is a reference to the
     * engine's synchronized list. You must synchronize on it, when you modify
     * and iterate over it from multiple threads. E.g. this holds for analyzers
     * supporting parallel processing during their analysis phase.
     *
     * @return the dependencies identified
     * @see Collections#synchronizedList(List)
     * @see Analyzer#supportsParallelProcessing()
     */
    public List<Dependency> getDependencies() {
        return dependencies;
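Since getDependencies() now returns the engine's synchronized list itself, any caller that iterates it while parallel analyzers may be mutating it has to hold the list's monitor for the duration of the loop, as the updated javadoc states. A minimal sketch of that caller-side pattern follows; Engine and Dependency are the dependency-check types shown in this diff, while the class name and the ".jar" filter are invented purely for illustration.

```java
import java.util.ArrayList;
import java.util.List;

import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.dependency.Dependency;

/**
 * Illustrative helper only: shows the locking discipline required by the
 * synchronized list returned from Engine.getDependencies().
 */
public final class DependencySnapshot {

    private DependencySnapshot() {
    }

    /**
     * Copies the dependencies whose file name ends with ".jar" (an arbitrary
     * example filter) into a new list that is safe to use without locking.
     */
    public static List<Dependency> jarDependencies(Engine engine) {
        final List<Dependency> dependencies = engine.getDependencies();
        final List<Dependency> jars = new ArrayList<Dependency>();
        // Hold the list's monitor for the whole iteration, as required by
        // Collections.synchronizedList and the javadoc above.
        synchronized (dependencies) {
            for (Dependency d : dependencies) {
                if (d.getFileName() != null && d.getFileName().endsWith(".jar")) {
                    jars.add(d);
                }
            }
        }
        return jars;
    }
}
```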
@@ -170,7 +181,10 @@ public class Engine implements FileFilter {
|
||||
* @param dependencies the dependencies
|
||||
*/
|
||||
public void setDependencies(List<Dependency> dependencies) {
|
||||
this.dependencies = dependencies;
|
||||
synchronized (this.dependencies) {
|
||||
this.dependencies.clear();
|
||||
this.dependencies.addAll(dependencies);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -183,9 +197,24 @@ public class Engine implements FileFilter {
|
||||
* @since v0.3.2.5
|
||||
*/
|
||||
public List<Dependency> scan(String[] paths) {
|
||||
return scan(paths, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans an array of files or directories. If a directory is specified, it
|
||||
* will be scanned recursively. Any dependencies identified are added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param paths an array of paths to files or directories to be analyzed
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of dependencies scanned
|
||||
* @since v1.4.4
|
||||
*/
|
||||
public List<Dependency> scan(String[] paths, String projectReference) {
|
||||
final List<Dependency> deps = new ArrayList<Dependency>();
|
||||
for (String path : paths) {
|
||||
final List<Dependency> d = scan(path);
|
||||
final List<Dependency> d = scan(path, projectReference);
|
||||
if (d != null) {
|
||||
deps.addAll(d);
|
||||
}
|
||||
@@ -202,8 +231,23 @@ public class Engine implements FileFilter {
|
||||
* @return the list of dependencies scanned
|
||||
*/
|
||||
public List<Dependency> scan(String path) {
|
||||
return scan(path, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a given file or directory. If a directory is specified, it will be
|
||||
* scanned recursively. Any dependencies identified are added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param path the path to a file or directory to be analyzed
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of dependencies scanned
|
||||
* @since v1.4.4
|
||||
*/
|
||||
public List<Dependency> scan(String path, String projectReference) {
|
||||
final File file = new File(path);
|
||||
return scan(file);
|
||||
return scan(file, projectReference);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -216,9 +260,24 @@ public class Engine implements FileFilter {
|
||||
* @since v0.3.2.5
|
||||
*/
|
||||
public List<Dependency> scan(File[] files) {
|
||||
return scan(files, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans an array of files or directories. If a directory is specified, it
|
||||
* will be scanned recursively. Any dependencies identified are added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param files an array of paths to files or directories to be analyzed.
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of dependencies
|
||||
* @since v1.4.4
|
||||
*/
|
||||
public List<Dependency> scan(File[] files, String projectReference) {
|
||||
final List<Dependency> deps = new ArrayList<Dependency>();
|
||||
for (File file : files) {
|
||||
final List<Dependency> d = scan(file);
|
||||
final List<Dependency> d = scan(file, projectReference);
|
||||
if (d != null) {
|
||||
deps.addAll(d);
|
||||
}
|
||||
@@ -236,9 +295,24 @@ public class Engine implements FileFilter {
|
||||
* @since v0.3.2.5
|
||||
*/
|
||||
public List<Dependency> scan(Collection<File> files) {
|
||||
return scan(files, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a collection of files or directories. If a directory is specified,
|
||||
* it will be scanned recursively. Any dependencies identified are added to
|
||||
* the dependency collection.
|
||||
*
|
||||
* @param files a set of paths to files or directories to be analyzed
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of dependencies scanned
|
||||
* @since v1.4.4
|
||||
*/
|
||||
public List<Dependency> scan(Collection<File> files, String projectReference) {
|
||||
final List<Dependency> deps = new ArrayList<Dependency>();
|
||||
for (File file : files) {
|
||||
final List<Dependency> d = scan(file);
|
||||
final List<Dependency> d = scan(file, projectReference);
|
||||
if (d != null) {
|
||||
deps.addAll(d);
|
||||
}
|
||||
@@ -256,11 +330,26 @@ public class Engine implements FileFilter {
|
||||
* @since v0.3.2.4
|
||||
*/
|
||||
public List<Dependency> scan(File file) {
|
||||
return scan(file, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a given file or directory. If a directory is specified, it will be
|
||||
* scanned recursively. Any dependencies identified are added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param file the path to a file or directory to be analyzed
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of dependencies scanned
|
||||
* @since v1.4.4
|
||||
*/
|
||||
public List<Dependency> scan(File file, String projectReference) {
|
||||
if (file.exists()) {
|
||||
if (file.isDirectory()) {
|
||||
return scanDirectory(file);
|
||||
return scanDirectory(file, projectReference);
|
||||
} else {
|
||||
final Dependency d = scanFile(file);
|
||||
final Dependency d = scanFile(file, projectReference);
|
||||
if (d != null) {
|
||||
final List<Dependency> deps = new ArrayList<Dependency>();
|
||||
deps.add(d);
|
||||
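Each public scan method now has an overload that takes a projectReference, and scanFile() (later in this hunk series) attaches that reference both to newly discovered dependencies and to already-seen ones that match by SHA-1. A hedged usage sketch of the new overloads follows; the paths and reference names are invented, and a real run would also need the Settings and database initialization that the CLI performs before constructing an Engine.

```java
import java.util.List;

import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.dependency.Dependency;

public class ProjectReferenceScanSketch {

    public static void main(String[] args) throws Exception {
        // Assumes dependency-check settings and the local database have already
        // been initialized, as org.owasp.dependencycheck.App does for the CLI.
        final Engine engine = new Engine();
        try {
            // One engine scans two hypothetical modules; the second argument
            // records the project or scope each dependency was found in.
            engine.scan("app/web/lib", "web");
            engine.scan("app/batch/lib", "batch");
            engine.analyzeDependencies();

            final List<Dependency> dependencies = engine.getDependencies();
            synchronized (dependencies) {
                for (Dependency d : dependencies) {
                    // A jar present in both modules is reported once, carrying both
                    // project references thanks to the SHA-1 matching in scanFile().
                    System.out.println(d.getFileName() + " -> " + d.getProjectReferences());
                }
            }
        } finally {
            engine.cleanup();
        }
    }
}
```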
@@ -279,17 +368,31 @@ public class Engine implements FileFilter {
|
||||
* @return the list of Dependency objects scanned
|
||||
*/
|
||||
protected List<Dependency> scanDirectory(File dir) {
|
||||
return scanDirectory(dir, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively scans files and directories. Any dependencies identified are
|
||||
* added to the dependency collection.
|
||||
*
|
||||
* @param dir the directory to scan
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of Dependency objects scanned
|
||||
* @since v1.4.4
|
||||
*/
|
||||
protected List<Dependency> scanDirectory(File dir, String projectReference) {
|
||||
final File[] files = dir.listFiles();
|
||||
final List<Dependency> deps = new ArrayList<Dependency>();
|
||||
if (files != null) {
|
||||
for (File f : files) {
|
||||
if (f.isDirectory()) {
|
||||
final List<Dependency> d = scanDirectory(f);
|
||||
final List<Dependency> d = scanDirectory(f, projectReference);
|
||||
if (d != null) {
|
||||
deps.addAll(d);
|
||||
}
|
||||
} else {
|
||||
final Dependency d = scanFile(f);
|
||||
final Dependency d = scanFile(f, projectReference);
|
||||
deps.add(d);
|
||||
}
|
||||
}
|
||||
@@ -305,14 +408,54 @@ public class Engine implements FileFilter {
|
||||
* @return the scanned dependency
|
||||
*/
|
||||
protected Dependency scanFile(File file) {
|
||||
return scanFile(file, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a specified file. If a dependency is identified it is added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param file The file to scan
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the scanned dependency
|
||||
* @since v1.4.4
|
||||
*/
|
||||
protected Dependency scanFile(File file, String projectReference) {
|
||||
Dependency dependency = null;
|
||||
if (file.isFile()) {
|
||||
if (accept(file)) {
|
||||
dependency = new Dependency(file);
|
||||
dependencies.add(dependency);
|
||||
if (projectReference != null) {
|
||||
dependency.addProjectReference(projectReference);
|
||||
}
|
||||
final String sha1 = dependency.getSha1sum();
|
||||
boolean found = false;
|
||||
synchronized (dependencies) {
|
||||
if (sha1 != null) {
|
||||
for (Dependency existing : dependencies) {
|
||||
if (sha1.equals(existing.getSha1sum())) {
|
||||
found = true;
|
||||
if (projectReference != null) {
|
||||
existing.addProjectReference(projectReference);
|
||||
}
|
||||
if (existing.getActualFilePath() != null && dependency.getActualFilePath() != null
|
||||
&& !existing.getActualFilePath().equals(dependency.getActualFilePath())) {
|
||||
existing.addRelatedDependency(dependency);
|
||||
} else {
|
||||
dependency = existing;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
dependencies.add(dependency);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
LOGGER.debug("Path passed to scanFile(File) is not a file: {}. Skipping the file.", file);
|
||||
}
|
||||
} else {
|
||||
LOGGER.debug("Path passed to scanFile(File) is not a file: {}. Skipping the file.", file);
|
||||
}
|
||||
return dependency;
|
||||
}
|
||||
@@ -323,7 +466,7 @@ public class Engine implements FileFilter {
|
||||
* iterates over a copy of the dependencies list. Thus, the potential for
|
||||
* {@link java.util.ConcurrentModificationException}s is avoided, and
|
||||
* analyzers may safely add or remove entries from the dependencies list.
|
||||
*
|
||||
* <p>
|
||||
* Every effort is made to complete analysis on the dependencies. In some
|
||||
* cases an exception will occur with part of the analysis being performed
|
||||
* which may not affect the entire analysis. If an exception occurs it will
|
||||
@@ -333,7 +476,7 @@ public class Engine implements FileFilter {
|
||||
* during analysis
|
||||
*/
|
||||
public void analyzeDependencies() throws ExceptionCollection {
|
||||
final List<Throwable> exceptions = new ArrayList<Throwable>();
|
||||
final List<Throwable> exceptions = Collections.synchronizedList(new ArrayList<Throwable>());
|
||||
boolean autoUpdate = true;
|
||||
try {
|
||||
autoUpdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
|
||||
@@ -356,61 +499,33 @@ public class Engine implements FileFilter {
|
||||
try {
|
||||
ensureDataExists();
|
||||
} catch (NoDataException ex) {
|
||||
LOGGER.error("{}\n\nUnable to continue dependency-check analysis.", ex.getMessage());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
throw new ExceptionCollection("Unable to continue dependency-check analysis.", exceptions, true);
|
||||
throwFatalExceptionCollection("Unable to continue dependency-check analysis.", ex, exceptions);
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.error("{}\n\nUnable to continue dependency-check analysis.", ex.getMessage());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
throw new ExceptionCollection("Unable to connect to the dependency-check database", exceptions, true);
|
||||
throwFatalExceptionCollection("Unable to connect to the dependency-check database.", ex, exceptions);
|
||||
}
|
||||
|
||||
LOGGER.debug("\n----------------------------------------------------\nBEGIN ANALYSIS\n----------------------------------------------------");
|
||||
LOGGER.info("Analysis Starting");
|
||||
LOGGER.info("Analysis Started");
|
||||
final long analysisStart = System.currentTimeMillis();
|
||||
|
||||
// analysis phases
|
||||
for (AnalysisPhase phase : AnalysisPhase.values()) {
|
||||
final List<Analyzer> analyzerList = analyzers.get(phase);
|
||||
|
||||
for (Analyzer a : analyzerList) {
|
||||
for (final Analyzer analyzer : analyzerList) {
|
||||
final long analyzerStart = System.currentTimeMillis();
|
||||
try {
|
||||
a = initializeAnalyzer(a);
|
||||
initializeAnalyzer(analyzer);
|
||||
} catch (InitializationException ex) {
|
||||
exceptions.add(ex);
|
||||
continue;
|
||||
}
|
||||
|
||||
/* need to create a copy of the collection because some of the
|
||||
* analyzers may modify it. This prevents ConcurrentModificationExceptions.
|
||||
* This is okay for adds/deletes because it happens per analyzer.
|
||||
*/
|
||||
LOGGER.debug("Begin Analyzer '{}'", a.getName());
|
||||
final Set<Dependency> dependencySet = new HashSet<Dependency>(dependencies);
|
||||
for (Dependency d : dependencySet) {
|
||||
boolean shouldAnalyze = true;
|
||||
if (a instanceof FileTypeAnalyzer) {
|
||||
final FileTypeAnalyzer fAnalyzer = (FileTypeAnalyzer) a;
|
||||
shouldAnalyze = fAnalyzer.accept(d.getActualFile());
|
||||
}
|
||||
if (shouldAnalyze) {
|
||||
LOGGER.debug("Begin Analysis of '{}'", d.getActualFilePath());
|
||||
try {
|
||||
a.analyze(d, this);
|
||||
} catch (AnalysisException ex) {
|
||||
LOGGER.warn("An error occurred while analyzing '{}'.", d.getActualFilePath());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
} catch (Throwable ex) {
|
||||
//final AnalysisException ax = new AnalysisException(axMsg, ex);
|
||||
LOGGER.warn("An unexpected error occurred during analysis of '{}'", d.getActualFilePath());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
executeAnalysisTasks(analyzer, exceptions);
|
||||
|
||||
final long analyzerDurationMillis = System.currentTimeMillis() - analyzerStart;
|
||||
final long analyzerDurationSeconds = TimeUnit.MILLISECONDS.toSeconds(analyzerDurationMillis);
|
||||
LOGGER.info("Finished {} ({} seconds)", analyzer.getName(), analyzerDurationSeconds);
|
||||
}
|
||||
}
|
||||
for (AnalysisPhase phase : AnalysisPhase.values()) {
|
||||
@@ -422,9 +537,80 @@ public class Engine implements FileFilter {
|
||||
}
|
||||
|
||||
LOGGER.debug("\n----------------------------------------------------\nEND ANALYSIS\n----------------------------------------------------");
|
||||
LOGGER.info("Analysis Complete ({} ms)", System.currentTimeMillis() - analysisStart);
|
||||
final long analysisDurationSeconds = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - analysisStart);
|
||||
LOGGER.info("Analysis Complete ({} seconds)", analysisDurationSeconds);
|
||||
if (exceptions.size() > 0) {
|
||||
throw new ExceptionCollection("One or more exceptions occured during dependency-check analysis", exceptions);
|
||||
throw new ExceptionCollection("One or more exceptions occurred during dependency-check analysis", exceptions);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes executes the analyzer using multiple threads.
|
||||
*
|
||||
* @param exceptions a collection of exceptions that occurred during
|
||||
* analysis
|
||||
* @param analyzer the analyzer to execute
|
||||
* @throws ExceptionCollection thrown if exceptions occurred during analysis
|
||||
*/
|
||||
void executeAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) throws ExceptionCollection {
|
||||
LOGGER.debug("Starting {}", analyzer.getName());
|
||||
final List<AnalysisTask> analysisTasks = getAnalysisTasks(analyzer, exceptions);
|
||||
final ExecutorService executorService = getExecutorService(analyzer);
|
||||
|
||||
try {
|
||||
final List<Future<Void>> results = executorService.invokeAll(analysisTasks, 10, TimeUnit.MINUTES);
|
||||
|
||||
// ensure there was no exception during execution
|
||||
for (Future<Void> result : results) {
|
||||
try {
|
||||
result.get();
|
||||
} catch (ExecutionException e) {
|
||||
throwFatalExceptionCollection("Analysis task failed with a fatal exception.", e, exceptions);
|
||||
} catch (CancellationException e) {
|
||||
throwFatalExceptionCollection("Analysis task timed out.", e, exceptions);
|
||||
}
|
||||
}
|
||||
} catch (InterruptedException e) {
|
||||
throwFatalExceptionCollection("Analysis has been interrupted.", e, exceptions);
|
||||
} finally {
|
||||
executorService.shutdown();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the analysis tasks for the dependencies.
|
||||
*
|
||||
* @param analyzer the analyzer to create tasks for
|
||||
* @param exceptions the collection of exceptions to collect
|
||||
* @return a collection of analysis tasks
|
||||
*/
|
||||
List<AnalysisTask> getAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) {
|
||||
final List<AnalysisTask> result = new ArrayList<AnalysisTask>();
|
||||
synchronized (dependencies) {
|
||||
for (final Dependency dependency : dependencies) {
|
||||
final AnalysisTask task = new AnalysisTask(analyzer, dependency, this, exceptions);
|
||||
result.add(task);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the executor service for a given analyzer.
|
||||
*
|
||||
* @param analyzer the analyzer to obtain an executor
|
||||
* @return the executor service
|
||||
*/
|
||||
ExecutorService getExecutorService(Analyzer analyzer) {
|
||||
if (analyzer.supportsParallelProcessing()) {
|
||||
// just a fair trade-off that should be reasonable for all analyzer types
|
||||
final int maximumNumberOfThreads = 4 * Runtime.getRuntime().availableProcessors();
|
||||
|
||||
LOGGER.debug("Parallel processing with up to {} threads: {}.", maximumNumberOfThreads, analyzer.getName());
|
||||
return Executors.newFixedThreadPool(maximumNumberOfThreads);
|
||||
} else {
|
||||
LOGGER.debug("Parallel processing is not supported: {}.", analyzer.getName());
|
||||
return Executors.newSingleThreadExecutor();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -539,6 +725,16 @@ public class Engine implements FileFilter {
|
||||
return this.fileTypeAnalyzers;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a file type analyzer. This has been added solely to assist in unit
|
||||
* testing the Engine.
|
||||
*
|
||||
* @param fta the file type analyzer to add
|
||||
*/
|
||||
protected void addFileTypeAnalyzer(FileTypeAnalyzer fta) {
|
||||
this.fileTypeAnalyzers.add(fta);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks the CPE Index to ensure documents exists. If none exist a
|
||||
* NoDataException is thrown.
|
||||
@@ -560,4 +756,20 @@ public class Engine implements FileFilter {
|
||||
cve.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs and throws a fatal exception collection.
|
||||
*
|
||||
* @param message the exception message
|
||||
* @param throwable the cause
|
||||
* @param exceptions a collection of exception to include
|
||||
* @throws ExceptionCollection a collection of exceptions that occurred
|
||||
* during analysis
|
||||
*/
|
||||
private void throwFatalExceptionCollection(String message, Throwable throwable, List<Throwable> exceptions) throws ExceptionCollection {
|
||||
LOGGER.error("{}\n\n{}", throwable.getMessage(), message);
|
||||
LOGGER.debug("", throwable);
|
||||
exceptions.add(throwable);
|
||||
throw new ExceptionCollection(message, exceptions, true);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -46,4 +46,12 @@ public abstract class AbstractAnalyzer implements Analyzer {
    public void close() throws Exception {
        //do nothing
    }

    /**
     * The default is to support parallel processing.
     */
    @Override
    public boolean supportsParallelProcessing() {
        return true;
    }
}
@@ -83,7 +83,7 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
|
||||
/**
|
||||
* A flag indicating whether or not the analyzer is enabled.
|
||||
*/
|
||||
private boolean enabled = true;
|
||||
private volatile boolean enabled = true;
|
||||
|
||||
/**
|
||||
* Get the value of enabled.
|
||||
|
||||
@@ -130,20 +130,36 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
|
||||
}
|
||||
} else {
|
||||
file = new File(suppressionFilePath);
|
||||
InputStream suppressionsFromClasspath = null;
|
||||
if (!file.exists()) {
|
||||
final InputStream suppressionsFromClasspath = this.getClass().getClassLoader().getResourceAsStream(suppressionFilePath);
|
||||
if (suppressionsFromClasspath != null) {
|
||||
deleteTempFile = true;
|
||||
file = FileUtils.getTempFile("suppression", "xml");
|
||||
try {
|
||||
org.apache.commons.io.FileUtils.copyInputStreamToFile(suppressionsFromClasspath, file);
|
||||
} catch (IOException ex) {
|
||||
throwSuppressionParseException("Unable to locate suppressions file in classpath", ex);
|
||||
try {
|
||||
suppressionsFromClasspath = this.getClass().getClassLoader().getResourceAsStream(suppressionFilePath);
|
||||
if (suppressionsFromClasspath != null) {
|
||||
deleteTempFile = true;
|
||||
file = FileUtils.getTempFile("suppression", "xml");
|
||||
try {
|
||||
org.apache.commons.io.FileUtils.copyInputStreamToFile(suppressionsFromClasspath, file);
|
||||
} catch (IOException ex) {
|
||||
throwSuppressionParseException("Unable to locate suppressions file in classpath", ex);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
if (suppressionsFromClasspath != null) {
|
||||
try {
|
||||
suppressionsFromClasspath.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Failed to close stream", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (file != null) {
|
||||
if (!file.exists()) {
|
||||
final String msg = String.format("Suppression file '%s' does not exists", file.getPath());
|
||||
LOGGER.warn(msg);
|
||||
throw new SuppressionParseException(msg);
|
||||
}
|
||||
try {
|
||||
rules.addAll(parser.parseSuppressionRules(file));
|
||||
LOGGER.debug("{} suppression rules were loaded.", rules.size());
|
||||
@@ -157,6 +173,8 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
|
||||
throwSuppressionParseException("Unable to fetch the configured suppression file", ex);
|
||||
} catch (MalformedURLException ex) {
|
||||
throwSuppressionParseException("Configured suppression file has an invalid URL", ex);
|
||||
} catch (SuppressionParseException ex) {
|
||||
throw ex;
|
||||
} catch (IOException ex) {
|
||||
throwSuppressionParseException("Unable to create temp file for suppressions", ex);
|
||||
} finally {
|
||||
|
||||
@@ -75,4 +75,12 @@ public interface Analyzer {
     * @throws Exception is thrown if an exception occurs closing the analyzer.
     */
    void close() throws Exception;

    /**
     * Returns whether multiple instances of the same type of analyzer can run in parallel.
     * Note that running analyzers of different types in parallel is not supported at all.
     *
     * @return {@code true} if the analyzer supports parallel processing, {@code false} else
     */
    boolean supportsParallelProcessing();
}
@@ -25,10 +25,8 @@ import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Enumeration;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
@@ -220,6 +218,18 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Does not support parallel processing as it both modifies and iterates
|
||||
* over the engine's list of dependencies.
|
||||
*
|
||||
* @see #analyzeFileType(Dependency, Engine)
|
||||
* @see #findMoreDependencies(Engine, File)
|
||||
*/
|
||||
@Override
|
||||
public boolean supportsParallelProcessing() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes a given dependency. If the dependency is an archive, such as a
|
||||
* WAR or EAR, the contents are extracted, scanned, and added to the list of
|
||||
@@ -236,25 +246,42 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
extractFiles(f, tmpDir, engine);
|
||||
|
||||
//make a copy
|
||||
final Set<Dependency> dependencySet = findMoreDependencies(engine, tmpDir);
|
||||
final List<Dependency> dependencySet = findMoreDependencies(engine, tmpDir);
|
||||
|
||||
if (!dependencySet.isEmpty()) {
|
||||
for (Dependency d : dependencySet) {
|
||||
//fix the dependency's display name and path
|
||||
final String displayPath = String.format("%s%s",
|
||||
dependency.getFilePath(),
|
||||
d.getActualFilePath().substring(tmpDir.getAbsolutePath().length()));
|
||||
final String displayName = String.format("%s: %s",
|
||||
dependency.getFileName(),
|
||||
d.getFileName());
|
||||
d.setFilePath(displayPath);
|
||||
d.setFileName(displayName);
|
||||
if (d.getFilePath().startsWith(tmpDir.getAbsolutePath())) {
|
||||
//fix the dependency's display name and path
|
||||
final String displayPath = String.format("%s%s",
|
||||
dependency.getFilePath(),
|
||||
d.getActualFilePath().substring(tmpDir.getAbsolutePath().length()));
|
||||
final String displayName = String.format("%s: %s",
|
||||
dependency.getFileName(),
|
||||
d.getFileName());
|
||||
d.setFilePath(displayPath);
|
||||
d.setFileName(displayName);
|
||||
d.setProjectReferences(dependency.getProjectReferences());
|
||||
|
||||
//TODO - can we get more evidence from the parent? EAR contains module name, etc.
|
||||
//analyze the dependency (i.e. extract files) if it is a supported type.
|
||||
if (this.accept(d.getActualFile()) && scanDepth < MAX_SCAN_DEPTH) {
|
||||
scanDepth += 1;
|
||||
analyze(d, engine);
|
||||
scanDepth -= 1;
|
||||
//TODO - can we get more evidence from the parent? EAR contains module name, etc.
|
||||
//analyze the dependency (i.e. extract files) if it is a supported type.
|
||||
if (this.accept(d.getActualFile()) && scanDepth < MAX_SCAN_DEPTH) {
|
||||
scanDepth += 1;
|
||||
analyze(d, engine);
|
||||
scanDepth -= 1;
|
||||
}
|
||||
} else {
|
||||
for (Dependency sub : dependencySet) {
|
||||
if (sub.getFilePath().startsWith(tmpDir.getAbsolutePath())) {
|
||||
final String displayPath = String.format("%s%s",
|
||||
dependency.getFilePath(),
|
||||
sub.getActualFilePath().substring(tmpDir.getAbsolutePath().length()));
|
||||
final String displayName = String.format("%s: %s",
|
||||
dependency.getFileName(),
|
||||
sub.getFileName());
|
||||
sub.setFilePath(displayPath);
|
||||
sub.setFileName(displayName);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -279,30 +306,37 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
final String fileName = dependency.getFileName();
|
||||
|
||||
LOGGER.info("The zip file '{}' appears to be a JAR file, making a copy and analyzing it as a JAR.", fileName);
|
||||
|
||||
final File tmpLoc = new File(tdir, fileName.substring(0, fileName.length() - 3) + "jar");
|
||||
//store the archives sha1 and change it so that the engine doesn't think the zip and jar file are the same
|
||||
// and add it is a related dependency.
|
||||
final String archiveSha1 = dependency.getSha1sum();
|
||||
try {
|
||||
org.apache.commons.io.FileUtils.copyFile(tdir, tmpLoc);
|
||||
final Set<Dependency> dependencySet = findMoreDependencies(engine, tmpLoc);
|
||||
dependency.setSha1sum("");
|
||||
org.apache.commons.io.FileUtils.copyFile(dependency.getActualFile(), tmpLoc);
|
||||
final List<Dependency> dependencySet = findMoreDependencies(engine, tmpLoc);
|
||||
if (!dependencySet.isEmpty()) {
|
||||
if (dependencySet.size() != 1) {
|
||||
LOGGER.info("Deep copy of ZIP to JAR file resulted in more than one dependency?");
|
||||
}
|
||||
for (Dependency d : dependencySet) {
|
||||
//fix the dependency's display name and path
|
||||
d.setFilePath(dependency.getFilePath());
|
||||
d.setDisplayFileName(dependency.getFileName());
|
||||
if (d.getActualFile().equals(tmpLoc)) {
|
||||
d.setFilePath(dependency.getFilePath());
|
||||
d.setDisplayFileName(dependency.getFileName());
|
||||
} else {
|
||||
for (Dependency sub : d.getRelatedDependencies()) {
|
||||
if (sub.getActualFile().equals(tmpLoc)) {
|
||||
sub.setFilePath(dependency.getFilePath());
|
||||
sub.setDisplayFileName(dependency.getFileName());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Unable to perform deep copy on '{}'", dependency.getActualFile().getPath(), ex);
|
||||
} finally {
|
||||
dependency.setSha1sum(archiveSha1);
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* An empty dependency set.
|
||||
*/
|
||||
private static final Set<Dependency> EMPTY_DEPENDENCY_SET = Collections.emptySet();
|
||||
|
||||
/**
|
||||
* Scan the given file/folder, and return any new dependencies found.
|
||||
@@ -311,20 +345,9 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @param file target of scanning
|
||||
* @return any dependencies that weren't known to the engine before
|
||||
*/
|
||||
private static Set<Dependency> findMoreDependencies(Engine engine, File file) {
|
||||
final List<Dependency> before = new ArrayList<Dependency>(engine.getDependencies());
|
||||
engine.scan(file);
|
||||
final List<Dependency> after = engine.getDependencies();
|
||||
final boolean sizeChanged = before.size() != after.size();
|
||||
final Set<Dependency> newDependencies;
|
||||
if (sizeChanged) {
|
||||
//get the new dependencies
|
||||
newDependencies = new HashSet<Dependency>(after);
|
||||
newDependencies.removeAll(before);
|
||||
} else {
|
||||
newDependencies = EMPTY_DEPENDENCY_SET;
|
||||
}
|
||||
return newDependencies;
|
||||
private static List<Dependency> findMoreDependencies(Engine engine, File file) {
|
||||
final List<Dependency> added = engine.scan(file);
|
||||
return added;
|
||||
}
|
||||
|
||||
    /**
@@ -357,32 +380,49 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
     */
    private void extractFiles(File archive, File destination, Engine engine) throws AnalysisException {
        if (archive != null && destination != null) {
            FileInputStream fis;
            String archiveExt = FileUtils.getFileExtension(archive.getName());
            if (archiveExt == null) {
                return;
            }
            archiveExt = archiveExt.toLowerCase();

            final FileInputStream fis;
            try {
                fis = new FileInputStream(archive);
            } catch (FileNotFoundException ex) {
                LOGGER.debug("", ex);
                throw new AnalysisException("Archive file was not found.", ex);
            }
            final String archiveExt = FileUtils.getFileExtension(archive.getName()).toLowerCase();
            BufferedInputStream in = null;
            ZipArchiveInputStream zin = null;
            TarArchiveInputStream tin = null;
            GzipCompressorInputStream gin = null;
            BZip2CompressorInputStream bzin = null;
            try {
                if (ZIPPABLES.contains(archiveExt)) {
                    final BufferedInputStream in = new BufferedInputStream(fis);
                    in = new BufferedInputStream(fis);
                    ensureReadableJar(archiveExt, in);
                    extractArchive(new ZipArchiveInputStream(in), destination, engine);
                    zin = new ZipArchiveInputStream(in);
                    extractArchive(zin, destination, engine);
                } else if ("tar".equals(archiveExt)) {
                    extractArchive(new TarArchiveInputStream(new BufferedInputStream(fis)), destination, engine);
                    in = new BufferedInputStream(fis);
                    tin = new TarArchiveInputStream(in);
                    extractArchive(tin, destination, engine);
                } else if ("gz".equals(archiveExt) || "tgz".equals(archiveExt)) {
                    final String uncompressedName = GzipUtils.getUncompressedFilename(archive.getName());
                    final File f = new File(destination, uncompressedName);
                    if (engine.accept(f)) {
                        decompressFile(new GzipCompressorInputStream(new BufferedInputStream(fis)), f);
                        in = new BufferedInputStream(fis);
                        gin = new GzipCompressorInputStream(in);
                        decompressFile(gin, f);
                    }
                } else if ("bz2".equals(archiveExt) || "tbz2".equals(archiveExt)) {
                    final String uncompressedName = BZip2Utils.getUncompressedFilename(archive.getName());
                    final File f = new File(destination, uncompressedName);
                    if (engine.accept(f)) {
                        decompressFile(new BZip2CompressorInputStream(new BufferedInputStream(fis)), f);
                        in = new BufferedInputStream(fis);
                        bzin = new BZip2CompressorInputStream(in);
                        decompressFile(bzin, f);
                    }
                }
            } catch (ArchiveExtractionException ex) {
@@ -392,7 +432,14 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
                LOGGER.warn("Exception reading archive '{}'.", archive.getName());
                LOGGER.debug("", ex);
            } finally {
                //overly verbose and not needed... but keeping it anyway due to
                //having issue with file handles being left open
                close(fis);
                close(in);
                close(zin);
                close(tin);
                close(gin);
                close(bzin);
            }
        }
    }
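The finally block above closes every stream by hand because earlier versions leaked file handles. Purely as an illustrative sketch (not part of this change; it assumes the same extractArchive helper and exception types used in the hunk), Java 7 try-with-resources gives the same guarantee for the zip branch with less bookkeeping:

    // Illustrative sketch only: the zip path with try-with-resources, so the streams
    // are closed even when extraction fails. Assumes extractArchive(...) as used above.
    private void extractZip(File archive, File destination, Engine engine)
            throws AnalysisException, ArchiveExtractionException {
        try (FileInputStream fis = new FileInputStream(archive);
                BufferedInputStream in = new BufferedInputStream(fis);
                ZipArchiveInputStream zin = new ZipArchiveInputStream(in)) {
            extractArchive(zin, destination, engine);
        } catch (IOException ex) {
            throw new AnalysisException("Archive file could not be read.", ex);
        }
    }
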
@@ -414,8 +461,9 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
        if ("jar".equals(archiveExt) && in.markSupported()) {
            in.mark(7);
            final byte[] b = new byte[7];
            in.read(b);
            if (b[0] == '#'
            final int read = in.read(b);
            if (read == 7
                    && b[0] == '#'
                    && b[1] == '!'
                    && b[2] == '/'
                    && b[3] == 'b'
@@ -441,6 +489,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
                    }
                }
            }
        } else {
            in.reset();
        }
    }
}
@@ -45,6 +45,7 @@ import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.apache.commons.lang3.SystemUtils;
|
||||
|
||||
/**
|
||||
* Analyzer for getting company, product, and version information from a .NET
|
||||
@@ -71,10 +72,6 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* The temp value for GrokAssembly.exe
|
||||
*/
|
||||
private File grokAssemblyExe = null;
|
||||
/**
|
||||
* The DocumentBuilder for parsing the XML
|
||||
*/
|
||||
private DocumentBuilder builder;
|
||||
/**
|
||||
* Logger
|
||||
*/
|
||||
@@ -85,18 +82,19 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*
|
||||
* @return the list of arguments to begin populating the ProcessBuilder
|
||||
*/
|
||||
private List<String> buildArgumentList() {
|
||||
protected List<String> buildArgumentList() {
|
||||
// Use file.separator as a wild guess as to whether this is Windows
|
||||
final List<String> args = new ArrayList<String>();
|
||||
if (!"\\".equals(System.getProperty("file.separator"))) {
|
||||
if (!SystemUtils.IS_OS_WINDOWS) {
|
||||
if (Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH) != null) {
|
||||
args.add(Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH));
|
||||
} else {
|
||||
} else if (isInPath("mono")) {
|
||||
args.add("mono");
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
args.add(grokAssemblyExe.getPath());
|
||||
|
||||
return args;
|
||||
}
|
||||
|
||||
@@ -116,12 +114,17 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
final List<String> args = buildArgumentList();
|
||||
if (args == null) {
|
||||
LOGGER.warn("Assembly Analyzer was unable to execute");
|
||||
return;
|
||||
}
|
||||
args.add(dependency.getActualFilePath());
|
||||
final ProcessBuilder pb = new ProcessBuilder(args);
|
||||
Document doc = null;
|
||||
try {
|
||||
final Process proc = pb.start();
|
||||
|
||||
final DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
|
||||
doc = builder.parse(proc.getInputStream());
|
||||
|
||||
// Try evacuating the error stream
|
||||
@@ -170,6 +173,8 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
product, Confidence.HIGH));
|
||||
}
|
||||
|
||||
} catch (ParserConfigurationException pce) {
|
||||
throw new AnalysisException("Error initializing the assembly analyzer", pce);
|
||||
} catch (IOException ioe) {
|
||||
throw new AnalysisException(ioe);
|
||||
} catch (SAXException saxe) {
|
||||
@@ -203,8 +208,6 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
IOUtils.copy(is, fos);
|
||||
|
||||
grokAssemblyExe = tempFile;
|
||||
// Set the temp file to get deleted when we're done
|
||||
grokAssemblyExe.deleteOnExit();
|
||||
LOGGER.debug("Extracted GrokAssembly.exe to {}", grokAssemblyExe.getPath());
|
||||
} catch (IOException ioe) {
|
||||
this.setEnabled(false);
|
||||
@@ -229,13 +232,32 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
// Now, need to see if GrokAssembly actually runs from this location.
|
||||
final List<String> args = buildArgumentList();
|
||||
//TODO this creates an "unreported" error - if someone doesn't look
// at the command output this could easily be missed (especially in an
// Ant or Maven build).
//
// We need to create a non-fatal warning error type that will
// get added to the report.
//TODO this idea needs to be replicated to the bundle audit analyzer.
if (args == null) {
|
||||
setEnabled(false);
|
||||
LOGGER.error("----------------------------------------------------");
|
||||
LOGGER.error(".NET Assembly Analyzer could not be initialized and at least one "
|
||||
+ "'exe' or 'dll' was scanned. The 'mono' executable could not be found on "
|
||||
+ "the path; either disable the Assembly Analyzer or configure the path mono.");
|
||||
LOGGER.error("----------------------------------------------------");
|
||||
return;
|
||||
}
|
||||
try {
|
||||
final ProcessBuilder pb = new ProcessBuilder(args);
|
||||
final Process p = pb.start();
|
||||
// Try evacuating the error stream
|
||||
IOUtils.copy(p.getErrorStream(), NullOutputStream.NULL_OUTPUT_STREAM);
|
||||
|
||||
final Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(p.getInputStream());
|
||||
final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
|
||||
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
|
||||
final DocumentBuilder builder = factory.newDocumentBuilder();
|
||||
final Document doc = builder.parse(p.getInputStream());
|
||||
final XPath xpath = XPathFactory.newInstance().newXPath();
|
||||
final String error = xpath.evaluate("/assembly/error", doc);
|
||||
if (p.waitFor() != 1 || error == null || error.isEmpty()) {
|
||||
@@ -246,6 +268,7 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
throw new InitializationException("Could not execute .NET AssemblyAnalyzer");
|
||||
}
|
||||
} catch (InitializationException e) {
|
||||
setEnabled(false);
|
||||
throw e;
|
||||
} catch (Throwable e) {
|
||||
LOGGER.warn("An error occurred with the .NET AssemblyAnalyzer;\n"
|
||||
@@ -254,12 +277,6 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("An error occurred with the .NET AssemblyAnalyzer", e);
|
||||
}
|
||||
try {
|
||||
builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
|
||||
} catch (ParserConfigurationException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Error initializing the assembly analyzer", ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -272,10 +289,12 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
super.close();
|
||||
try {
|
||||
if (grokAssemblyExe != null && !grokAssemblyExe.delete()) {
|
||||
LOGGER.debug("Unable to delete temporary GrokAssembly.exe; attempting delete on exit");
|
||||
grokAssemblyExe.deleteOnExit();
|
||||
}
|
||||
} catch (SecurityException se) {
|
||||
LOGGER.debug("Can't delete temporary GrokAssembly.exe");
|
||||
grokAssemblyExe.deleteOnExit();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -320,4 +339,29 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED;
|
||||
}
|
||||
|
||||
    /**
     * Tests to see if a file is in the system path. <b>Note</b> - the current
     * implementation only works on non-windows platforms. For purposes of the
     * AssemblyAnalyzer this is okay as this is only needed on Mac/*nix.
     *
     * @param file the executable to look for
     * @return <code>true</code> if the executable is found on the system path;
     * otherwise <code>false</code>
     */
    private boolean isInPath(String file) {
        final ProcessBuilder pb = new ProcessBuilder("which", file);
        try {
            final Process proc = pb.start();
            final int retCode = proc.waitFor();
            if (retCode == 0) {
                return true;
            }
        } catch (IOException ex) {
            LOGGER.debug("Path search failed for " + file);
        } catch (InterruptedException ex) {
            LOGGER.debug("Path search failed for " + file);
        }
        return false;
    }
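The `which`-based lookup above spawns a process per check and, as its Javadoc notes, is only meant for non-Windows hosts. Purely as an illustrative alternative (not part of this change, and the method name is invented), the PATH entries could be checked directly:

    // Illustrative alternative only - not the project's code: scan PATH entries
    // instead of invoking `which`, avoiding the extra process.
    private static boolean isOnSystemPath(String executable) {
        final String systemPath = System.getenv("PATH");
        if (systemPath == null) {
            return false;
        }
        for (String dir : systemPath.split(File.pathSeparator)) {
            if (new File(dir, executable).canExecute()) {
                return true;
            }
        }
        return false;
    }
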
}
|
||||
|
||||
@@ -31,8 +31,6 @@ import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.owasp.dependencycheck.exception.InitializationException;
@@ -178,11 +176,7 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
            }
        }
    } else {
        // copy, alter and set in case some other thread is iterating over
        final List<Dependency> dependencies = new ArrayList<Dependency>(
                engine.getDependencies());
        dependencies.remove(dependency);
        engine.setDependencies(dependencies);
        engine.getDependencies().remove(dependency);
    }
}

@@ -92,24 +92,10 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
    private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(".cmake")
            .addFilenames("CMakeLists.txt").build();

    /**
     * A reference to SHA1 message digest.
     */
    private static MessageDigest sha1 = null;

    static {
        try {
            sha1 = MessageDigest.getInstance("SHA1");
        } catch (NoSuchAlgorithmException e) {
            LOGGER.error(e.getMessage());
        }
    }

    /**
     * Returns the name of the CMake analyzer.
     *
     * @return the name of the analyzer
     *
     */
    @Override
    public String getName() {
@@ -137,13 +123,19 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
    }

    /**
     * No-op initializer implementation.
     * Initializes the analyzer.
     *
     * @throws InitializationException never thrown
     * @throws InitializationException thrown if an exception occurs getting an
     * instance of SHA1
     */
    @Override
    protected void initializeFileTypeAnalyzer() throws InitializationException {
        // Nothing to do here.
        try {
            getSha1MessageDigest();
        } catch (IllegalStateException ex) {
            setEnabled(false);
            throw new InitializationException("Unable to create SHA1 MessageDigest", ex);
        }
    }

    /**
@@ -196,6 +188,9 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
     * @param engine the dependency-check engine
     * @param contents the version information
     */
    @edu.umd.cs.findbugs.annotations.SuppressFBWarnings(
            value = "DM_DEFAULT_ENCODING",
            justification = "Default encoding is only used if UTF-8 is not available")
    private void analyzeSetVersionCommand(Dependency dependency, Engine engine, String contents) {
        Dependency currentDep = dependency;

@@ -226,6 +221,7 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
        } catch (UnsupportedEncodingException ex) {
            path = filePath.getBytes();
        }
        final MessageDigest sha1 = getSha1MessageDigest();
        currentDep.setSha1sum(Checksum.getHex(sha1.digest(path)));
        engine.getDependencies().add(currentDep);
    }
@@ -242,4 +238,18 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
    protected String getAnalyzerEnabledSettingKey() {
        return Settings.KEYS.ANALYZER_CMAKE_ENABLED;
    }

    /**
     * Returns the sha1 message digest.
     *
     * @return the sha1 message digest
     */
    private MessageDigest getSha1MessageDigest() {
        try {
            return MessageDigest.getInstance("SHA1");
        } catch (NoSuchAlgorithmException e) {
            LOGGER.error(e.getMessage());
            throw new IllegalStateException("Failed to obtain the SHA1 message digest.", e);
        }
    }
}
@@ -25,6 +25,8 @@ import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.StringTokenizer;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import org.apache.commons.lang3.builder.CompareToBuilder;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.index.CorruptIndexException;
|
||||
import org.apache.lucene.queryparser.classic.ParseException;
|
||||
@@ -58,7 +60,7 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class CPEAnalyzer implements Analyzer {
|
||||
public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
/**
|
||||
* The Logger.
|
||||
@@ -154,10 +156,10 @@ public class CPEAnalyzer implements Analyzer {
|
||||
cve.open();
|
||||
cpe = CpeMemoryIndex.getInstance();
|
||||
try {
|
||||
LOGGER.info("Creating the CPE Index");
|
||||
final long creationStart = System.currentTimeMillis();
|
||||
cpe.open(cve);
|
||||
LOGGER.info("CPE Index Created ({} ms)", System.currentTimeMillis() - creationStart);
|
||||
final long creationSeconds = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - creationStart);
|
||||
LOGGER.info("Created CPE Index ({} seconds)", creationSeconds);
|
||||
} catch (IndexException ex) {
|
||||
LOGGER.debug("IndexException", ex);
|
||||
throw new DatabaseException(ex);
|
||||
@@ -550,7 +552,7 @@ public class CPEAnalyzer implements Analyzer {
|
||||
final List<IdentifierMatch> collected = new ArrayList<IdentifierMatch>();
|
||||
|
||||
//TODO the following algorithm incorrectly identifies things as a lower version
// if there is lower confidence evidence when the current (highest) version number
// is newer than anything in the NVD.
for (Confidence conf : Confidence.values()) {
|
||||
for (Evidence evidence : dependency.getVersionEvidence().iterator(conf)) {
|
||||
@@ -801,6 +803,12 @@ public class CPEAnalyzer implements Analyzer {
|
||||
*/
|
||||
@Override
|
||||
public int compareTo(IdentifierMatch o) {
|
||||
return new CompareToBuilder()
|
||||
.append(confidence, o.confidence)
|
||||
.append(evidenceConfidence, o.evidenceConfidence)
|
||||
.append(identifier, o.identifier)
|
||||
.toComparison();
|
||||
/*
|
||||
int conf = this.confidence.compareTo(o.confidence);
|
||||
if (conf == 0) {
|
||||
conf = this.evidenceConfidence.compareTo(o.evidenceConfidence);
|
||||
@@ -809,6 +817,7 @@ public class CPEAnalyzer implements Analyzer {
|
||||
}
|
||||
}
|
||||
return conf;
|
||||
*/
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -75,7 +75,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* The analyzer should be disabled if there are errors, so this is a flag to
|
||||
* determine if such an error has occurred.
|
||||
*/
|
||||
private boolean errorFlag = false;
|
||||
private volatile boolean errorFlag = false;
|
||||
|
||||
/**
|
||||
* The searcher itself.
|
||||
@@ -229,7 +229,8 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
LOGGER.warn("Unable to download pom.xml for {} from Central; "
|
||||
+ "this could result in undetected CPE/CVEs.", dependency.getFileName());
|
||||
} finally {
|
||||
if (pomFile != null && !FileUtils.deleteQuietly(pomFile)) {
|
||||
if (pomFile != null && pomFile.exists() && !FileUtils.deleteQuietly(pomFile)) {
|
||||
LOGGER.debug("Failed to delete temporary pom file {}", pomFile.toString());
|
||||
pomFile.deleteOnExit();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,205 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2016 IBM Corporation. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
/**
|
||||
* This analyzer is used to analyze SWIFT and Objective-C packages by collecting
|
||||
* information from .podspec files used by the CocoaPods dependency manager; see
* https://cocoapods.org/.
*
|
||||
* @author Bianca Jiang (https://twitter.com/biancajiang)
|
||||
*/
|
||||
@Experimental
|
||||
public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
// private static final Logger LOGGER = LoggerFactory.getLogger(CocoaPodsAnalyzer.class);
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
private static final String ANALYZER_NAME = "CocoaPods Package Analyzer";
|
||||
|
||||
/**
|
||||
* The phase that this analyzer is intended to run in.
|
||||
*/
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION;
|
||||
|
||||
/**
|
||||
* The file name to scan.
|
||||
*/
|
||||
public static final String PODSPEC = "podspec";
|
||||
/**
|
||||
* Filter that detects files named "*.podspec".
|
||||
*/
|
||||
private static final FileFilter PODSPEC_FILTER = FileFilterBuilder.newInstance().addExtensions(PODSPEC).build();
|
||||
|
||||
    /**
     * The capture group #1 is the block variable. e.g. "Pod::Spec.new do
     * |spec|"
     */
    private static final Pattern PODSPEC_BLOCK_PATTERN = Pattern.compile("Pod::Spec\\.new\\s+?do\\s+?\\|(.+?)\\|");
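A quick illustration of what the pattern captures; the sample podspec text below is invented for this example and is not from the project:

    // Illustrative only: PODSPEC_BLOCK_PATTERN applied to a minimal podspec header.
    final String sample = "Pod::Spec.new do |spec|\n  spec.name = 'SomePod'\nend";
    final Matcher m = PODSPEC_BLOCK_PATTERN.matcher(sample);
    if (m.find()) {
        final String blockVariable = m.group(1); // "spec", later passed to addStringEvidence
    }
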
/**
|
||||
* Returns the FileFilter
|
||||
*
|
||||
* @return the FileFilter
|
||||
*/
|
||||
@Override
|
||||
protected FileFilter getFileFilter() {
|
||||
return PODSPEC_FILTER;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() {
|
||||
// NO-OP
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer.
|
||||
*
|
||||
* @return the name of the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return ANALYZER_NAME;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the phase that the analyzer is intended to run in.
|
||||
*
|
||||
* @return the phase that the analyzer is intended to run in.
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the key used in the properties file to reference the analyzer's
|
||||
* enabled property.
|
||||
*
|
||||
* @return the analyzer's enabled property setting key
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_COCOAPODS_ENABLED;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void analyzeFileType(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
|
||||
String contents;
|
||||
try {
|
||||
contents = FileUtils.readFileToString(dependency.getActualFile(), Charset.defaultCharset());
|
||||
} catch (IOException e) {
|
||||
throw new AnalysisException(
|
||||
"Problem occurred while reading dependency file.", e);
|
||||
}
|
||||
final Matcher matcher = PODSPEC_BLOCK_PATTERN.matcher(contents);
|
||||
if (matcher.find()) {
|
||||
contents = contents.substring(matcher.end());
|
||||
final String blockVariable = matcher.group(1);
|
||||
|
||||
final EvidenceCollection vendor = dependency.getVendorEvidence();
|
||||
final EvidenceCollection product = dependency.getProductEvidence();
|
||||
final EvidenceCollection version = dependency.getVersionEvidence();
|
||||
|
||||
final String name = addStringEvidence(product, contents, blockVariable, "name", "name", Confidence.HIGHEST);
|
||||
if (!name.isEmpty()) {
|
||||
vendor.addEvidence(PODSPEC, "name_project", name, Confidence.HIGHEST);
|
||||
}
|
||||
addStringEvidence(product, contents, blockVariable, "summary", "summary", Confidence.HIGHEST);
|
||||
|
||||
addStringEvidence(vendor, contents, blockVariable, "author", "authors?", Confidence.HIGHEST);
|
||||
addStringEvidence(vendor, contents, blockVariable, "homepage", "homepage", Confidence.HIGHEST);
|
||||
addStringEvidence(vendor, contents, blockVariable, "license", "licen[cs]es?", Confidence.HIGHEST);
|
||||
|
||||
addStringEvidence(version, contents, blockVariable, "version", "version", Confidence.HIGHEST);
|
||||
}
|
||||
|
||||
setPackagePath(dependency);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts evidence from the contents and adds it to the given evidence
|
||||
* collection.
|
||||
*
|
||||
* @param evidences the evidence collection to update
|
||||
* @param contents the text to extract evidence from
|
||||
* @param blockVariable the block variable within the content to search for
|
||||
* @param field the name of the field being searched for
|
||||
* @param fieldPattern the field pattern within the contents to search for
|
||||
* @param confidence the confidence level of the evidence if found
|
||||
* @return the string that was added as evidence
|
||||
*/
|
||||
private String addStringEvidence(EvidenceCollection evidences, String contents,
|
||||
String blockVariable, String field, String fieldPattern, Confidence confidence) {
|
||||
String value = "";
|
||||
|
||||
//capture array value between [ ]
|
||||
final Matcher arrayMatcher = Pattern.compile(
|
||||
String.format("\\s*?%s\\.%s\\s*?=\\s*?\\{\\s*?(.*?)\\s*?\\}", blockVariable, fieldPattern),
|
||||
Pattern.CASE_INSENSITIVE).matcher(contents);
|
||||
if (arrayMatcher.find()) {
|
||||
value = arrayMatcher.group(1);
|
||||
} else { //capture single value between quotes
|
||||
final Matcher matcher = Pattern.compile(
|
||||
String.format("\\s*?%s\\.%s\\s*?=\\s*?(['\"])(.*?)\\1", blockVariable, fieldPattern),
|
||||
Pattern.CASE_INSENSITIVE).matcher(contents);
|
||||
if (matcher.find()) {
|
||||
value = matcher.group(2);
|
||||
}
|
||||
}
|
||||
if (value.length() > 0) {
|
||||
evidences.addEvidence(PODSPEC, field, value, confidence);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the package path on the given dependency.
|
||||
*
|
||||
* @param dep the dependency to update
|
||||
*/
|
||||
private void setPackagePath(Dependency dep) {
|
||||
final File file = new File(dep.getFilePath());
|
||||
final String parent = file.getParent();
|
||||
if (parent != null) {
|
||||
dep.setPackagePath(parent);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -24,6 +24,7 @@ import org.owasp.dependencycheck.data.composer.ComposerException;
|
||||
import org.owasp.dependencycheck.data.composer.ComposerLockParser;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.Checksum;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
@@ -36,7 +37,6 @@ import java.io.FileNotFoundException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.security.MessageDigest;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
|
||||
/**
|
||||
* Used to analyze a composer.lock file for a composer PHP app.
|
||||
@@ -85,19 +85,13 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
sha1 = MessageDigest.getInstance("SHA1");
|
||||
} catch (NoSuchAlgorithmException ex) {
|
||||
getSha1MessageDigest();
|
||||
} catch (IllegalStateException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Unable to create SHA1 MmessageDigest", ex);
|
||||
throw new InitializationException("Unable to create SHA1 MessageDigest", ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The MessageDigest for calculating a new digest for the new dependencies
|
||||
* added.
|
||||
*/
|
||||
private MessageDigest sha1 = null;
|
||||
|
||||
/**
|
||||
* Entry point for the analyzer.
|
||||
*
|
||||
@@ -117,6 +111,7 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
final Dependency d = new Dependency(dependency.getActualFile());
|
||||
d.setDisplayFileName(String.format("%s:%s/%s", dependency.getDisplayFileName(), dep.getGroup(), dep.getProject()));
|
||||
final String filePath = String.format("%s:%s/%s", dependency.getFilePath(), dep.getGroup(), dep.getProject());
|
||||
final MessageDigest sha1 = getSha1MessageDigest();
|
||||
d.setFilePath(filePath);
|
||||
d.setSha1sum(Checksum.getHex(sha1.digest(filePath.getBytes(Charset.defaultCharset()))));
|
||||
d.getVendorEvidence().addEvidence(COMPOSER_LOCK, "vendor", dep.getGroup(), Confidence.HIGHEST);
|
||||
@@ -169,4 +164,18 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return AnalysisPhase.INFORMATION_COLLECTION;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the sha1 message digest.
|
||||
*
|
||||
* @return the sha1 message digest
|
||||
*/
|
||||
private MessageDigest getSha1MessageDigest() {
|
||||
try {
|
||||
return MessageDigest.getInstance("SHA1");
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
LOGGER.error(e.getMessage());
|
||||
throw new IllegalStateException("Failed to obtain the SHA1 message digest.", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -46,7 +46,7 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
/**
|
||||
* The Logger.
|
||||
@@ -58,10 +58,23 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
* A pattern for obtaining the first part of a filename.
|
||||
*/
|
||||
private static final Pattern STARTING_TEXT_PATTERN = Pattern.compile("^[a-zA-Z0-9]*");
|
||||
|
||||
/**
|
||||
* a flag indicating if this analyzer has run. This analyzer only runs once.
|
||||
*/
|
||||
private boolean analyzed = false;
|
||||
|
||||
/**
|
||||
* Returns a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once. Note this is currently only used in the unit tests.
|
||||
*
|
||||
* @return a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once
|
||||
*/
|
||||
protected boolean getAnalyzed() {
|
||||
return analyzed;
|
||||
}
|
||||
|
||||
//</editor-fold>
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
@@ -94,6 +107,18 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* Does not support parallel processing as it only runs once and then
|
||||
* operates on <em>all</em> dependencies.
|
||||
*
|
||||
* @return whether or not parallel processing is enabled
|
||||
* @see #analyze(Dependency, Engine)
|
||||
*/
|
||||
@Override
|
||||
public boolean supportsParallelProcessing() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes a set of dependencies. If they have been found to have the same
|
||||
* base path and the same set of identifiers they are likely related. The
|
||||
@@ -117,6 +142,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
final ListIterator<Dependency> subIterator = engine.getDependencies().listIterator(mainIterator.nextIndex());
|
||||
while (subIterator.hasNext()) {
|
||||
final Dependency nextDependency = subIterator.next();
|
||||
Dependency main = null;
|
||||
if (hashesMatch(dependency, nextDependency) && !containedInWar(dependency.getFilePath())
|
||||
&& !containedInWar(nextDependency.getFilePath())) {
|
||||
if (firstPathIsShortest(dependency.getFilePath(), nextDependency.getFilePath())) {
|
||||
@@ -143,8 +169,14 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
break; //since we merged into the next dependency - skip forward to the next in mainIterator
|
||||
}
|
||||
} else if (isSameRubyGem(dependency, nextDependency)) {
|
||||
final Dependency main = getMainGemspecDependency(dependency, nextDependency);
|
||||
} else if ((main = getMainGemspecDependency(dependency, nextDependency)) != null) {
|
||||
if (main == dependency) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
} else {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
break; //since we merged into the next dependency - skip forward to the next in mainIterator
|
||||
}
|
||||
} else if ((main = getMainSwiftDependency(dependency, nextDependency)) != null) {
|
||||
if (main == dependency) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
} else {
|
||||
@@ -302,10 +334,13 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
String right = rFile.getParent();
|
||||
if (left == null) {
|
||||
return right == null;
|
||||
} else if (right == null) {
|
||||
return false;
|
||||
}
|
||||
if (left.equalsIgnoreCase(right)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (left.matches(".*[/\\\\]repository[/\\\\].*") && right.matches(".*[/\\\\]repository[/\\\\].*")) {
|
||||
left = getBaseRepoPath(left);
|
||||
right = getBaseRepoPath(right);
|
||||
@@ -340,11 +375,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
|| dependency2.getPackagePath() == null) {
|
||||
return false;
|
||||
}
|
||||
if (dependency1.getPackagePath().equalsIgnoreCase(dependency2.getPackagePath())) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
return dependency1.getPackagePath().equalsIgnoreCase(dependency2.getPackagePath());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -376,6 +407,46 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Bundling same swift dependencies with the same packagePath but identified
|
||||
* by different analyzers.
|
||||
*
|
||||
* @param dependency1 dependency to test
|
||||
* @param dependency2 dependency to test
|
||||
* @return <code>true</code> if the dependencies appear to be the same;
|
||||
* otherwise <code>false</code>
|
||||
*/
|
||||
private boolean isSameSwiftPackage(Dependency dependency1, Dependency dependency2) {
|
||||
if (dependency1 == null || dependency2 == null
|
||||
|| (!dependency1.getFileName().endsWith(".podspec")
|
||||
&& !dependency1.getFileName().equals("Package.swift"))
|
||||
|| (!dependency2.getFileName().endsWith(".podspec")
|
||||
&& !dependency2.getFileName().equals("Package.swift"))
|
||||
|| dependency1.getPackagePath() == null
|
||||
|| dependency2.getPackagePath() == null) {
|
||||
return false;
|
||||
}
|
||||
return dependency1.getPackagePath().equalsIgnoreCase(dependency2.getPackagePath());
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines which of the swift dependencies should be considered the
|
||||
* primary.
|
||||
*
|
||||
* @param dependency1 the first swift dependency to compare
|
||||
* @param dependency2 the second swift dependency to compare
|
||||
* @return the primary swift dependency
|
||||
*/
|
||||
private Dependency getMainSwiftDependency(Dependency dependency1, Dependency dependency2) {
|
||||
if (isSameSwiftPackage(dependency1, dependency2)) {
|
||||
if (dependency1.getFileName().endsWith(".podspec")) {
|
||||
return dependency1;
|
||||
}
|
||||
return dependency2;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* This is likely a very broken attempt at determining if the 'left'
|
||||
* dependency is the 'core' library in comparison to the 'right' library.
|
||||
@@ -464,6 +535,9 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
* <code>false</code>
|
||||
*/
|
||||
protected boolean firstPathIsShortest(String left, String right) {
|
||||
if (left.contains("dctemp")) {
|
||||
return false;
|
||||
}
|
||||
final String leftPath = left.replace('\\', '/');
|
||||
final String rightPath = right.replace('\\', '/');
|
||||
|
||||
|
||||
@@ -423,28 +423,30 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
String parentPath = dependency.getFilePath().toLowerCase();
|
||||
if (parentPath.contains(".jar")) {
|
||||
parentPath = parentPath.substring(0, parentPath.indexOf(".jar") + 4);
|
||||
final Dependency parent = findDependency(parentPath, engine.getDependencies());
|
||||
if (parent != null) {
|
||||
boolean remove = false;
|
||||
for (Identifier i : dependency.getIdentifiers()) {
|
||||
if ("cpe".equals(i.getType())) {
|
||||
final String trimmedCPE = trimCpeToVendor(i.getValue());
|
||||
for (Identifier parentId : parent.getIdentifiers()) {
|
||||
if ("cpe".equals(parentId.getType()) && parentId.getValue().startsWith(trimmedCPE)) {
|
||||
remove |= true;
|
||||
final List<Dependency> dependencies = engine.getDependencies();
|
||||
synchronized (dependencies) {
|
||||
final Dependency parent = findDependency(parentPath, dependencies);
|
||||
if (parent != null) {
|
||||
boolean remove = false;
|
||||
for (Identifier i : dependency.getIdentifiers()) {
|
||||
if ("cpe".equals(i.getType())) {
|
||||
final String trimmedCPE = trimCpeToVendor(i.getValue());
|
||||
for (Identifier parentId : parent.getIdentifiers()) {
|
||||
if ("cpe".equals(parentId.getType()) && parentId.getValue().startsWith(trimmedCPE)) {
|
||||
remove |= true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!remove) { //we can escape early
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (!remove) { //we can escape early
|
||||
return;
|
||||
if (remove) {
|
||||
dependencies.remove(dependency);
|
||||
}
|
||||
}
|
||||
if (remove) {
|
||||
engine.getDependencies().remove(dependency);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ import org.owasp.dependencycheck.utils.DependencyVersionUtil;
 *
 * @author Jeremy Long
 */
public class FileNameAnalyzer extends AbstractAnalyzer implements Analyzer {
public class FileNameAnalyzer extends AbstractAnalyzer {

    //<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
    /**
@@ -70,11 +70,12 @@ public class FileNameAnalyzer extends AbstractAnalyzer implements Analyzer {
    /**
     * Python init files
     */
    //CSOFF: WhitespaceAfter
    private static final NameFileFilter IGNORED_FILES = new NameFileFilter(new String[]{
        "__init__.py",
        "__init__.pyc",
        "__init__.pyo",
    });
        "__init__.pyo",});
    //CSON: WhitespaceAfter

    /**
     * Collects information about the file name.
@@ -93,26 +94,27 @@ public class FileNameAnalyzer extends AbstractAnalyzer implements Analyzer {

        //add version evidence
        final DependencyVersion version = DependencyVersionUtil.parseVersion(fileName);
        final String packageName = DependencyVersionUtil.parsePreVersion(fileName);
        if (version != null) {
            // If the version number is just a number like 2 or 23, reduce the confidence
            // a shade. This should hopefully correct for cases like log4j.jar or
            // struts2-core.jar
            if (version.getVersionParts() == null || version.getVersionParts().size() < 2) {
                dependency.getVersionEvidence().addEvidence("file", "name",
                dependency.getVersionEvidence().addEvidence("file", "version",
                        version.toString(), Confidence.MEDIUM);
            } else {
                dependency.getVersionEvidence().addEvidence("file", "version",
                        version.toString(), Confidence.HIGHEST);
            }
            dependency.getVersionEvidence().addEvidence("file", "name",
                    fileName, Confidence.MEDIUM);
                    packageName, Confidence.MEDIUM);
        }

        if (!IGNORED_FILES.accept(f)) {
            dependency.getProductEvidence().addEvidence("file", "name",
                    fileName, Confidence.HIGH);
                    packageName, Confidence.HIGH);
            dependency.getVendorEvidence().addEvidence("file", "name",
                    fileName, Confidence.HIGH);
                    packageName, Confidence.HIGH);
        }
    }
}
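For reference, the behaviour the fileName/packageName split above appears to rely on, sketched as comments; the exact return values are an assumption and have not been checked against DependencyVersionUtil:

    // Illustrative only - implied behaviour, not verified code:
    //   DependencyVersionUtil.parseVersion("struts2-core-2.3.16")    -> "2.3.16"
    //   DependencyVersionUtil.parsePreVersion("struts2-core-2.3.16") -> "struts2-core"
    // so product and vendor evidence now carry the package name rather than the full file name.
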
@@ -51,7 +51,7 @@ import org.xml.sax.SAXException;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class HintAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
public class HintAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
@@ -154,6 +154,9 @@ public class HintAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
for (Evidence e : hint.getAddProduct()) {
|
||||
dependency.getProductEvidence().addEvidence(e);
|
||||
}
|
||||
for (Evidence e : hint.getAddVersion()) {
|
||||
dependency.getVersionEvidence().addEvidence(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -311,14 +314,21 @@ public class HintAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
} else {
|
||||
file = new File(filePath);
|
||||
if (!file.exists()) {
|
||||
final InputStream fromClasspath = this.getClass().getClassLoader().getResourceAsStream(filePath);
|
||||
if (fromClasspath != null) {
|
||||
deleteTempFile = true;
|
||||
file = FileUtils.getTempFile("hint", "xml");
|
||||
try {
|
||||
org.apache.commons.io.FileUtils.copyInputStreamToFile(fromClasspath, file);
|
||||
} catch (IOException ex) {
|
||||
throw new HintParseException("Unable to locate suppressions file in classpath", ex);
|
||||
InputStream fromClasspath = null;
|
||||
try {
|
||||
fromClasspath = this.getClass().getClassLoader().getResourceAsStream(filePath);
|
||||
if (fromClasspath != null) {
|
||||
deleteTempFile = true;
|
||||
file = FileUtils.getTempFile("hint", "xml");
|
||||
try {
|
||||
org.apache.commons.io.FileUtils.copyInputStreamToFile(fromClasspath, file);
|
||||
} catch (IOException ex) {
|
||||
throw new HintParseException("Unable to locate hints file in classpath", ex);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
if (fromClasspath != null) {
|
||||
fromClasspath.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,7 +26,6 @@ import java.io.InputStreamReader;
|
||||
import java.io.OutputStream;
|
||||
import java.io.Reader;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Enumeration;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
@@ -35,6 +34,7 @@ import java.util.Map.Entry;
|
||||
import java.util.Properties;
|
||||
import java.util.Set;
|
||||
import java.util.StringTokenizer;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.jar.Attributes;
|
||||
import java.util.jar.JarEntry;
|
||||
import java.util.jar.JarFile;
|
||||
@@ -76,7 +76,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* The count of directories created during analysis. This is used for
|
||||
* creating temporary directories.
|
||||
*/
|
||||
private static int dirCount = 0;
|
||||
private static final AtomicInteger DIR_COUNT = new AtomicInteger(0);
|
||||
/**
|
||||
* The system independent newline character.
|
||||
*/
|
||||
@@ -318,15 +318,16 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
pom.processProperties(pomProperties);
|
||||
setPomEvidence(newDependency, pom, null);
|
||||
engine.getDependencies().add(newDependency);
|
||||
Collections.sort(engine.getDependencies());
|
||||
} else {
|
||||
if (externalPom == null) {
|
||||
pom = PomUtils.readPom(path, jar);
|
||||
} else {
|
||||
pom = PomUtils.readPom(externalPom);
|
||||
}
|
||||
pom.processProperties(pomProperties);
|
||||
foundSomething |= setPomEvidence(dependency, pom, classes);
|
||||
if (pom != null) {
|
||||
pom.processProperties(pomProperties);
|
||||
foundSomething |= setPomEvidence(dependency, pom, classes);
|
||||
}
|
||||
}
|
||||
} catch (AnalysisException ex) {
|
||||
LOGGER.warn("An error occurred while analyzing '{}'.", dependency.getActualFilePath());
|
||||
@@ -409,6 +410,9 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
final File file = new File(tmpDir, "pom.xml");
|
||||
try {
|
||||
final ZipEntry entry = jar.getEntry(path);
|
||||
if (entry == null) {
|
||||
throw new AnalysisException(String.format("Pom (%s)does not exist in %s", path, jar.getName()));
|
||||
}
|
||||
input = jar.getInputStream(entry);
|
||||
fos = new FileOutputStream(file);
|
||||
IOUtils.copy(input, fos);
|
||||
@@ -487,7 +491,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
final String originalGroupID = groupid;
|
||||
if (groupid.startsWith("org.") || groupid.startsWith("com.")) {
|
||||
if (groupid != null && (groupid.startsWith("org.") || groupid.startsWith("com."))) {
|
||||
groupid = groupid.substring(4);
|
||||
}
|
||||
|
||||
@@ -496,7 +500,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
final String originalArtifactID = artifactid;
|
||||
if (artifactid.startsWith("org.") || artifactid.startsWith("com.")) {
|
||||
if (artifactid != null && (artifactid.startsWith("org.") || artifactid.startsWith("com."))) {
|
||||
artifactid = artifactid.substring(4);
|
||||
}
|
||||
|
||||
@@ -685,7 +689,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
foundSomething = true;
|
||||
versionEvidence.addEvidence(source, key, value, Confidence.HIGH);
|
||||
} else if ("specification-version".equalsIgnoreCase(key)) {
|
||||
specificationVersion = key;
|
||||
specificationVersion = value;
|
||||
} else if (key.equalsIgnoreCase(Attributes.Name.IMPLEMENTATION_VENDOR.toString())) {
|
||||
foundSomething = true;
|
||||
vendorEvidence.addEvidence(source, key, value, Confidence.HIGH);
|
||||
@@ -704,17 +708,12 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
addMatchingValues(classInformation, value, productEvidence);
|
||||
// //the following caused false positives.
|
||||
// } else if (key.equalsIgnoreCase(BUNDLE_VENDOR)) {
|
||||
// foundSomething = true;
|
||||
// vendorEvidence.addEvidence(source, key, value, Confidence.HIGH);
|
||||
// addMatchingValues(classInformation, value, vendorEvidence);
|
||||
} else if (key.equalsIgnoreCase(BUNDLE_VERSION)) {
|
||||
foundSomething = true;
|
||||
versionEvidence.addEvidence(source, key, value, Confidence.HIGH);
|
||||
} else if (key.equalsIgnoreCase(Attributes.Name.MAIN_CLASS.toString())) {
|
||||
continue;
|
||||
//skipping main class as if this has important information to add
|
||||
// it will be added during class name analysis... if other fields
|
||||
// have the information from the class name then they will get added...
|
||||
//skipping main class as if this has important information to add it will be added during class name analysis...
|
||||
} else {
|
||||
key = key.toLowerCase();
|
||||
if (!IGNORE_KEYS.contains(key)
|
||||
@@ -932,8 +931,11 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
if (tempFileLocation != null && tempFileLocation.exists()) {
|
||||
LOGGER.debug("Attempting to delete temporary files");
|
||||
final boolean success = FileUtils.delete(tempFileLocation);
|
||||
if (!success) {
|
||||
LOGGER.warn("Failed to delete some temporary files, see the log for more details");
|
||||
if (!success && tempFileLocation.exists()) {
|
||||
final String[] l = tempFileLocation.list();
|
||||
if (l != null && l.length > 0) {
|
||||
LOGGER.warn("Failed to delete some temporary files, see the log for more details");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1218,7 +1220,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @throws AnalysisException thrown if unable to create temporary directory
|
||||
*/
|
||||
private File getNextTempDirectory() throws AnalysisException {
|
||||
dirCount += 1;
|
||||
final int dirCount = DIR_COUNT.incrementAndGet();
|
||||
final File directory = new File(tempFileLocation, String.valueOf(dirCount));
|
||||
//getting an exception for some directories not being able to be created; might be because the directory already exists?
|
||||
if (directory.exists()) {
|
||||
|
||||
@@ -87,6 +87,7 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*/
|
||||
private static final String SUPPORTED_EXTENSIONS = "jar";
|
||||
|
||||
private boolean useProxy;
|
||||
/**
|
||||
* The Nexus Search to be set up for this analyzer.
|
||||
*/
|
||||
@@ -144,10 +145,11 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
LOGGER.debug("Initializing Nexus Analyzer");
|
||||
LOGGER.debug("Nexus Analyzer enabled: {}", isEnabled());
|
||||
if (isEnabled()) {
|
||||
useProxy = useProxy();
|
||||
final String searchUrl = Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL);
|
||||
LOGGER.debug("Nexus Analyzer URL: {}", searchUrl);
|
||||
try {
|
||||
searcher = new NexusSearch(new URL(searchUrl));
|
||||
searcher = new NexusSearch(new URL(searchUrl), useProxy);
|
||||
if (!searcher.preflightRequest()) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("There was an issue getting Nexus status. Disabling analyzer.");
|
||||
@@ -245,7 +247,8 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
LOGGER.warn("Unable to download pom.xml for {} from Nexus repository; "
|
||||
+ "this could result in undetected CPE/CVEs.", dependency.getFileName());
|
||||
} finally {
|
||||
if (pomFile != null && !FileUtils.deleteQuietly(pomFile)) {
|
||||
if (pomFile != null && pomFile.exists() && !FileUtils.deleteQuietly(pomFile)) {
|
||||
LOGGER.debug("Failed to delete temporary pom file {}", pomFile.toString());
|
||||
pomFile.deleteOnExit();
|
||||
}
|
||||
}
|
||||
@@ -262,4 +265,19 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
LOGGER.debug("Could not connect to nexus repository", ioe);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if a proxy should be used.
|
||||
*
|
||||
* @return {@code true} if a proxy should be used
|
||||
*/
|
||||
public static boolean useProxy() {
|
||||
try {
|
||||
return Settings.getString(Settings.KEYS.PROXY_SERVER) != null
|
||||
&& Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY);
|
||||
} catch (InvalidSettingException ise) {
|
||||
LOGGER.warn("Failed to parse proxy settings.", ise);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -36,7 +36,7 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class NvdCveAnalyzer implements Analyzer {
|
||||
public class NvdCveAnalyzer extends AbstractAnalyzer {
|
||||
/**
|
||||
* The Logger for use throughout the class
|
||||
*/
|
||||
|
||||
@@ -24,9 +24,9 @@ import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FilenameFilter;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import org.apache.commons.io.filefilter.NameFileFilter;
|
||||
import org.apache.commons.io.filefilter.SuffixFileFilter;
|
||||
import org.apache.commons.io.input.AutoCloseInputStream;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
@@ -45,6 +45,7 @@ import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.FileUtils;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.utils.UrlStringUtils;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
/**
|
||||
* Used to analyze a Wheel or egg distribution files, or their contents in
|
||||
@@ -76,7 +77,7 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* The count of directories created during analysis. This is used for
|
||||
* creating temporary directories.
|
||||
*/
|
||||
private static int dirCount = 0;
|
||||
private static final AtomicInteger DIR_COUNT = new AtomicInteger(0);
|
||||
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
@@ -228,10 +229,13 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
throw new AnalysisException(ex);
|
||||
}
|
||||
|
||||
collectWheelMetadata(
|
||||
dependency,
|
||||
getMatchingFile(getMatchingFile(temp, folderFilter),
|
||||
metadataFilter));
|
||||
File matchingFile = getMatchingFile(temp, folderFilter);
|
||||
if (matchingFile != null) {
|
||||
matchingFile = getMatchingFile(matchingFile, metadataFilter);
|
||||
if (matchingFile != null) {
|
||||
collectWheelMetadata(dependency, matchingFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -273,9 +277,11 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
if (tempFileLocation != null && tempFileLocation.exists()) {
|
||||
LOGGER.debug("Attempting to delete temporary files");
|
||||
final boolean success = FileUtils.delete(tempFileLocation);
|
||||
if (!success) {
|
||||
LOGGER.warn(
|
||||
"Failed to delete some temporary files, see the log for more details");
|
||||
if (!success && tempFileLocation.exists()) {
|
||||
final String[] l = tempFileLocation.list();
|
||||
if (l != null && l.length > 0) {
|
||||
LOGGER.warn("Failed to delete some temporary files, see the log for more details");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -354,13 +360,22 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
if (null == manifest) {
|
||||
LOGGER.debug("Manifest file not found.");
|
||||
} else {
|
||||
InputStream in = null;
|
||||
try {
|
||||
result.load(new AutoCloseInputStream(new BufferedInputStream(
|
||||
new FileInputStream(manifest))));
|
||||
in = new BufferedInputStream(new FileInputStream(manifest));
|
||||
result.load(in);
|
||||
} catch (MessagingException e) {
|
||||
LOGGER.warn(e.getMessage(), e);
|
||||
} catch (FileNotFoundException e) {
|
||||
LOGGER.warn(e.getMessage(), e);
|
||||
} finally {
|
||||
if (in != null) {
|
||||
try {
|
||||
in.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("failed to close input stream", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
@@ -379,7 +394,7 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
// getting an exception for some directories not being able to be
|
||||
// created; might be because the directory already exists?
|
||||
do {
|
||||
dirCount += 1;
|
||||
final int dirCount = DIR_COUNT.incrementAndGet();
|
||||
directory = new File(tempFileLocation, String.valueOf(dirCount));
|
||||
} while (directory.exists());
|
||||
if (!directory.mkdirs()) {
|
||||
|
||||
@@ -33,8 +33,6 @@ import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
@@ -193,11 +191,7 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// copy, alter and set in case some other thread is iterating over
|
||||
final List<Dependency> dependencies = new ArrayList<Dependency>(
|
||||
engine.getDependencies());
|
||||
dependencies.remove(dependency);
|
||||
engine.setDependencies(dependencies);
|
||||
engine.getDependencies().remove(dependency);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -23,25 +23,26 @@ import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.nio.charset.Charset;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Reference;
|
||||
import org.owasp.dependencycheck.dependency.Vulnerability;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
|
||||
/**
|
||||
* Used to analyze Ruby Bundler Gemspec.lock files utilizing the 3rd party
|
||||
@@ -279,11 +280,16 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
final File parentFile = dependency.getActualFile().getParentFile();
|
||||
final Process process = launchBundleAudit(parentFile);
|
||||
final int exitValue;
|
||||
try {
|
||||
process.waitFor();
|
||||
exitValue = process.waitFor();
|
||||
} catch (InterruptedException ie) {
|
||||
throw new AnalysisException("bundle-audit process interrupted", ie);
|
||||
}
|
||||
if (exitValue < 0 || exitValue > 1) {
|
||||
final String msg = String.format("Unexpected exit code from bundle-audit process; exit code: %s", exitValue);
|
||||
throw new AnalysisException(msg);
|
||||
}
|
||||
BufferedReader rdr = null;
|
||||
BufferedReader errReader = null;
|
||||
try {
|
||||
@@ -482,7 +488,9 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*/
|
||||
private Dependency createDependencyForGem(Engine engine, String parentName, String fileName, String filePath, String gem) throws IOException {
|
||||
final File gemFile = new File(Settings.getTempDirectory(), gem + "_Gemfile.lock");
|
||||
gemFile.createNewFile();
|
||||
if (!gemFile.createNewFile()) {
|
||||
throw new IOException("Unable to create temporary gem file");
|
||||
}
|
||||
final String displayFileName = String.format("%s%c%s:%s", parentName, File.separatorChar, fileName, gem);
|
||||
|
||||
FileUtils.write(gemFile, displayFileName, Charset.defaultCharset()); // unique contents to avoid dependency bundling

@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2016 Bianca Jiang. All Rights Reserved.
* Copyright (c) 2016 IBM Corporation. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;

@@ -43,7 +43,7 @@ import org.owasp.dependencycheck.dependency.Dependency;
* {@link RubyGemspecAnalyzer}, so it will enabled/disabled with
* {@link RubyGemspecAnalyzer}.
*
* @author Bianca Jiang (biancajiang@gmail.com)
* @author Bianca Jiang (https://twitter.com/biancajiang)
*/
@Experimental
public class RubyBundlerAnalyzer extends RubyGemspecAnalyzer {

@@ -217,6 +217,9 @@ public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
return name.contains(VERSION_FILE_NAME);
}
});
if (matchingFiles == null) {
return;
}
for (File f : matchingFiles) {
try {
final List<String> lines = FileUtils.readLines(f, Charset.defaultCharset());

@@ -0,0 +1,192 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2016 IBM Corporation. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;

import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.io.FileUtils;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings;

/**
* This analyzer is used to analyze the SWIFT Package Manager
* (https://swift.org/package-manager/). It collects information about a package
* from Package.swift files.
*
* @author Bianca Jiang (https://twitter.com/biancajiang)
*/
@Experimental
public class SwiftPackageManagerAnalyzer extends AbstractFileTypeAnalyzer {

/**
* The name of the analyzer.
*/
private static final String ANALYZER_NAME = "SWIFT Package Manager Analyzer";

/**
* The phase that this analyzer is intended to run in.
*/
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION;

/**
* The file name to scan.
*/
public static final String SPM_FILE_NAME = "Package.swift";

/**
* Filter that detects files named "Package.swift".
*/
private static final FileFilter SPM_FILE_FILTER = FileFilterBuilder.newInstance().addFilenames(SPM_FILE_NAME).build();

/**
* The capture group #1 is the block variable. e.g. "import
* PackageDescription let package = Package( name: "Gloss" )"
*/
private static final Pattern SPM_BLOCK_PATTERN = Pattern.compile("let[^=]+=\\s*Package\\s*\\(\\s*([^)]*)\\s*\\)", Pattern.DOTALL);
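
A quick sketch of how `SPM_BLOCK_PATTERN` behaves on the sample manifest quoted in the comment above; the `Package.swift` snippet is illustrative only, not a fixture from the project:

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class SpmBlockPatternSketch {
    public static void main(String[] args) {
        // Same expression as SPM_BLOCK_PATTERN: capture everything between "Package(" and ")"
        final Pattern blockPattern = Pattern.compile(
                "let[^=]+=\\s*Package\\s*\\(\\s*([^)]*)\\s*\\)", Pattern.DOTALL);
        final String contents = "import PackageDescription\n"
                + "let package = Package(\n"
                + "    name: \"Gloss\"\n"
                + ")";
        final Matcher matcher = blockPattern.matcher(contents);
        if (matcher.find()) {
            // Prints the captured package description block, e.g. name: "Gloss"
            System.out.println(matcher.group(1).trim());
        }
    }
}
```
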

/**
* Returns the FileFilter
*
* @return the FileFilter
*/
@Override
protected FileFilter getFileFilter() {
return SPM_FILE_FILTER;
}

@Override
protected void initializeFileTypeAnalyzer() {
// NO-OP
}

/**
* Returns the name of the analyzer.
*
* @return the name of the analyzer.
*/
@Override
public String getName() {
return ANALYZER_NAME;
}

/**
* Returns the phase that the analyzer is intended to run in.
*
* @return the phase that the analyzer is intended to run in.
*/
@Override
public AnalysisPhase getAnalysisPhase() {
return ANALYSIS_PHASE;
}

/**
* Returns the key used in the properties file to reference the analyzer's
* enabled property.
*
* @return the analyzer's enabled property setting key
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED;
}

@Override
protected void analyzeFileType(Dependency dependency, Engine engine)
throws AnalysisException {

String contents;
try {
contents = FileUtils.readFileToString(dependency.getActualFile(), Charset.defaultCharset());
} catch (IOException e) {
throw new AnalysisException(
"Problem occurred while reading dependency file.", e);
}
final Matcher matcher = SPM_BLOCK_PATTERN.matcher(contents);
if (matcher.find()) {
final String packageDescription = matcher.group(1);
if (packageDescription.isEmpty()) {
return;
}

final EvidenceCollection product = dependency.getProductEvidence();
final EvidenceCollection vendor = dependency.getVendorEvidence();

//SPM is currently under development for SWIFT 3. Its current metadata includes package name and dependencies.
//Future interesting metadata: version, license, homepage, author, summary, etc.
final String name = addStringEvidence(product, packageDescription, "name", "name", Confidence.HIGHEST);
if (name != null && !name.isEmpty()) {
vendor.addEvidence(SPM_FILE_NAME, "name_project", name, Confidence.HIGHEST);
}
}
setPackagePath(dependency);
}

/**
* Extracts evidence from the package description and adds it to the given
* evidence collection.
*
* @param evidences the evidence collection to update
* @param packageDescription the text to extract evidence from
* @param field the name of the field being searched for
* @param fieldPattern the field pattern within the contents to search for
* @param confidence the confidence level of the evidence if found
* @return the string that was added as evidence
*/
private String addStringEvidence(EvidenceCollection evidences,
String packageDescription, String field, String fieldPattern, Confidence confidence) {
String value = "";

final Matcher matcher = Pattern.compile(
String.format("%s *:\\s*\"([^\"]*)", fieldPattern), Pattern.DOTALL).matcher(packageDescription);
if (matcher.find()) {
value = matcher.group(1);
}

if (value != null) {
value = value.trim();
if (value.length() > 0) {
evidences.addEvidence(SPM_FILE_NAME, field, value, confidence);
}
}

return value;
}
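
The per-field pattern built in `addStringEvidence` has the shape `<field> *:\s*"([^"]*)`. A tiny sketch of it pulling the `name` out of a made-up package description:

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class SpmFieldPatternSketch {
    public static void main(String[] args) {
        // Sample description block only; a real one comes from SPM_BLOCK_PATTERN's capture group
        final String packageDescription = "name: \"Gloss\", dependencies: []";
        final Matcher matcher = Pattern.compile(
                String.format("%s *:\\s*\"([^\"]*)", "name"), Pattern.DOTALL)
                .matcher(packageDescription);
        if (matcher.find()) {
            System.out.println(matcher.group(1)); // Gloss
        }
    }
}
```
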

/**
* Sets the package path on the given dependency.
*
* @param dep the dependency to update
*/
private void setPackagePath(Dependency dep) {
final File file = new File(dep.getFilePath());
final String parent = file.getParent();
if (parent != null) {
dep.setPackagePath(parent);
}
}
}
@@ -51,7 +51,7 @@ public class CentralSearch {
/**
* Whether to use the Proxy when making requests
*/
private boolean useProxy;
private final boolean useProxy;

/**
* Used for logging.
@@ -61,8 +61,8 @@ public class CentralSearch {
/**
* Creates a NexusSearch for the given repository URL.
*
* @param rootURL the URL of the repository on which searches should execute. Only parameters are added to this (so it should
* end in /select)
* @param rootURL the URL of the repository on which searches should
* execute. Only parameters are added to this (so it should end in /select)
*/
public CentralSearch(URL rootURL) {
this.rootURL = rootURL;
@@ -76,18 +76,20 @@ public class CentralSearch {
}

/**
* Searches the configured Central URL for the given sha1 hash. If the artifact is found, a <code>MavenArtifact</code> is
* populated with the GAV.
* Searches the configured Central URL for the given sha1 hash. If the
* artifact is found, a <code>MavenArtifact</code> is populated with the
* GAV.
*
* @param sha1 the SHA-1 hash string for which to search
* @return the populated Maven GAV.
* @throws IOException if it's unable to connect to the specified repository or if the specified artifact is not found.
* @throws IOException if it's unable to connect to the specified repository
* or if the specified artifact is not found.
*/
public List<MavenArtifact> searchSha1(String sha1) throws IOException {
if (null == sha1 || !sha1.matches("^[0-9A-Fa-f]{40}$")) {
throw new IllegalArgumentException("Invalid SHA1 format");
}

List<MavenArtifact> result = null;
final URL url = new URL(rootURL + String.format("?q=1:\"%s\"&wt=xml", sha1));

LOGGER.debug("Searching Central url {}", url);
@@ -108,15 +110,16 @@ public class CentralSearch {
if (conn.getResponseCode() == 200) {
boolean missing = false;
try {
final DocumentBuilder builder = DocumentBuilderFactory
.newInstance().newDocumentBuilder();
final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
final DocumentBuilder builder = factory.newDocumentBuilder();
final Document doc = builder.parse(conn.getInputStream());
final XPath xpath = XPathFactory.newInstance().newXPath();
final String numFound = xpath.evaluate("/response/result/@numFound", doc);
if ("0".equals(numFound)) {
missing = true;
} else {
final List<MavenArtifact> result = new ArrayList<MavenArtifact>();
result = new ArrayList<MavenArtifact>();
final NodeList docs = (NodeList) xpath.evaluate("/response/result/doc", doc, XPathConstants.NODESET);
for (int i = 0; i < docs.getLength(); i++) {
final String g = xpath.evaluate("./str[@name='g']", docs.item(i));
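
The hunk above swaps the default `DocumentBuilderFactory` for one with DOCTYPE declarations disabled, the usual guard against XXE when parsing a response you do not control. A self-contained sketch of that hardened parse, using a made-up stand-in for a Central response:

```java
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;

public class HardenedXmlParseSketch {
    public static void main(String[] args) throws Exception {
        // Disallowing DOCTYPE declarations rejects documents that could carry external entities
        final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        final DocumentBuilder builder = factory.newDocumentBuilder();

        // Made-up response fragment; the real code parses conn.getInputStream()
        final String xml = "<response><result numFound=\"0\"/></response>";
        final Document doc = builder.parse(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));
        System.out.println(doc.getDocumentElement().getNodeName()); // response
    }
}
```
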
@@ -144,16 +147,12 @@ public class CentralSearch {
useHTTPS = true;
}
}

LOGGER.trace("Version: {}", v);
result.add(new MavenArtifact(g, a, v, jarAvailable, pomAvailable, useHTTPS));
}

return result;
}
} catch (Throwable e) {
// Anything else is jacked up XML stuff that we really can't recover
// from well
// Anything else is jacked up XML stuff that we really can't recover from well
throw new IOException(e.getMessage(), e);
}

@@ -162,10 +161,9 @@ public class CentralSearch {
}
} else {
LOGGER.debug("Could not connect to Central received response code: {} {}",
conn.getResponseCode(), conn.getResponseMessage());
conn.getResponseCode(), conn.getResponseMessage());
throw new IOException("Could not connect to Central");
}

return null;
return result;
}
}

@@ -209,10 +209,13 @@ public final class CpeMemoryIndex {

final Set<Pair<String, String>> data = cve.getVendorProductList();
for (Pair<String, String> pair : data) {
v.setStringValue(pair.getLeft());
p.setStringValue(pair.getRight());
indexWriter.addDocument(doc);
resetFieldAnalyzer();
//todo figure out why there are null products
if (pair.getLeft() != null && pair.getRight() != null) {
v.setStringValue(pair.getLeft());
p.setStringValue(pair.getRight());
indexWriter.addDocument(doc);
resetFieldAnalyzer();
}
}
} catch (DatabaseException ex) {
LOGGER.debug("", ex);

@@ -25,6 +25,7 @@ import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.xpath.XPath;
|
||||
import javax.xml.xpath.XPathFactory;
|
||||
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.utils.URLConnectionFactory;
|
||||
@@ -47,7 +48,7 @@ public class NexusSearch {
|
||||
/**
|
||||
* Whether to use the Proxy when making requests.
|
||||
*/
|
||||
private boolean useProxy;
|
||||
private final boolean useProxy;
|
||||
/**
|
||||
* Used for logging.
|
||||
*/
|
||||
@@ -56,32 +57,26 @@ public class NexusSearch {
|
||||
/**
|
||||
* Creates a NexusSearch for the given repository URL.
|
||||
*
|
||||
* @param rootURL the root URL of the repository on which searches should execute. full URL's are calculated relative to this
|
||||
* URL, so it should end with a /
|
||||
* @param rootURL the root URL of the repository on which searches should
|
||||
* execute. full URL's are calculated relative to this URL, so it should end
|
||||
* with a /
|
||||
* @param useProxy flag indicating if the proxy settings should be used
|
||||
*/
|
||||
public NexusSearch(URL rootURL) {
|
||||
public NexusSearch(URL rootURL, boolean useProxy) {
|
||||
this.rootURL = rootURL;
|
||||
try {
|
||||
if (null != Settings.getString(Settings.KEYS.PROXY_SERVER)
|
||||
&& Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY)) {
|
||||
useProxy = true;
|
||||
LOGGER.debug("Using proxy");
|
||||
} else {
|
||||
useProxy = false;
|
||||
LOGGER.debug("Not using proxy");
|
||||
}
|
||||
} catch (InvalidSettingException ise) {
|
||||
useProxy = false;
|
||||
}
|
||||
this.useProxy = useProxy;
|
||||
LOGGER.debug("Using proxy: {}", useProxy);
|
||||
}
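
With the proxy decision lifted out of the constructor, callers now supply the flag themselves. A hedged usage sketch; the endpoint URL is a placeholder, the flag would normally be derived from the `PROXY_SERVER` and `ANALYZER_NEXUS_USES_PROXY` settings shown in the removed code, and the package locations are quoted from memory rather than from this diff:

```java
import java.net.URL;

import org.owasp.dependencycheck.data.nexus.MavenArtifact;
import org.owasp.dependencycheck.data.nexus.NexusSearch;

public class NexusSearchUsageSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder endpoint; per the javadoc it must end with a "/" (e.g. .../service/local/)
        final URL rootURL = new URL("https://nexus.example.com/service/local/");
        // Assumption: the caller resolves the proxy flag from its own settings
        final boolean useProxy = false;
        final NexusSearch searcher = new NexusSearch(rootURL, useProxy);
        final MavenArtifact artifact = searcher.searchSha1("94a9ce681a42d0352b3ad22659f67835e560d107");
        System.out.println(artifact);
    }
}
```
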
|
||||
|
||||
/**
|
||||
* Searches the configured Nexus repository for the given sha1 hash. If the artifact is found, a <code>MavenArtifact</code> is
|
||||
* populated with the coordinate information.
|
||||
* Searches the configured Nexus repository for the given sha1 hash. If the
|
||||
* artifact is found, a <code>MavenArtifact</code> is populated with the
|
||||
* coordinate information.
|
||||
*
|
||||
* @param sha1 The SHA-1 hash string for which to search
|
||||
* @return the populated Maven coordinates
|
||||
* @throws IOException if it's unable to connect to the specified repository or if the specified artifact is not found.
|
||||
* @throws IOException if it's unable to connect to the specified repository
|
||||
* or if the specified artifact is not found.
|
||||
*/
|
||||
public MavenArtifact searchSha1(String sha1) throws IOException {
|
||||
if (null == sha1 || !sha1.matches("^[0-9A-Fa-f]{40}$")) {
|
||||
@@ -106,57 +101,60 @@ public class NexusSearch {
|
||||
conn.addRequestProperty("Accept", "application/xml");
|
||||
conn.connect();
|
||||
|
||||
if (conn.getResponseCode() == 200) {
|
||||
try {
|
||||
final DocumentBuilder builder = DocumentBuilderFactory
|
||||
.newInstance().newDocumentBuilder();
|
||||
final Document doc = builder.parse(conn.getInputStream());
|
||||
final XPath xpath = XPathFactory.newInstance().newXPath();
|
||||
final String groupId = xpath
|
||||
.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/groupId",
|
||||
doc);
|
||||
final String artifactId = xpath.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/artifactId",
|
||||
doc);
|
||||
final String version = xpath
|
||||
.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/version",
|
||||
doc);
|
||||
final String link = xpath
|
||||
.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/artifactLink",
|
||||
doc);
|
||||
final String pomLink = xpath
|
||||
.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/pomLink",
|
||||
doc);
|
||||
final MavenArtifact ma = new MavenArtifact(groupId, artifactId, version);
|
||||
if (link != null && !link.isEmpty()) {
|
||||
ma.setArtifactUrl(link);
|
||||
switch (conn.getResponseCode()) {
|
||||
case 200:
|
||||
try {
|
||||
final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
|
||||
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
|
||||
final DocumentBuilder builder = factory.newDocumentBuilder();
|
||||
final Document doc = builder.parse(conn.getInputStream());
|
||||
final XPath xpath = XPathFactory.newInstance().newXPath();
|
||||
final String groupId = xpath
|
||||
.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/groupId",
|
||||
doc);
|
||||
final String artifactId = xpath.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/artifactId",
|
||||
doc);
|
||||
final String version = xpath
|
||||
.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/version",
|
||||
doc);
|
||||
final String link = xpath
|
||||
.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/artifactLink",
|
||||
doc);
|
||||
final String pomLink = xpath
|
||||
.evaluate(
|
||||
"/org.sonatype.nexus.rest.model.NexusArtifact/pomLink",
|
||||
doc);
|
||||
final MavenArtifact ma = new MavenArtifact(groupId, artifactId, version);
|
||||
if (link != null && !link.isEmpty()) {
|
||||
ma.setArtifactUrl(link);
|
||||
}
|
||||
if (pomLink != null && !pomLink.isEmpty()) {
|
||||
ma.setPomUrl(pomLink);
|
||||
}
|
||||
return ma;
|
||||
} catch (Throwable e) {
|
||||
// Anything else is jacked-up XML stuff that we really can't recover
|
||||
// from well
|
||||
throw new IOException(e.getMessage(), e);
|
||||
}
|
||||
if (pomLink != null && !pomLink.isEmpty()) {
|
||||
ma.setPomUrl(pomLink);
|
||||
}
|
||||
return ma;
|
||||
} catch (Throwable e) {
|
||||
// Anything else is jacked-up XML stuff that we really can't recover
|
||||
// from well
|
||||
throw new IOException(e.getMessage(), e);
|
||||
}
|
||||
} else if (conn.getResponseCode() == 404) {
|
||||
throw new FileNotFoundException("Artifact not found in Nexus");
|
||||
} else {
|
||||
LOGGER.debug("Could not connect to Nexus received response code: {} {}",
|
||||
conn.getResponseCode(), conn.getResponseMessage());
|
||||
throw new IOException("Could not connect to Nexus");
|
||||
case 404:
|
||||
throw new FileNotFoundException("Artifact not found in Nexus");
|
||||
default:
|
||||
LOGGER.debug("Could not connect to Nexus received response code: {} {}",
|
||||
conn.getResponseCode(), conn.getResponseMessage());
|
||||
throw new IOException("Could not connect to Nexus");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Do a preflight request to see if the repository is actually working.
|
||||
*
|
||||
* @return whether the repository is listening and returns the /status URL correctly
|
||||
* @return whether the repository is listening and returns the /status URL
|
||||
* correctly
|
||||
*/
|
||||
public boolean preflightRequest() {
|
||||
HttpURLConnection conn;
|
||||
|
||||
@@ -36,7 +36,8 @@ public class XPathNuspecParser implements NuspecParser {
|
||||
* Gets the string value of a node or null if it's not present
|
||||
*
|
||||
* @param n the node to test
|
||||
* @return the string content of the node, or null if the node itself is null
|
||||
* @return the string content of the node, or null if the node itself is
|
||||
* null
|
||||
*/
|
||||
private String getOrNull(Node n) {
|
||||
if (n != null) {
|
||||
@@ -56,7 +57,10 @@ public class XPathNuspecParser implements NuspecParser {
|
||||
@Override
|
||||
public NugetPackage parse(InputStream stream) throws NuspecParseException {
|
||||
try {
|
||||
final Document d = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(stream);
|
||||
final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
|
||||
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
|
||||
final Document d = factory.newDocumentBuilder().parse(stream);
|
||||
|
||||
final XPath xpath = XPathFactory.newInstance().newXPath();
|
||||
final NugetPackage nuspec = new NugetPackage();
|
||||
|
||||
|
||||
@@ -36,8 +36,10 @@ import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* Loads the configured database driver and returns the database connection. If the embedded H2 database is used obtaining a
|
||||
* connection will ensure the database file exists and that the appropriate table structure has been created.
|
||||
* Loads the configured database driver and returns the database connection. If
|
||||
* the embedded H2 database is used obtaining a connection will ensure the
|
||||
* database file exists and that the appropriate table structure has been
|
||||
* created.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@@ -87,12 +89,13 @@ public final class ConnectionFactory {
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes the connection factory. Ensuring that the appropriate drivers are loaded and that a connection can be made
|
||||
* successfully.
|
||||
* Initializes the connection factory. Ensuring that the appropriate drivers
|
||||
* are loaded and that a connection can be made successfully.
|
||||
*
|
||||
* @throws DatabaseException thrown if we are unable to connect to the database
|
||||
* @throws DatabaseException thrown if we are unable to connect to the
|
||||
* database
|
||||
*/
|
||||
public static synchronized void initialize() throws DatabaseException {
|
||||
public static void initialize() throws DatabaseException {
|
||||
//this only needs to be called once.
|
||||
if (connectionString != null) {
|
||||
return;
|
||||
@@ -188,11 +191,12 @@ public final class ConnectionFactory {
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleans up resources and unloads any registered database drivers. This needs to be called to ensure the driver is
|
||||
* unregistered prior to the finalize method being called as during shutdown the class loader used to load the driver may be
|
||||
* unloaded prior to the driver being de-registered.
|
||||
* Cleans up resources and unloads any registered database drivers. This
|
||||
* needs to be called to ensure the driver is unregistered prior to the
|
||||
* finalize method being called as during shutdown the class loader used to
|
||||
* load the driver may be unloaded prior to the driver being de-registered.
|
||||
*/
|
||||
public static synchronized void cleanup() {
|
||||
public static void cleanup() {
|
||||
if (driver != null) {
|
||||
try {
|
||||
DriverManager.deregisterDriver(driver);
|
||||
@@ -210,10 +214,12 @@ public final class ConnectionFactory {
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new database connection object per the database configuration.
|
||||
* Constructs a new database connection object per the database
|
||||
* configuration.
|
||||
*
|
||||
* @return a database connection object
|
||||
* @throws DatabaseException thrown if there is an exception loading the database connection
|
||||
* @throws DatabaseException thrown if there is an exception loading the
|
||||
* database connection
|
||||
*/
|
||||
public static Connection getConnection() throws DatabaseException {
|
||||
initialize();
|
||||
@@ -228,10 +234,12 @@ public final class ConnectionFactory {
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if the H2 database file exists. If it does not exist then the data structure will need to be created.
|
||||
* Determines if the H2 database file exists. If it does not exist then the
|
||||
* data structure will need to be created.
|
||||
*
|
||||
* @return true if the H2 database file does not exist; otherwise false
|
||||
* @throws IOException thrown if the data directory does not exist and cannot be created
|
||||
* @throws IOException thrown if the data directory does not exist and
|
||||
* cannot be created
|
||||
*/
|
||||
private static boolean h2DataFileExists() throws IOException {
|
||||
final File dir = Settings.getDataDirectory();
|
||||
@@ -241,7 +249,8 @@ public final class ConnectionFactory {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the database structure (tables and indexes) to store the CVE data.
|
||||
* Creates the database structure (tables and indexes) to store the CVE
|
||||
* data.
|
||||
*
|
||||
* @param conn the database connection
|
||||
* @throws DatabaseException thrown if there is a Database Exception
|
||||
@@ -271,14 +280,17 @@ public final class ConnectionFactory {
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the database schema by loading the upgrade script for the version specified. The intended use is that if the
|
||||
* current schema version is 2.9 then we would call updateSchema(conn, "2.9"). This would load the upgrade_2.9.sql file and
|
||||
* execute it against the database. The upgrade script must update the 'version' in the properties table.
|
||||
* Updates the database schema by loading the upgrade script for the version
|
||||
* specified. The intended use is that if the current schema version is 2.9
|
||||
* then we would call updateSchema(conn, "2.9"). This would load the
|
||||
* upgrade_2.9.sql file and execute it against the database. The upgrade
|
||||
* script must update the 'version' in the properties table.
|
||||
*
|
||||
* @param conn the database connection object
|
||||
* @param appExpectedVersion the schema version that the application expects
|
||||
* @param currentDbVersion the current schema version of the database
|
||||
* @throws DatabaseException thrown if there is an exception upgrading the database schema
|
||||
* @throws DatabaseException thrown if there is an exception upgrading the
|
||||
* database schema
|
||||
*/
|
||||
private static void updateSchema(Connection conn, DependencyVersion appExpectedVersion, DependencyVersion currentDbVersion)
|
||||
throws DatabaseException {
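
Per the javadoc above, the upgrade script is chosen from the database's current schema version, so a 2.9 database loads `upgrade_2.9.sql`. A small sketch of that naming convention; the `data/` resource prefix is an assumption for illustration:

```java
public class UpgradeScriptNameSketch {
    /**
     * Builds the classpath resource name for a schema upgrade script,
     * following the upgrade_<version>.sql convention described above.
     */
    static String upgradeScript(String currentDbVersion) {
        return String.format("data/upgrade_%s.sql", currentDbVersion);
    }

    public static void main(String[] args) {
        System.out.println(upgradeScript("2.9")); // data/upgrade_2.9.sql
    }
}
```
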
|
||||
@@ -340,15 +352,18 @@ public final class ConnectionFactory {
|
||||
}
|
||||
|
||||
/**
|
||||
* Counter to ensure that calls to ensureSchemaVersion does not end up in an endless loop.
|
||||
* Counter to ensure that calls to ensureSchemaVersion does not end up in an
|
||||
* endless loop.
|
||||
*/
|
||||
private static int callDepth = 0;
|
||||
|
||||
/**
|
||||
* Uses the provided connection to check the specified schema version within the database.
|
||||
* Uses the provided connection to check the specified schema version within
|
||||
* the database.
|
||||
*
|
||||
* @param conn the database connection object
|
||||
* @throws DatabaseException thrown if the schema version is not compatible with this version of dependency-check
|
||||
* @throws DatabaseException thrown if the schema version is not compatible
|
||||
* with this version of dependency-check
|
||||
*/
|
||||
private static void ensureSchemaVersion(Connection conn) throws DatabaseException {
|
||||
ResultSet rs = null;
|
||||
@@ -359,7 +374,13 @@ public final class ConnectionFactory {
|
||||
rs = ps.executeQuery();
|
||||
if (rs.next()) {
|
||||
final DependencyVersion appDbVersion = DependencyVersionUtil.parseVersion(DB_SCHEMA_VERSION);
|
||||
if (appDbVersion == null) {
|
||||
throw new DatabaseException("Invalid application database schema");
|
||||
}
|
||||
final DependencyVersion db = DependencyVersionUtil.parseVersion(rs.getString(1));
|
||||
if (db == null) {
|
||||
throw new DatabaseException("Invalid database schema");
|
||||
}
|
||||
if (appDbVersion.compareTo(db) > 0) {
|
||||
LOGGER.debug("Current Schema: {}", DB_SCHEMA_VERSION);
|
||||
LOGGER.debug("DB Schema: {}", rs.getString(1));
|
||||
|
||||
@@ -87,10 +87,12 @@ public class CveDB {
|
||||
open();
|
||||
try {
|
||||
final String databaseProductName = conn.getMetaData().getDatabaseProductName();
|
||||
batchSupported = conn.getMetaData().supportsBatchUpdates();
|
||||
LOGGER.debug("Database dialect: {}", databaseProductName);
|
||||
final Locale dbDialect = new Locale(databaseProductName);
|
||||
statementBundle = ResourceBundle.getBundle("data/dbStatements", dbDialect);
|
||||
if ("mysql".equalsIgnoreCase(databaseProductName)) {
|
||||
batchSupported = false;
|
||||
}
|
||||
} catch (SQLException se) {
|
||||
LOGGER.warn("Problem loading database specific dialect!", se);
|
||||
statementBundle = ResourceBundle.getBundle("data/dbStatements");
|
||||
@@ -117,7 +119,7 @@ public class CveDB {
|
||||
* @throws DatabaseException thrown if there is an error opening the
|
||||
* database connection
|
||||
*/
|
||||
public final void open() throws DatabaseException {
|
||||
public final synchronized void open() throws DatabaseException {
|
||||
if (!isOpen()) {
|
||||
conn = ConnectionFactory.getConnection();
|
||||
}
|
||||
@@ -127,7 +129,7 @@ public class CveDB {
|
||||
* Closes the DB4O database. Close should be called on this object when it
|
||||
* is done being used.
|
||||
*/
|
||||
public void close() {
|
||||
public synchronized void close() {
|
||||
if (conn != null) {
|
||||
try {
|
||||
conn.close();
|
||||
@@ -147,7 +149,7 @@ public class CveDB {
|
||||
*
|
||||
* @return whether the database connection is open or closed
|
||||
*/
|
||||
public boolean isOpen() {
|
||||
public synchronized boolean isOpen() {
|
||||
return conn != null;
|
||||
}
|
||||
|
||||
@@ -156,7 +158,7 @@ public class CveDB {
|
||||
*
|
||||
* @throws SQLException thrown if a SQL Exception occurs
|
||||
*/
|
||||
public void commit() throws SQLException {
|
||||
public synchronized void commit() throws SQLException {
|
||||
//temporary remove this as autocommit is on.
|
||||
//if (conn != null) {
|
||||
// conn.commit();
|
||||
@@ -200,7 +202,7 @@ public class CveDB {
|
||||
* analyzed
|
||||
* @return a set of vulnerable software
|
||||
*/
|
||||
public Set<VulnerableSoftware> getCPEs(String vendor, String product) {
|
||||
public synchronized Set<VulnerableSoftware> getCPEs(String vendor, String product) {
|
||||
final Set<VulnerableSoftware> cpe = new HashSet<VulnerableSoftware>();
|
||||
ResultSet rs = null;
|
||||
PreparedStatement ps = null;
|
||||
@@ -232,7 +234,7 @@ public class CveDB {
|
||||
* @throws DatabaseException thrown when there is an error retrieving the
|
||||
* data from the DB
|
||||
*/
|
||||
public Set<Pair<String, String>> getVendorProductList() throws DatabaseException {
|
||||
public synchronized Set<Pair<String, String>> getVendorProductList() throws DatabaseException {
|
||||
final Set<Pair<String, String>> data = new HashSet<Pair<String, String>>();
|
||||
ResultSet rs = null;
|
||||
PreparedStatement ps = null;
|
||||
@@ -257,7 +259,7 @@ public class CveDB {
|
||||
*
|
||||
* @return the properties from the database
|
||||
*/
|
||||
Properties getProperties() {
|
||||
synchronized Properties getProperties() {
|
||||
final Properties prop = new Properties();
|
||||
PreparedStatement ps = null;
|
||||
ResultSet rs = null;
|
||||
@@ -283,7 +285,7 @@ public class CveDB {
|
||||
* @param key the property key
|
||||
* @param value the property value
|
||||
*/
|
||||
void saveProperty(String key, String value) {
|
||||
synchronized void saveProperty(String key, String value) {
|
||||
try {
|
||||
try {
|
||||
final PreparedStatement mergeProperty = getConnection().prepareStatement(statementBundle.getString("MERGE_PROPERTY"));
|
||||
@@ -326,7 +328,7 @@ public class CveDB {
|
||||
* @return a list of Vulnerabilities
|
||||
* @throws DatabaseException thrown if there is an exception retrieving data
|
||||
*/
|
||||
public List<Vulnerability> getVulnerabilities(String cpeStr) throws DatabaseException {
|
||||
public synchronized List<Vulnerability> getVulnerabilities(String cpeStr) throws DatabaseException {
|
||||
final VulnerableSoftware cpe = new VulnerableSoftware();
|
||||
try {
|
||||
cpe.parseName(cpeStr);
|
||||
@@ -387,7 +389,7 @@ public class CveDB {
|
||||
* @return a vulnerability object
|
||||
* @throws DatabaseException if an exception occurs
|
||||
*/
|
||||
public Vulnerability getVulnerability(String cve) throws DatabaseException {
|
||||
public synchronized Vulnerability getVulnerability(String cve) throws DatabaseException {
|
||||
PreparedStatement psV = null;
|
||||
PreparedStatement psR = null;
|
||||
PreparedStatement psS = null;
|
||||
@@ -460,7 +462,7 @@ public class CveDB {
|
||||
* @param vuln the vulnerability to add to the database
|
||||
* @throws DatabaseException is thrown if the database
|
||||
*/
|
||||
public void updateVulnerability(Vulnerability vuln) throws DatabaseException {
|
||||
public synchronized void updateVulnerability(Vulnerability vuln) throws DatabaseException {
|
||||
PreparedStatement selectVulnerabilityId = null;
|
||||
PreparedStatement deleteVulnerability = null;
|
||||
PreparedStatement deleteReferences = null;
|
||||
@@ -636,7 +638,7 @@ public class CveDB {
|
||||
*
|
||||
* @return <code>true</code> if data exists; otherwise <code>false</code>
|
||||
*/
|
||||
public boolean dataExists() {
|
||||
public synchronized boolean dataExists() {
|
||||
Statement cs = null;
|
||||
ResultSet rs = null;
|
||||
try {
|
||||
@@ -658,7 +660,7 @@ public class CveDB {
|
||||
+ "If the problem persist try deleting the files in '{}' and running {} again. If the problem continues, please "
|
||||
+ "create a log file (see documentation at http://jeremylong.github.io/DependencyCheck/) and open a ticket at "
|
||||
+ "https://github.com/jeremylong/DependencyCheck/issues and include the log file.\n\n",
|
||||
dd, dd, Settings.getString(Settings.KEYS.APPLICATION_VAME));
|
||||
dd, dd, Settings.getString(Settings.KEYS.APPLICATION_NAME));
|
||||
LOGGER.debug("", ex);
|
||||
} finally {
|
||||
DBUtils.closeResultSet(rs);
|
||||
@@ -672,7 +674,7 @@ public class CveDB {
|
||||
* updates. This should be called after all updates have been completed to
|
||||
* ensure orphan entries are removed.
|
||||
*/
|
||||
public void cleanupDatabase() {
|
||||
public synchronized void cleanupDatabase() {
|
||||
PreparedStatement ps = null;
|
||||
try {
|
||||
ps = getConnection().prepareStatement(statementBundle.getString("CLEANUP_ORPHANS"));
|
||||
@@ -810,7 +812,7 @@ public class CveDB {
|
||||
*
|
||||
* Deletes unused dictionary entries from the database.
|
||||
*/
|
||||
public void deleteUnusedCpe() {
|
||||
public synchronized void deleteUnusedCpe() {
|
||||
PreparedStatement ps = null;
|
||||
try {
|
||||
ps = getConnection().prepareStatement(statementBundle.getString("DELETE_UNUSED_DICT_CPE"));
|
||||
@@ -832,7 +834,7 @@ public class CveDB {
|
||||
* @param vendor the CPE vendor
|
||||
* @param product the CPE product
|
||||
*/
|
||||
public void addCpe(String cpe, String vendor, String product) {
|
||||
public synchronized void addCpe(String cpe, String vendor, String product) {
|
||||
PreparedStatement ps = null;
|
||||
try {
|
||||
ps = getConnection().prepareStatement(statementBundle.getString("ADD_DICT_CPE"));
|
||||
|
||||
@@ -44,11 +44,14 @@ import org.xml.sax.SAXException;
|
||||
|
||||
/**
|
||||
*
|
||||
* This class is currently unused and if enabled will likely not work on MySQL as the MERGE statement is used.
|
||||
* This class is currently unused and if enabled will likely not work on MySQL
|
||||
* as the MERGE statement is used.
|
||||
*
|
||||
* The CpeUpdater is designed to download the CPE data file from NIST and import the data into the database. However, as this
|
||||
* currently adds no beneficial data, compared to what is in the CPE data contained in the CVE data files, this class is not
|
||||
* currently used. The code is being kept as a future update may utilize more data from the CPE xml files.
|
||||
* The CpeUpdater is designed to download the CPE data file from NIST and import
|
||||
* the data into the database. However, as this currently adds no beneficial
|
||||
* data, compared to what is in the CPE data contained in the CVE data files,
|
||||
* this class is not currently used. The code is being kept as a future update
|
||||
* may utilize more data from the CPE XML files.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@@ -84,7 +87,8 @@ public class CpeUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
* Downloads the CPE XML file.
|
||||
*
|
||||
* @return the file reference to the CPE.xml file
|
||||
* @throws UpdateException thrown if there is an issue downloading the XML file
|
||||
* @throws UpdateException thrown if there is an issue downloading the XML
|
||||
* file
|
||||
*/
|
||||
private File downloadCpe() throws UpdateException {
|
||||
File xml;
|
||||
@@ -112,11 +116,13 @@ public class CpeUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
*
|
||||
* @param xml the CPE data file
|
||||
* @return the list of CPE entries
|
||||
* @throws UpdateException thrown if there is an issue with parsing the XML file
|
||||
* @throws UpdateException thrown if there is an issue with parsing the XML
|
||||
* file
|
||||
*/
|
||||
private List<Cpe> processXML(final File xml) throws UpdateException {
|
||||
try {
|
||||
final SAXParserFactory factory = SAXParserFactory.newInstance();
|
||||
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
|
||||
final SAXParser saxParser = factory.newSAXParser();
|
||||
final CPEHandler handler = new CPEHandler();
|
||||
saxParser.parse(xml, handler);
|
||||
@@ -131,7 +137,8 @@ public class CpeUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks to find the last time the CPE data was refreshed and if it needs to be updated.
|
||||
* Checks to find the last time the CPE data was refreshed and if it needs
|
||||
* to be updated.
|
||||
*
|
||||
* @return true if the CPE data should be refreshed
|
||||
*/
|
||||
@@ -147,7 +154,8 @@ public class CpeUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the file contained in a gzip archive. The extracted file is placed in the exact same path as the file specified.
|
||||
* Extracts the file contained in a gzip archive. The extracted file is
|
||||
* placed in the exact same path as the file specified.
|
||||
*
|
||||
* @param file the archive file
|
||||
* @throws FileNotFoundException thrown if the file does not exist
|
||||
@@ -158,6 +166,7 @@ public class CpeUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
final String originalPath = file.getPath();
|
||||
final File gzip = new File(originalPath + ".gz");
|
||||
if (gzip.isFile() && !gzip.delete()) {
|
||||
LOGGER.debug("Failed to delete intial temporary file {}", gzip.toString());
|
||||
gzip.deleteOnExit();
|
||||
}
|
||||
if (!file.renameTo(gzip)) {
|
||||
@@ -192,8 +201,9 @@ public class CpeUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
LOGGER.trace("ignore", ex);
|
||||
}
|
||||
}
|
||||
if (gzip.isFile()) {
|
||||
FileUtils.deleteQuietly(gzip);
|
||||
if (gzip.isFile() && !FileUtils.deleteQuietly(gzip)) {
|
||||
LOGGER.debug("Failed to delete temporary file {}", gzip.toString());
|
||||
gzip.deleteOnExit();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -118,7 +118,7 @@ public class EngineVersionCheck implements CachedWebDataSource {
|
||||
}
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.debug("Database Exception opening databases to retrieve properties", ex);
|
||||
throw new UpdateException("Error occured updating database properties.");
|
||||
throw new UpdateException("Error occurred updating database properties.");
|
||||
} catch (InvalidSettingException ex) {
|
||||
LOGGER.debug("Unable to determine if autoupdate is enabled", ex);
|
||||
} finally {
|
||||
@@ -220,11 +220,11 @@ public class EngineVersionCheck implements CachedWebDataSource {
|
||||
return releaseVersion.trim();
|
||||
}
|
||||
} catch (MalformedURLException ex) {
|
||||
LOGGER.debug("Unable to retrieve current release version of dependency-check", ex);
|
||||
LOGGER.debug("Unable to retrieve current release version of dependency-check - malformed url?");
|
||||
} catch (URLConnectionFailureException ex) {
|
||||
LOGGER.debug("Unable to retrieve current release version of dependency-check", ex);
|
||||
LOGGER.debug("Unable to retrieve current release version of dependency-check - connection failed");
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Unable to retrieve current release version of dependency-check", ex);
|
||||
LOGGER.debug("Unable to retrieve current release version of dependency-check - i/o exception");
|
||||
} finally {
|
||||
if (conn != null) {
|
||||
conn.disconnect();
|
||||
|
||||
@@ -77,10 +77,10 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
}
|
||||
if (autoUpdate && checkUpdate()) {
|
||||
final UpdateableNvdCve updateable = getUpdatesNeeded();
|
||||
getProperties().save(DatabaseProperties.LAST_CHECKED, Long.toString(System.currentTimeMillis()));
|
||||
if (updateable.isUpdateNeeded()) {
|
||||
performUpdate(updateable);
|
||||
}
|
||||
getProperties().save(DatabaseProperties.LAST_CHECKED, Long.toString(System.currentTimeMillis()));
|
||||
}
|
||||
} catch (MalformedURLException ex) {
|
||||
throw new UpdateException("NVD CVE properties files contain an invalid URL, unable to update the data to use the most current data.", ex);
|
||||
@@ -156,93 +156,86 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
|
||||
* @throws UpdateException is thrown if there is an error updating the
|
||||
* database
|
||||
*/
|
||||
public void performUpdate(UpdateableNvdCve updateable) throws UpdateException {
|
||||
private void performUpdate(UpdateableNvdCve updateable) throws UpdateException {
|
||||
int maxUpdates = 0;
|
||||
try {
|
||||
for (NvdCveInfo cve : updateable) {
|
||||
if (cve.getNeedsUpdate()) {
|
||||
maxUpdates += 1;
|
||||
for (NvdCveInfo cve : updateable) {
|
||||
if (cve.getNeedsUpdate()) {
|
||||
maxUpdates += 1;
|
||||
}
|
||||
}
|
||||
if (maxUpdates <= 0) {
|
||||
return;
|
||||
}
|
||||
if (maxUpdates > 3) {
|
||||
LOGGER.info("NVD CVE requires several updates; this could take a couple of minutes.");
|
||||
}
|
||||
|
||||
final int poolSize = (MAX_THREAD_POOL_SIZE < maxUpdates) ? MAX_THREAD_POOL_SIZE : maxUpdates;
|
||||
|
||||
final ExecutorService downloadExecutors = Executors.newFixedThreadPool(poolSize);
|
||||
final ExecutorService processExecutor = Executors.newSingleThreadExecutor();
|
||||
final Set<Future<Future<ProcessTask>>> downloadFutures = new HashSet<Future<Future<ProcessTask>>>(maxUpdates);
|
||||
for (NvdCveInfo cve : updateable) {
|
||||
if (cve.getNeedsUpdate()) {
|
||||
final DownloadTask call = new DownloadTask(cve, processExecutor, getCveDB(), Settings.getInstance());
|
||||
downloadFutures.add(downloadExecutors.submit(call));
|
||||
}
|
||||
}
|
||||
downloadExecutors.shutdown();
|
||||
|
||||
//next, move the future future processTasks to just future processTasks
|
||||
final Set<Future<ProcessTask>> processFutures = new HashSet<Future<ProcessTask>>(maxUpdates);
|
||||
for (Future<Future<ProcessTask>> future : downloadFutures) {
|
||||
Future<ProcessTask> task = null;
|
||||
try {
|
||||
task = future.get();
|
||||
} catch (InterruptedException ex) {
|
||||
downloadExecutors.shutdownNow();
|
||||
processExecutor.shutdownNow();
|
||||
|
||||
LOGGER.debug("Thread was interrupted during download", ex);
|
||||
throw new UpdateException("The download was interrupted", ex);
|
||||
} catch (ExecutionException ex) {
|
||||
downloadExecutors.shutdownNow();
|
||||
processExecutor.shutdownNow();
|
||||
|
||||
LOGGER.debug("Thread was interrupted during download execution", ex);
|
||||
throw new UpdateException("The execution of the download was interrupted", ex);
|
||||
}
|
||||
if (task == null) {
|
||||
downloadExecutors.shutdownNow();
|
||||
processExecutor.shutdownNow();
|
||||
LOGGER.debug("Thread was interrupted during download");
|
||||
throw new UpdateException("The download was interrupted; unable to complete the update");
|
||||
} else {
|
||||
processFutures.add(task);
|
||||
}
|
||||
}
|
||||
|
||||
for (Future<ProcessTask> future : processFutures) {
|
||||
try {
|
||||
final ProcessTask task = future.get();
|
||||
if (task.getException() != null) {
|
||||
throw task.getException();
|
||||
}
|
||||
} catch (InterruptedException ex) {
|
||||
processExecutor.shutdownNow();
|
||||
LOGGER.debug("Thread was interrupted during processing", ex);
|
||||
throw new UpdateException(ex);
|
||||
} catch (ExecutionException ex) {
|
||||
processExecutor.shutdownNow();
|
||||
LOGGER.debug("Execution Exception during process", ex);
|
||||
throw new UpdateException(ex);
|
||||
} finally {
|
||||
processExecutor.shutdown();
|
||||
}
|
||||
if (maxUpdates <= 0) {
|
||||
return;
|
||||
}
|
||||
if (maxUpdates > 3) {
|
||||
LOGGER.info("NVD CVE requires several updates; this could take a couple of minutes.");
|
||||
}
|
||||
if (maxUpdates > 0) {
|
||||
openDataStores();
|
||||
}
|
||||
}
|
||||
|
||||
final int poolSize = (MAX_THREAD_POOL_SIZE < maxUpdates) ? MAX_THREAD_POOL_SIZE : maxUpdates;
|
||||
|
||||
final ExecutorService downloadExecutors = Executors.newFixedThreadPool(poolSize);
|
||||
final ExecutorService processExecutor = Executors.newSingleThreadExecutor();
|
||||
final Set<Future<Future<ProcessTask>>> downloadFutures = new HashSet<Future<Future<ProcessTask>>>(maxUpdates);
|
||||
for (NvdCveInfo cve : updateable) {
|
||||
if (cve.getNeedsUpdate()) {
|
||||
final DownloadTask call = new DownloadTask(cve, processExecutor, getCveDB(), Settings.getInstance());
|
||||
downloadFutures.add(downloadExecutors.submit(call));
|
||||
}
|
||||
}
|
||||
downloadExecutors.shutdown();
|
||||
|
||||
//next, move the future future processTasks to just future processTasks
|
||||
final Set<Future<ProcessTask>> processFutures = new HashSet<Future<ProcessTask>>(maxUpdates);
|
||||
for (Future<Future<ProcessTask>> future : downloadFutures) {
|
||||
Future<ProcessTask> task = null;
|
||||
try {
|
||||
task = future.get();
|
||||
} catch (InterruptedException ex) {
|
||||
downloadExecutors.shutdownNow();
|
||||
processExecutor.shutdownNow();
|
||||
|
||||
LOGGER.debug("Thread was interrupted during download", ex);
|
||||
throw new UpdateException("The download was interrupted", ex);
|
||||
} catch (ExecutionException ex) {
|
||||
downloadExecutors.shutdownNow();
|
||||
processExecutor.shutdownNow();
|
||||
|
||||
LOGGER.debug("Thread was interrupted during download execution", ex);
|
||||
throw new UpdateException("The execution of the download was interrupted", ex);
|
||||
}
|
||||
if (task == null) {
|
||||
downloadExecutors.shutdownNow();
|
||||
processExecutor.shutdownNow();
|
||||
LOGGER.debug("Thread was interrupted during download");
|
||||
throw new UpdateException("The download was interrupted; unable to complete the update");
|
||||
} else {
|
||||
processFutures.add(task);
|
||||
}
|
||||
}
|
||||
|
||||
for (Future<ProcessTask> future : processFutures) {
|
||||
try {
|
||||
final ProcessTask task = future.get();
|
||||
if (task.getException() != null) {
|
||||
throw task.getException();
|
||||
}
|
||||
} catch (InterruptedException ex) {
|
||||
processExecutor.shutdownNow();
|
||||
LOGGER.debug("Thread was interrupted during processing", ex);
|
||||
throw new UpdateException(ex);
|
||||
} catch (ExecutionException ex) {
|
||||
processExecutor.shutdownNow();
|
||||
LOGGER.debug("Execution Exception during process", ex);
|
||||
throw new UpdateException(ex);
|
||||
} finally {
|
||||
processExecutor.shutdown();
|
||||
}
|
||||
}
|
||||
|
||||
if (maxUpdates >= 1) { //ensure the modified file date gets written (we may not have actually updated it)
|
||||
getProperties().save(updateable.get(MODIFIED));
|
||||
LOGGER.info("Begin database maintenance.");
|
||||
getCveDB().cleanupDatabase();
|
||||
LOGGER.info("End database maintenance.");
|
||||
}
|
||||
} finally {
|
||||
closeDataStores();
|
||||
if (maxUpdates >= 1) { //ensure the modified file date gets written (we may not have actually updated it)
|
||||
getProperties().save(updateable.get(MODIFIED));
|
||||
LOGGER.info("Begin database maintenance.");
|
||||
getCveDB().cleanupDatabase();
|
||||
LOGGER.info("End database maintenance.");
|
||||
}
|
||||
}
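
The restructured update keeps two pools: a small pool that downloads the NVD files and a single-threaded executor that imports them, so each download task returns a `Future` of a processing task that is unwrapped afterwards (the "future future" in the comment above). A stripped-down sketch of that shape with generic callables rather than the project's task classes:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class TwoStageExecutorSketch {
    public static void main(String[] args) throws Exception {
        final ExecutorService downloaders = Executors.newFixedThreadPool(3);
        final ExecutorService processor = Executors.newSingleThreadExecutor();
        final List<Future<Future<String>>> downloads = new ArrayList<Future<Future<String>>>();

        for (int i = 0; i < 5; i++) {
            final int id = i;
            // Each "download" finishes by queueing a "process" step on the single-threaded executor
            downloads.add(downloaders.submit(new Callable<Future<String>>() {
                public Future<String> call() {
                    return processor.submit(new Callable<String>() {
                        public String call() {
                            return "processed " + id;
                        }
                    });
                }
            }));
        }
        downloaders.shutdown();

        // Unwrap the future-of-a-future, then wait for the processing result
        for (Future<Future<String>> download : downloads) {
            System.out.println(download.get().get());
        }
        processor.shutdown();
    }
}
```
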
|
||||
|
||||
|
||||
@@ -55,8 +55,9 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
|
||||
* @param nvdCveInfo the NVD CVE info
|
||||
* @param processor the processor service to submit the downloaded files to
|
||||
* @param cveDB the CVE DB to use to store the vulnerability data
|
||||
* @param settings a reference to the global settings object; this is necessary so that when the thread is started the
|
||||
* dependencies have a correct reference to the global settings.
|
||||
* @param settings a reference to the global settings object; this is
|
||||
* necessary so that when the thread is started the dependencies have a
|
||||
* correct reference to the global settings.
|
||||
* @throws UpdateException thrown if temporary files could not be created
|
||||
*/
|
||||
public DownloadTask(NvdCveInfo nvdCveInfo, ExecutorService processor, CveDB cveDB, Settings settings) throws UpdateException {
|
||||
@@ -205,25 +206,13 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
|
||||
* Attempts to delete the files that were downloaded.
|
||||
*/
|
||||
public void cleanup() {
|
||||
boolean deleted = false;
|
||||
try {
|
||||
if (first != null && first.exists()) {
|
||||
deleted = first.delete();
|
||||
}
|
||||
} finally {
|
||||
if (first != null && (first.exists() || !deleted)) {
|
||||
first.deleteOnExit();
|
||||
}
|
||||
if (first != null && first.exists() && first.delete()) {
|
||||
LOGGER.debug("Failed to delete first temporary file {}", second.toString());
|
||||
first.deleteOnExit();
|
||||
}
|
||||
try {
|
||||
deleted = false;
|
||||
if (second != null && second.exists()) {
|
||||
deleted = second.delete();
|
||||
}
|
||||
} finally {
|
||||
if (second != null && (second.exists() || !deleted)) {
|
||||
second.deleteOnExit();
|
||||
}
|
||||
if (second != null && second.exists() && !second.delete()) {
|
||||
LOGGER.debug("Failed to delete second temporary file {}", second.toString());
|
||||
second.deleteOnExit();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -268,7 +257,8 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the file contained in a gzip archive. The extracted file is placed in the exact same path as the file specified.
|
||||
* Extracts the file contained in a gzip archive. The extracted file is
|
||||
* placed in the exact same path as the file specified.
|
||||
*
|
||||
* @param file the archive file
|
||||
* @throws FileNotFoundException thrown if the file does not exist
|
||||
@@ -278,6 +268,7 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
|
||||
final String originalPath = file.getPath();
|
||||
final File gzip = new File(originalPath + ".gz");
|
||||
if (gzip.isFile() && !gzip.delete()) {
|
||||
LOGGER.debug("Failed to delete initial temporary file when extracting 'gz' {}", gzip.toString());
|
||||
gzip.deleteOnExit();
|
||||
}
|
||||
if (!file.renameTo(gzip)) {
|
||||
@@ -312,8 +303,9 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
|
||||
LOGGER.trace("ignore", ex);
|
||||
}
|
||||
}
|
||||
if (gzip.isFile()) {
|
||||
FileUtils.deleteQuietly(gzip);
|
||||
if (gzip.isFile() && !FileUtils.deleteQuietly(gzip)) {
|
||||
LOGGER.debug("Failed to delete temporary file when extracting 'gz' {}", gzip.toString());
|
||||
gzip.deleteOnExit();
|
||||
}
|
||||
}
|
||||
}
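
Both hunks above tighten the cleanup around `extractGzip`, which renames the download to `<name>.gz`, decompresses it back to the original path, and then removes the temporary archive. A bare-bones sketch of that in-place gunzip flow, with an example path and buffer size:

```java
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.zip.GZIPInputStream;

public class ExtractGzipSketch {
    /** Decompresses a downloaded gzip file in place, leaving the plain file at the original path. */
    static void extractGzip(File file) throws IOException {
        final File gzip = new File(file.getPath() + ".gz");
        if (!file.renameTo(gzip)) {
            throw new IOException("Unable to rename " + file + " to " + gzip);
        }
        InputStream in = null;
        OutputStream out = null;
        try {
            in = new GZIPInputStream(new FileInputStream(gzip));
            out = new FileOutputStream(file);
            final byte[] buffer = new byte[4096];
            int read;
            while ((read = in.read(buffer)) > 0) {
                out.write(buffer, 0, read);
            }
        } finally {
            if (in != null) {
                in.close();
            }
            if (out != null) {
                out.close();
            }
            // Mirror the cleanup above: fall back to deleteOnExit when the delete fails
            if (gzip.isFile() && !gzip.delete()) {
                gzip.deleteOnExit();
            }
        }
    }

    public static void main(String[] args) throws IOException {
        extractGzip(new File("nvdcve-2.0-modified.xml")); // example path only
    }
}
```
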
|
||||
|
||||
@@ -38,7 +38,8 @@ import org.slf4j.LoggerFactory;
|
||||
import org.xml.sax.SAXException;
|
||||
|
||||
/**
|
||||
* A callable task that will process a given set of NVD CVE xml files and update the Cve Database accordingly.
|
||||
* A callable task that will process a given set of NVD CVE xml files and update
|
||||
* the Cve Database accordingly.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@@ -91,9 +92,11 @@ public class ProcessTask implements Callable<ProcessTask> {
|
||||
* Constructs a new ProcessTask used to process an NVD CVE update.
|
||||
*
|
||||
* @param cveDB the data store object
|
||||
* @param filePair the download task that contains the URL references to download
|
||||
* @param settings a reference to the global settings object; this is necessary so that when the thread is started the
|
||||
* dependencies have a correct reference to the global settings.
|
||||
* @param filePair the download task that contains the URL references to
|
||||
* download
|
||||
* @param settings a reference to the global settings object; this is
|
||||
* necessary so that when the thread is started the dependencies have a
|
||||
* correct reference to the global settings.
|
||||
*/
|
||||
public ProcessTask(final CveDB cveDB, final DownloadTask filePair, Settings settings) {
|
||||
this.cveDB = cveDB;
|
||||
@@ -106,8 +109,8 @@ public class ProcessTask implements Callable<ProcessTask> {
|
||||
* Implements the callable interface.
|
||||
*
|
||||
* @return this object
|
||||
* @throws Exception thrown if there is an exception; note that any UpdateExceptions are simply added to the tasks exception
|
||||
* collection
|
||||
* @throws Exception thrown if there is an exception; note that any
|
||||
* UpdateExceptions are simply added to the tasks exception collection
|
||||
*/
|
||||
@Override
|
||||
public ProcessTask call() throws Exception {
|
||||
@@ -127,17 +130,20 @@ public class ProcessTask implements Callable<ProcessTask> {
|
||||
*
|
||||
* @param file the file containing the NVD CVE XML
|
||||
* @param oldVersion contains the file containing the NVD CVE XML 1.2
|
||||
* @throws ParserConfigurationException is thrown if there is a parser configuration exception
|
||||
* @throws ParserConfigurationException is thrown if there is a parser
|
||||
* configuration exception
|
||||
* @throws SAXException is thrown if there is a SAXException
|
||||
* @throws IOException is thrown if there is a IO Exception
|
||||
* @throws SQLException is thrown if there is a SQL exception
|
||||
* @throws DatabaseException is thrown if there is a database exception
|
||||
* @throws ClassNotFoundException thrown if the h2 database driver cannot be loaded
|
||||
* @throws ClassNotFoundException thrown if the h2 database driver cannot be
|
||||
* loaded
|
||||
*/
|
||||
protected void importXML(File file, File oldVersion) throws ParserConfigurationException,
|
||||
SAXException, IOException, SQLException, DatabaseException, ClassNotFoundException {
|
||||
|
||||
final SAXParserFactory factory = SAXParserFactory.newInstance();
|
||||
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
|
||||
final SAXParser saxParser = factory.newSAXParser();
|
||||
|
||||
final NvdCve12Handler cve12Handler = new NvdCve12Handler();
|
||||
@@ -153,7 +159,8 @@ public class ProcessTask implements Callable<ProcessTask> {
|
||||
/**
|
||||
* Processes the NVD CVE XML file and imports the data into the DB.
|
||||
*
|
||||
* @throws UpdateException thrown if there is an error loading the data into the database
|
||||
* @throws UpdateException thrown if there is an error loading the data into
|
||||
* the database
|
||||
*/
|
||||
private void processFiles() throws UpdateException {
|
||||
LOGGER.info("Processing Started for NVD CVE - {}", filePair.getNvdCveInfo().getId());
|
||||
@@ -180,6 +187,6 @@ public class ProcessTask implements Callable<ProcessTask> {
|
||||
filePair.cleanup();
|
||||
}
|
||||
LOGGER.info("Processing Complete for NVD CVE - {} ({} ms)", filePair.getNvdCveInfo().getId(),
|
||||
System.currentTimeMillis() - startProcessing);
|
||||
System.currentTimeMillis() - startProcessing);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -53,9 +53,9 @@ public class Evidence implements Serializable, Comparable<Evidence> {
|
||||
/**
|
||||
* Creates a new Evidence objects.
|
||||
*
|
||||
* @param source the source of the evidence.
|
||||
* @param name the name of the evidence.
|
||||
* @param value the value of the evidence.
|
||||
* @param source the source of the evidence.
|
||||
* @param name the name of the evidence.
|
||||
* @param value the value of the evidence.
|
||||
* @param confidence the confidence of the evidence.
|
||||
*/
|
||||
public Evidence(String source, String name, String value, Confidence confidence) {
|
||||
@@ -127,9 +127,11 @@ public class Evidence implements Serializable, Comparable<Evidence> {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the value of value. If setUsed is set to false this call to get will not mark the evidence as used.
|
||||
* Get the value of value. If setUsed is set to false this call to get will
|
||||
* not mark the evidence as used.
|
||||
*
|
||||
* @param setUsed whether or not this call to getValue should cause the used flag to be updated
|
||||
* @param setUsed whether or not this call to getValue should cause the used
|
||||
* flag to be updated
|
||||
* @return the value of value
|
||||
*/
|
||||
public String getValue(Boolean setUsed) {
|
||||
@@ -200,11 +202,11 @@ public class Evidence implements Serializable, Comparable<Evidence> {
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return new HashCodeBuilder(MAGIC_HASH_INIT_VALUE, MAGIC_HASH_MULTIPLIER)
|
||||
.append(StringUtils.lowerCase(name))
|
||||
.append(StringUtils.lowerCase(source))
|
||||
.append(StringUtils.lowerCase(value))
|
||||
.append(confidence)
|
||||
.toHashCode();
|
||||
.append(StringUtils.lowerCase(name))
|
||||
.append(StringUtils.lowerCase(source))
|
||||
.append(StringUtils.lowerCase(value))
|
||||
.append(confidence)
|
||||
.toHashCode();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -213,6 +215,7 @@ public class Evidence implements Serializable, Comparable<Evidence> {
|
||||
* @param that an object to check the equality of.
|
||||
* @return whether the two objects are equal.
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public boolean equals(Object that) {
|
||||
if (this == that) {
|
||||
@@ -223,6 +226,8 @@ public class Evidence implements Serializable, Comparable<Evidence> {
|
||||
}
|
||||
final Evidence e = (Evidence) that;
|
||||
|
||||
//TODO the call to ObjectUtils.equals needs to be replaced when we
|
||||
//stop supporting Jenkins 1.6 requirement.
|
||||
return StringUtils.equalsIgnoreCase(name, e.name)
|
||||
&& StringUtils.equalsIgnoreCase(source, e.source)
|
||||
&& StringUtils.equalsIgnoreCase(value, e.value)
|
||||
@@ -235,6 +240,7 @@ public class Evidence implements Serializable, Comparable<Evidence> {
|
||||
* @param o the evidence being compared
|
||||
* @return an integer indicating the ordering of the two objects
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public int compareTo(Evidence o) {
|
||||
if (o == null) {
|
||||
@@ -243,6 +249,8 @@ public class Evidence implements Serializable, Comparable<Evidence> {
|
||||
if (StringUtils.equalsIgnoreCase(source, o.source)) {
|
||||
if (StringUtils.equalsIgnoreCase(name, o.name)) {
|
||||
if (StringUtils.equalsIgnoreCase(value, o.value)) {
|
||||
//TODO the call to ObjectUtils.equals needs to be replaced when we
|
||||
//stop supporting Jenkins 1.6 requirement.
|
||||
if (ObjectUtils.equals(confidence, o.confidence)) {
|
||||
return 0; //they are equal
|
||||
} else {
|
||||
@@ -260,10 +268,11 @@ public class Evidence implements Serializable, Comparable<Evidence> {
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper around {@link java.lang.String#compareToIgnoreCase(java.lang.String) String.compareToIgnoreCase} with an
|
||||
* exhaustive, possibly duplicative, check against nulls.
|
||||
* Wrapper around
|
||||
* {@link java.lang.String#compareToIgnoreCase(java.lang.String) String.compareToIgnoreCase}
|
||||
* with an exhaustive, possibly duplicative, check against nulls.
|
||||
*
|
||||
* @param me the value to be compared
|
||||
* @param me the value to be compared
|
||||
* @param other the other value to be compared
|
||||
* @return true if the values are equal; otherwise false
|
||||
*/
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
package org.owasp.dependencycheck.dependency;
|
||||
|
||||
import java.io.Serializable;
|
||||
import org.apache.commons.lang3.builder.CompareToBuilder;
|
||||
|
||||
/**
|
||||
* An external reference for a vulnerability. This contains a name, URL, and a
|
||||
@@ -141,18 +142,10 @@ public class Reference implements Serializable, Comparable<Reference> {
*/
@Override
public int compareTo(Reference o) {
if (source.equals(o.source)) {
if (name.equals(o.name)) {
if (url.equals(o.url)) {
return 0; //they are equal
} else {
return url.compareTo(o.url);
}
} else {
return name.compareTo(o.name);
}
} else {
return source.compareTo(o.source);
}
return new CompareToBuilder()
.append(source, o.source)
.append(name, o.name)
.append(url, o.url)
.toComparison();
}
}
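Reference.compareTo is rewritten from nested if/else blocks to Commons Lang 3's CompareToBuilder, which compares the appended fields in order and short-circuits on the first non-zero result. A small stand-alone illustration of the pattern; the Pair class is hypothetical and not part of dependency-check:

import org.apache.commons.lang3.builder.CompareToBuilder;

public class Pair implements Comparable<Pair> {

    private final String source;
    private final String name;

    public Pair(String source, String name) {
        this.source = source;
        this.name = name;
    }

    @Override
    public int compareTo(Pair o) {
        // Fields are compared in the order appended; the first difference decides.
        return new CompareToBuilder()
                .append(source, o.source)
                .append(name, o.name)
                .toComparison();
    }
}

One behavioral difference worth noting: CompareToBuilder tolerates null fields (a null sorts before a non-null value), whereas the original nested equals/compareTo calls would have thrown a NullPointerException.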
@@ -21,6 +21,7 @@ import java.io.Serializable;
|
||||
import java.util.Set;
|
||||
import java.util.SortedSet;
|
||||
import java.util.TreeSet;
|
||||
import org.apache.commons.lang3.builder.CompareToBuilder;
|
||||
|
||||
/**
|
||||
* Contains the information about a vulnerability.
|
||||
@@ -161,7 +162,8 @@ public class Vulnerability implements Serializable, Comparable<Vulnerability> {
|
||||
* Adds an entry for vulnerable software.
|
||||
*
|
||||
* @param cpe string representation of a cpe
|
||||
* @param previousVersion the previous version (previousVersion - cpe would be considered vulnerable)
|
||||
* @param previousVersion the previous version (previousVersion - cpe would
|
||||
* be considered vulnerable)
|
||||
* @return if the add succeeded
|
||||
*/
|
||||
public boolean addVulnerableSoftware(String cpe, String previousVersion) {
|
||||
@@ -390,28 +392,32 @@ public class Vulnerability implements Serializable, Comparable<Vulnerability> {
|
||||
sb.append(this.name);
|
||||
sb.append("\nReferences:\n");
|
||||
for (Reference reference : this.references) {
|
||||
sb.append("=> ");
|
||||
sb.append(reference);
|
||||
sb.append("\n");
|
||||
sb.append("=> ");
|
||||
sb.append(reference);
|
||||
sb.append("\n");
|
||||
}
|
||||
sb.append("\nSoftware:\n");
|
||||
for (VulnerableSoftware software : this.vulnerableSoftware) {
|
||||
sb.append("=> ");
|
||||
sb.append(software);
|
||||
sb.append("\n");
|
||||
sb.append("=> ");
|
||||
sb.append(software);
|
||||
sb.append("\n");
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Compares two vulnerabilities.
|
||||
*
|
||||
* @param v a vulnerability to be compared
|
||||
* @return a negative integer, zero, or a positive integer as this object is less than, equal to, or greater than
|
||||
* the specified vulnerability
|
||||
* @return a negative integer, zero, or a positive integer as this object is
|
||||
* less than, equal to, or greater than the specified vulnerability
|
||||
*/
|
||||
@Override
|
||||
public int compareTo(Vulnerability v) {
|
||||
return v.getName().compareTo(this.getName());
|
||||
return new CompareToBuilder()
|
||||
.append(this.name, v.name)
|
||||
.toComparison();
|
||||
//return v.getName().compareTo(this.getName());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -427,8 +433,8 @@ public class Vulnerability implements Serializable, Comparable<Vulnerability> {
|
||||
* Sets the CPE that caused this vulnerability to be flagged.
|
||||
*
|
||||
* @param cpeId a CPE identifier
|
||||
* @param previous a flag indicating whether or not all previous versions were affected (any non-null value is
|
||||
* considered true)
|
||||
* @param previous a flag indicating whether or not all previous versions
|
||||
* were affected (any non-null value is considered true)
|
||||
*/
|
||||
public void setMatchedCPE(String cpeId, String previous) {
|
||||
matchedCPE = cpeId;
|
||||
|
||||
@@ -25,7 +25,7 @@ import java.util.List;
|
||||
/**
|
||||
* A collection of several exceptions.
|
||||
*
|
||||
* @author Jeremy Lomg
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class ExceptionCollection extends Exception {
|
||||
|
||||
@@ -54,7 +54,7 @@ public class ExceptionCollection extends Exception {
|
||||
* Instantiates a new exception collection.
|
||||
*
|
||||
* @param exceptions a list of exceptions
|
||||
* @param fatal indicates if the exception that occurred is fatal - meaning
|
||||
* @param fatal indicates if any of the exceptions that occurred is fatal - meaning
|
||||
* that no analysis was performed.
|
||||
*/
|
||||
public ExceptionCollection(List<Throwable> exceptions, boolean fatal) {
|
||||
@@ -68,7 +68,7 @@ public class ExceptionCollection extends Exception {
|
||||
*
|
||||
* @param msg the exception message
|
||||
* @param exceptions a list of exceptions
|
||||
* @param fatal indicates if the exception that occurred is fatal - meaning
|
||||
* @param fatal indicates if any of the exceptions that occurred is fatal - meaning
|
||||
* that no analysis was performed.
|
||||
*/
|
||||
public ExceptionCollection(String msg, List<Throwable> exceptions, boolean fatal) {
|
||||
@@ -90,7 +90,8 @@ public class ExceptionCollection extends Exception {
|
||||
this.exceptions.add(exceptions);
|
||||
this.fatal = fatal;
|
||||
}
|
||||
/**
|
||||
|
||||
/**
|
||||
* Instantiates a new exception collection.
|
||||
*
|
||||
* @param msg the exception message
|
||||
@@ -180,7 +181,7 @@ public class ExceptionCollection extends Exception {
|
||||
*/
|
||||
@Override
|
||||
public void printStackTrace(PrintWriter s) {
|
||||
s.println("Multiple Exceptions Occured");
|
||||
s.println("Multiple Exceptions Occurred");
|
||||
super.printStackTrace(s);
|
||||
for (Throwable t : this.exceptions) {
|
||||
s.println("Next Exception:");
|
||||
@@ -195,7 +196,7 @@ public class ExceptionCollection extends Exception {
|
||||
*/
|
||||
@Override
|
||||
public void printStackTrace(PrintStream s) {
|
||||
s.println("Multiple Exceptions Occured");
|
||||
s.println("Multiple Exceptions Occurred");
|
||||
super.printStackTrace(s);
|
||||
for (Throwable t : this.exceptions) {
|
||||
s.println("Next Exception:");
|
||||
@@ -204,11 +205,23 @@ public class ExceptionCollection extends Exception {
}

/**
* Prints the stack trace to standard error.
* Returns the error message, including the message from all contained
* exceptions.
*
* @return the error message
*/
@Override
public void printStackTrace() {
this.printStackTrace(System.err);
public String getMessage() {
final StringBuilder sb = new StringBuilder();
final String msg = super.getMessage();
if (msg == null || msg.isEmpty()) {
sb.append("One or more exceptions occurred during analysis:");
} else {
sb.append(msg);
}
for (Throwable t : this.exceptions) {
sb.append("\n\t").append(t.getMessage());
}
return sb.toString();
}

}
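With this change the collection no longer overrides printStackTrace() merely to redirect to System.err; instead getMessage() aggregates the top-level message (or a default one) with the message of every wrapped exception, one per line. A self-contained sketch of the same aggregation logic; the class name and sample exceptions below are illustrative only:

import java.util.Arrays;
import java.util.List;

public final class MessageAggregationDemo {

    public static void main(String[] args) {
        final List<Throwable> wrapped = Arrays.asList(
                new IllegalStateException("analyzer A failed"),
                new IllegalArgumentException("analyzer B failed"));
        final StringBuilder sb = new StringBuilder("One or more exceptions occurred during analysis:");
        for (Throwable t : wrapped) {
            // One indented line per wrapped exception, matching the patched getMessage().
            sb.append("\n\t").append(t.getMessage());
        }
        System.out.println(sb.toString());
    }
}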
@@ -26,14 +26,15 @@ import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Simple object to track the parts of a version number. The parts are contained in a List such that version 1.2.3 will
|
||||
* be stored as: <code>versionParts[0] = 1;
|
||||
* Simple object to track the parts of a version number. The parts are contained
|
||||
* in a List such that version 1.2.3 will be stored as: <code>versionParts[0] = 1;
|
||||
* versionParts[1] = 2;
|
||||
* versionParts[2] = 3;
|
||||
* </code></p>
|
||||
* <p>
|
||||
* Note, the parser contained in this class expects the version numbers to be separated by periods. If a different
|
||||
* separator is used the parser will likely fail.</p>
|
||||
* Note, the parser contained in this class expects the version numbers to be
|
||||
* separated by periods. If a different separator is used the parser will likely
|
||||
* fail.</p>
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@@ -47,8 +48,9 @@ public class DependencyVersion implements Iterable<String>, Comparable<Dependenc
|
||||
|
||||
/**
|
||||
* Constructor for a DependencyVersion that will parse a version string.
|
||||
* <b>Note</b>, this should only be used when the version passed in is already known to be a well formatted version
|
||||
* number. Otherwise, DependencyVersionUtil.parseVersion() should be used instead.
|
||||
* <b>Note</b>, this should only be used when the version passed in is
|
||||
* already known to be a well formatted version number. Otherwise,
|
||||
* DependencyVersionUtil.parseVersion() should be used instead.
|
||||
*
|
||||
* @param version the well formatted version number to parse
|
||||
*/
|
||||
@@ -57,8 +59,9 @@ public class DependencyVersion implements Iterable<String>, Comparable<Dependenc
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a version string into its sub parts: major, minor, revision, build, etc. <b>Note</b>, this should only be
|
||||
* used to parse something that is already known to be a version number.
|
||||
* Parses a version string into its sub parts: major, minor, revision,
|
||||
* build, etc. <b>Note</b>, this should only be used to parse something that
|
||||
* is already known to be a version number.
|
||||
*
|
||||
* @param version the version string to parse
|
||||
*/
|
||||
@@ -133,26 +136,33 @@ public class DependencyVersion implements Iterable<String>, Comparable<Dependenc
|
||||
return false;
|
||||
}
|
||||
final DependencyVersion other = (DependencyVersion) obj;
|
||||
final int max = (this.versionParts.size() < other.versionParts.size())
|
||||
final int minVersionMatchLength = (this.versionParts.size() < other.versionParts.size())
|
||||
? this.versionParts.size() : other.versionParts.size();
|
||||
final int maxVersionMatchLength = (this.versionParts.size() > other.versionParts.size())
|
||||
? this.versionParts.size() : other.versionParts.size();
|
||||
|
||||
if (minVersionMatchLength == 1 && maxVersionMatchLength >= 3) {
|
||||
return false;
|
||||
}
|
||||
|
||||
//TODO steal better version of code from compareTo
|
||||
for (int i = 0; i < max; i++) {
|
||||
for (int i = 0; i < minVersionMatchLength; i++) {
|
||||
final String thisPart = this.versionParts.get(i);
|
||||
final String otherPart = other.versionParts.get(i);
|
||||
if (!thisPart.equals(otherPart)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (this.versionParts.size() > max) {
|
||||
for (int i = max; i < this.versionParts.size(); i++) {
|
||||
if (this.versionParts.size() > minVersionMatchLength) {
|
||||
for (int i = minVersionMatchLength; i < this.versionParts.size(); i++) {
|
||||
if (!"0".equals(this.versionParts.get(i))) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (other.versionParts.size() > max) {
|
||||
for (int i = max; i < other.versionParts.size(); i++) {
|
||||
if (other.versionParts.size() > minVersionMatchLength) {
|
||||
for (int i = minVersionMatchLength; i < other.versionParts.size(); i++) {
|
||||
if (!"0".equals(other.versionParts.get(i))) {
|
||||
return false;
|
||||
}
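The renamed minVersionMatchLength/maxVersionMatchLength variables make the matching rule explicit: the shared prefix of the two version part lists must match exactly, any extra trailing parts on either side must be "0", and a bare major version is never treated as equal to a version with three or more parts. A compact stand-alone sketch of that rule; the method and class names are illustrative, not project code:

import java.util.Arrays;
import java.util.List;

public final class VersionMatchDemo {

    // Returns true when the two part lists match under the rule described above.
    static boolean looselyEqual(List<String> a, List<String> b) {
        final int min = Math.min(a.size(), b.size());
        final int max = Math.max(a.size(), b.size());
        if (min == 1 && max >= 3) {
            return false; // "2" is not considered a match for "2.0.0"
        }
        for (int i = 0; i < min; i++) {
            if (!a.get(i).equals(b.get(i))) {
                return false;
            }
        }
        final List<String> longer = a.size() >= b.size() ? a : b;
        for (int i = min; i < longer.size(); i++) {
            if (!"0".equals(longer.get(i))) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        System.out.println(looselyEqual(Arrays.asList("1", "2", "3"), Arrays.asList("1", "2", "3", "0"))); // true
        System.out.println(looselyEqual(Arrays.asList("2"), Arrays.asList("2", "0", "0")));                // false
    }
}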
@@ -180,8 +190,9 @@ public class DependencyVersion implements Iterable<String>, Comparable<Dependenc
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if the three most major major version parts are identical. For instances, if version 1.2.3.4 was
|
||||
* compared to 1.2.3 this function would return true.
|
||||
* Determines if the three most major major version parts are identical. For
|
||||
* instances, if version 1.2.3.4 was compared to 1.2.3 this function would
|
||||
* return true.
|
||||
*
|
||||
* @param version the version number to compare
|
||||
* @return true if the first three major parts of the version are identical
|
||||
|
||||
@@ -24,7 +24,8 @@ import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* A utility class to extract version numbers from file names (or other strings containing version numbers.</p>
|
||||
* A utility class to extract version numbers from file names (or other strings
|
||||
* containing version numbers.</p>
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@@ -35,11 +36,19 @@ public final class DependencyVersionUtil {
|
||||
*/
|
||||
private static final Pattern RX_VERSION = Pattern.compile("\\d+(\\.\\d{1,6})+(\\.?([_-](release|beta|alpha|\\d+)|[a-zA-Z_-]{1,3}\\d{0,8}))?");
|
||||
/**
|
||||
* Regular expression to extract a single version number without periods. This is a last ditch effort just to check in case we
|
||||
* are missing a version number using the previous regex.
|
||||
* Regular expression to extract a single version number without periods.
|
||||
* This is a last ditch effort just to check in case we are missing a
|
||||
* version number using the previous regex.
|
||||
*/
|
||||
private static final Pattern RX_SINGLE_VERSION = Pattern.compile("\\d+(\\.?([_-](release|beta|alpha)|[a-zA-Z_-]{1,3}\\d{1,8}))?");
|
||||
|
||||
/**
|
||||
* Regular expression to extract the part before the version numbers if
|
||||
* there are any based on RX_VERSION. In most cases, this part represents a
|
||||
* more accurate name.
|
||||
*/
|
||||
private static final Pattern RX_PRE_VERSION = Pattern.compile("^(.+)[_-](\\d+\\.\\d{1,6})+");
|
||||
|
||||
/**
|
||||
* Private constructor for utility class.
|
||||
*/
|
||||
@@ -48,7 +57,8 @@ public final class DependencyVersionUtil {
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* A utility class to extract version numbers from file names (or other strings containing version numbers.</p>
|
||||
* A utility class to extract version numbers from file names (or other
|
||||
* strings containing version numbers.</p>
|
||||
* <pre>
|
||||
* Example:
|
||||
* Give the file name: library-name-1.4.1r2-release.jar
|
||||
@@ -95,4 +105,30 @@ public final class DependencyVersionUtil {
|
||||
}
|
||||
return new DependencyVersion(version);
|
||||
}

/**
* <p>
* A utility class to extract the part before version numbers from file
* names (or other strings containing version numbers. In most cases, this
* part represents a more accurate name than the full file name.</p>
* <pre>
* Example:
* Give the file name: library-name-1.4.1r2-release.jar
* This function would return: library-name</pre>
*
* @param text the text being analyzed
* @return the part before the version numbers if any, otherwise return the
* text itself.
*/
public static String parsePreVersion(String text) {
if (parseVersion(text) == null) {
return text;
}

final Matcher matcher = RX_PRE_VERSION.matcher(text);
if (matcher.find()) {
return matcher.group(1);
}
return text;
}
}
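The new RX_PRE_VERSION pattern and parsePreVersion() recover the artifact name that precedes the version string. The example from the Javadoc above can be reproduced directly with a stand-alone demo (the regex is copied from the hunk; the class name is illustrative):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public final class PreVersionDemo {

    private static final Pattern RX_PRE_VERSION = Pattern.compile("^(.+)[_-](\\d+\\.\\d{1,6})+");

    public static void main(String[] args) {
        final Matcher matcher = RX_PRE_VERSION.matcher("library-name-1.4.1r2-release.jar");
        // Prints "library-name", the part before the version number.
        System.out.println(matcher.find() ? matcher.group(1) : "no version found");
    }
}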
@@ -62,9 +62,17 @@ public class HintHandler extends DefaultHandler {
|
||||
*/
|
||||
private static final String DUPLICATE = "duplicate";
|
||||
/**
|
||||
* Attribute name.
|
||||
* Attribute value.
|
||||
*/
|
||||
private static final String VENDOR = "vendor";
|
||||
/**
|
||||
* Attribute value.
|
||||
*/
|
||||
private static final String PRODUCT = "product";
|
||||
/**
|
||||
* Attribute value.
|
||||
*/
|
||||
private static final String VERSION = "version";
|
||||
/**
|
||||
* Attribute name.
|
||||
*/
|
||||
@@ -168,16 +176,25 @@ public class HintHandler extends DefaultHandler {
|
||||
attr.getValue(VALUE),
|
||||
Confidence.valueOf(attr.getValue(CONFIDENCE)));
|
||||
}
|
||||
} else if (inAddNode) {
|
||||
rule.addAddProduct(attr.getValue(SOURCE),
|
||||
attr.getValue(NAME),
|
||||
attr.getValue(VALUE),
|
||||
Confidence.valueOf(attr.getValue(CONFIDENCE)));
|
||||
} else {
|
||||
rule.addGivenProduct(attr.getValue(SOURCE),
|
||||
attr.getValue(NAME),
|
||||
attr.getValue(VALUE),
|
||||
Confidence.valueOf(attr.getValue(CONFIDENCE)));
|
||||
} else if (PRODUCT.equals(hintType)) {
|
||||
if (inAddNode) {
|
||||
rule.addAddProduct(attr.getValue(SOURCE),
|
||||
attr.getValue(NAME),
|
||||
attr.getValue(VALUE),
|
||||
Confidence.valueOf(attr.getValue(CONFIDENCE)));
|
||||
} else {
|
||||
rule.addGivenProduct(attr.getValue(SOURCE),
|
||||
attr.getValue(NAME),
|
||||
attr.getValue(VALUE),
|
||||
Confidence.valueOf(attr.getValue(CONFIDENCE)));
|
||||
}
|
||||
} else if (VERSION.equals(hintType)) {
|
||||
if (inAddNode) {
|
||||
rule.addAddVersion(attr.getValue(SOURCE),
|
||||
attr.getValue(NAME),
|
||||
attr.getValue(VALUE),
|
||||
Confidence.valueOf(attr.getValue(CONFIDENCE)));
|
||||
}
|
||||
}
|
||||
} else if (FILE_NAME.equals(qName)) {
|
||||
final PropertyType pt = new PropertyType();
|
||||
|
||||
@@ -64,7 +64,7 @@ public class HintParser {
|
||||
/**
|
||||
* The schema for the hint XML files.
|
||||
*/
|
||||
private static final String HINT_SCHEMA = "schema/dependency-hint.1.0.xsd";
|
||||
private static final String HINT_SCHEMA = "schema/dependency-hint.1.1.xsd";
|
||||
|
||||
/**
|
||||
* Parses the given XML file and returns a list of the hints contained.
|
||||
@@ -104,10 +104,12 @@ public class HintParser {
|
||||
* @throws SAXException thrown if the XML cannot be parsed
|
||||
*/
|
||||
public Hints parseHints(InputStream inputStream) throws HintParseException, SAXException {
|
||||
InputStream schemaStream = null;
|
||||
try {
|
||||
final InputStream schemaStream = this.getClass().getClassLoader().getResourceAsStream(HINT_SCHEMA);
|
||||
schemaStream = this.getClass().getClassLoader().getResourceAsStream(HINT_SCHEMA);
|
||||
final HintHandler handler = new HintHandler();
|
||||
final SAXParserFactory factory = SAXParserFactory.newInstance();
|
||||
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
|
||||
factory.setNamespaceAware(true);
|
||||
factory.setValidating(true);
|
||||
final SAXParser saxParser = factory.newSAXParser();
|
||||
@@ -141,6 +143,14 @@ public class HintParser {
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
throw new HintParseException(ex);
|
||||
} finally {
|
||||
if (schemaStream != null) {
|
||||
try {
|
||||
schemaStream.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Error closing hint file stream", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
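The added finally block guarantees the classpath stream for the hint schema is closed even when parsing fails. On a Java 7+ baseline the same guarantee could come from try-with-resources; the sketch below is purely illustrative (the project may still need to support older JVMs, and HintParser itself keeps the explicit finally block):

import java.io.IOException;
import java.io.InputStream;

public final class SchemaStreamDemo {

    public static void main(String[] args) throws IOException {
        // The stream is closed automatically when the try block exits.
        try (InputStream schemaStream = SchemaStreamDemo.class.getClassLoader()
                .getResourceAsStream("schema/dependency-hint.1.1.xsd")) {
            if (schemaStream == null) {
                throw new IOException("dependency-hint.1.1.xsd not found on the classpath");
            }
            // hand schemaStream to the SAX parser here, as parseHints does above
        }
    }
}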
@@ -85,6 +85,15 @@ public class HintRule {
|
||||
*/
|
||||
private final List<Evidence> givenVendor = new ArrayList<Evidence>();
|
||||
|
||||
/**
|
||||
* The list of product evidence to add.
|
||||
*/
|
||||
private final List<Evidence> addProduct = new ArrayList<Evidence>();
|
||||
/**
|
||||
* The list of version evidence to add.
|
||||
*/
|
||||
private final List<Evidence> addVersion = new ArrayList<Evidence>();
|
||||
|
||||
/**
|
||||
* Adds a given vendors to the list of evidence to matched.
|
||||
*
|
||||
@@ -106,11 +115,6 @@ public class HintRule {
|
||||
return givenVendor;
|
||||
}
|
||||
|
||||
/**
|
||||
* The list of product evidence to add.
|
||||
*/
|
||||
private final List<Evidence> addProduct = new ArrayList<Evidence>();
|
||||
|
||||
/**
|
||||
* Adds a given product to the list of evidence to add when matched.
|
||||
*
|
||||
@@ -132,6 +136,27 @@ public class HintRule {
|
||||
return addProduct;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a given version to the list of evidence to add when matched.
|
||||
*
|
||||
* @param source the source of the evidence
|
||||
* @param name the name of the evidence
|
||||
* @param value the value of the evidence
|
||||
* @param confidence the confidence of the evidence
|
||||
*/
|
||||
public void addAddVersion(String source, String name, String value, Confidence confidence) {
|
||||
addVersion.add(new Evidence(source, name, value, confidence));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the value of addVersion.
|
||||
*
|
||||
* @return the value of addVersion
|
||||
*/
|
||||
public List<Evidence> getAddVersion() {
|
||||
return addVersion;
|
||||
}
|
||||
|
||||
/**
|
||||
* The list of vendor hints to add.
|
||||
*/
|
||||
|
||||
@@ -348,7 +348,7 @@ public class Model {
|
||||
* Utility class that can provide values from a Properties object to a
|
||||
* StrSubstitutor.
|
||||
*/
|
||||
private static class PropertyLookup extends StrLookup {
|
||||
private static class PropertyLookup extends StrLookup<String> {
|
||||
|
||||
/**
|
||||
* Reference to the properties to lookup.
|
||||
|
||||
@@ -47,10 +47,12 @@ public class PomParser {
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(PomParser.class);
|
||||
|
||||
/**
|
||||
* Parses the given xml file and returns a Model object containing only the fields dependency-check requires.
|
||||
* Parses the given xml file and returns a Model object containing only the
|
||||
* fields dependency-check requires.
|
||||
*
|
||||
* @param file a pom.xml
|
||||
* @return a Model object containing only the fields dependency-check requires
|
||||
* @return a Model object containing only the fields dependency-check
|
||||
* requires
|
||||
* @throws PomParseException thrown if the xml file cannot be parsed
|
||||
*/
|
||||
public Model parse(File file) throws PomParseException {
|
||||
@@ -73,7 +75,8 @@ public class PomParser {
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the given XML file and returns a Model object containing only the fields dependency-check requires.
|
||||
* Parses the given XML file and returns a Model object containing only the
|
||||
* fields dependency-check requires.
|
||||
*
|
||||
* @param inputStream an InputStream containing suppression rues
|
||||
* @return a list of suppression rules
|
||||
@@ -85,6 +88,7 @@ public class PomParser {
|
||||
final SAXParserFactory factory = SAXParserFactory.newInstance();
|
||||
// factory.setNamespaceAware(true);
|
||||
// factory.setValidating(true);
|
||||
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
|
||||
final SAXParser saxParser = factory.newSAXParser();
|
||||
final XMLReader xmlReader = saxParser.getXMLReader();
|
||||
xmlReader.setContentHandler(handler);
|
||||
|
||||
@@ -48,13 +48,17 @@ public final class PomUtils {
|
||||
*
|
||||
* @param file the pom.xml file
|
||||
* @return returns a
|
||||
* @throws AnalysisException is thrown if there is an exception extracting or parsing the POM {@link Model} object
|
||||
* @throws AnalysisException is thrown if there is an exception extracting
|
||||
* or parsing the POM {@link Model} object
|
||||
*/
|
||||
public static Model readPom(File file) throws AnalysisException {
|
||||
Model model = null;
|
||||
try {
|
||||
final PomParser parser = new PomParser();
|
||||
model = parser.parse(file);
|
||||
final Model model = parser.parse(file);
|
||||
if (model == null) {
|
||||
throw new AnalysisException(String.format("Unable to parse pom '%s'", file.getPath()));
|
||||
}
|
||||
return model;
|
||||
} catch (PomParseException ex) {
|
||||
LOGGER.warn("Unable to parse pom '{}'", file.getPath());
|
||||
LOGGER.debug("", ex);
|
||||
@@ -68,7 +72,6 @@ public final class PomUtils {
|
||||
LOGGER.debug("", ex);
|
||||
throw new AnalysisException(ex);
|
||||
}
|
||||
return model;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -77,7 +80,8 @@ public final class PomUtils {
|
||||
* @param path the path to the pom.xml file within the jar file
|
||||
* @param jar the jar file to extract the pom from
|
||||
* @return returns a
|
||||
* @throws AnalysisException is thrown if there is an exception extracting or parsing the POM {@link Model} object
|
||||
* @throws AnalysisException is thrown if there is an exception extracting
|
||||
* or parsing the POM {@link Model} object
|
||||
*/
|
||||
public static Model readPom(String path, JarFile jar) throws AnalysisException {
|
||||
final ZipEntry entry = jar.getEntry(path);
|
||||
@@ -86,7 +90,9 @@ public final class PomUtils {
|
||||
try {
|
||||
final PomParser parser = new PomParser();
|
||||
model = parser.parse(jar.getInputStream(entry));
|
||||
LOGGER.debug("Read POM {}", path);
|
||||
if (model == null) {
|
||||
throw new AnalysisException(String.format("Unable to parse pom '%s/%s'", jar.getName(), path));
|
||||
}
|
||||
} catch (SecurityException ex) {
|
||||
LOGGER.warn("Unable to parse pom '{}' in jar '{}'; invalid signature", path, jar.getName());
|
||||
LOGGER.debug("", ex);
|
||||
@@ -105,11 +111,13 @@ public final class PomUtils {
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads in the pom file and adds elements as evidence to the given dependency.
|
||||
* Reads in the pom file and adds elements as evidence to the given
|
||||
* dependency.
|
||||
*
|
||||
* @param dependency the dependency being analyzed
|
||||
* @param pomFile the pom file to read
|
||||
* @throws AnalysisException is thrown if there is an exception parsing the pom
|
||||
* @throws AnalysisException is thrown if there is an exception parsing the
|
||||
* pom
|
||||
*/
|
||||
public static void analyzePOM(Dependency dependency, File pomFile) throws AnalysisException {
|
||||
final Model pom = PomUtils.readPom(pomFile);
|
||||
|
||||
@@ -17,8 +17,6 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.xml.suppression;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.xml.sax.ErrorHandler;
|
||||
import org.xml.sax.SAXException;
|
||||
import org.xml.sax.SAXParseException;
|
||||
@@ -33,7 +31,7 @@ public class SuppressionErrorHandler implements ErrorHandler {
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(SuppressionErrorHandler.class);
|
||||
//private static final Logger LOGGER = LoggerFactory.getLogger(SuppressionErrorHandler.class);
|
||||
|
||||
/**
|
||||
* Builds a prettier exception message.
|
||||
|
||||
@@ -121,12 +121,14 @@ public class SuppressionParser {
|
||||
* @throws SAXException thrown if the XML cannot be parsed
|
||||
*/
|
||||
public List<SuppressionRule> parseSuppressionRules(InputStream inputStream) throws SuppressionParseException, SAXException {
|
||||
InputStream schemaStream = null;
|
||||
try {
|
||||
final InputStream schemaStream = this.getClass().getClassLoader().getResourceAsStream(SUPPRESSION_SCHEMA);
|
||||
schemaStream = this.getClass().getClassLoader().getResourceAsStream(SUPPRESSION_SCHEMA);
|
||||
final SuppressionHandler handler = new SuppressionHandler();
|
||||
final SAXParserFactory factory = SAXParserFactory.newInstance();
|
||||
factory.setNamespaceAware(true);
|
||||
factory.setValidating(true);
|
||||
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
|
||||
final SAXParser saxParser = factory.newSAXParser();
|
||||
saxParser.setProperty(SuppressionParser.JAXP_SCHEMA_LANGUAGE, SuppressionParser.W3C_XML_SCHEMA);
|
||||
saxParser.setProperty(SuppressionParser.JAXP_SCHEMA_SOURCE, new InputSource(schemaStream));
|
||||
@@ -157,6 +159,14 @@ public class SuppressionParser {
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
throw new SuppressionParseException(ex);
|
||||
} finally {
|
||||
if (schemaStream != null) {
|
||||
try {
|
||||
schemaStream.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Error closing suppression file stream", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -169,8 +179,9 @@ public class SuppressionParser {
|
||||
* @throws SuppressionParseException if the XML cannot be parsed
|
||||
*/
|
||||
private List<SuppressionRule> parseOldSuppressionRules(InputStream inputStream) throws SuppressionParseException {
|
||||
InputStream schemaStream = null;
|
||||
try {
|
||||
final InputStream schemaStream = this.getClass().getClassLoader().getResourceAsStream(OLD_SUPPRESSION_SCHEMA);
|
||||
schemaStream = this.getClass().getClassLoader().getResourceAsStream(OLD_SUPPRESSION_SCHEMA);
|
||||
final SuppressionHandler handler = new SuppressionHandler();
|
||||
final SAXParserFactory factory = SAXParserFactory.newInstance();
|
||||
factory.setNamespaceAware(true);
|
||||
@@ -200,6 +211,14 @@ public class SuppressionParser {
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
throw new SuppressionParseException(ex);
|
||||
} finally {
|
||||
if (schemaStream != null) {
|
||||
try {
|
||||
schemaStream.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Error closing old suppression file stream", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Binary file not shown.
@@ -22,3 +22,5 @@ org.owasp.dependencycheck.analyzer.RubyGemspecAnalyzer
|
||||
org.owasp.dependencycheck.analyzer.RubyBundlerAnalyzer
|
||||
org.owasp.dependencycheck.analyzer.RubyBundleAuditAnalyzer
|
||||
org.owasp.dependencycheck.analyzer.ComposerLockAnalyzer
|
||||
org.owasp.dependencycheck.analyzer.CocoaPodsAnalyzer
|
||||
org.owasp.dependencycheck.analyzer.SwiftPackageManagerAnalyzer
|
||||
|
||||
@@ -1,75 +1,120 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<hints xmlns="https://jeremylong.github.io/DependencyCheck/dependency-hint.1.0.xsd">
|
||||
<hint>
|
||||
<given>
|
||||
<evidence type="product" source="Manifest" name="Implementation-Title" value="Spring Framework" confidence="HIGH"/>
|
||||
<evidence type="product" source="Manifest" name="Implementation-Title" value="org.springframework.core" confidence="HIGH"/>
|
||||
<evidence type="product" source="Manifest" name="Implementation-Title" value="spring-core" confidence="HIGH"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="product" source="hint analyzer" name="product" value="springsource_spring_framework" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="SpringSource" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="vmware" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="pivotal" confidence="HIGH"/>
|
||||
</add>
|
||||
</hint>
|
||||
<hint>
|
||||
<given>
|
||||
<evidence type="product" source="jar" name="package name" value="springframework" confidence="LOW"/>
|
||||
<fileName contains="spring"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="product" source="hint analyzer" name="product" value="springsource_spring_framework" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="SpringSource" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="vmware" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="pivotal" confidence="HIGH"/>
|
||||
</add>
|
||||
</hint>
|
||||
<hint>
|
||||
<given>
|
||||
<evidence type="product" source="jar" name="package name" value="springframework" confidence="LOW"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="product" source="hint analyzer" name="product" value="springsource_spring_framework" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="vmware" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="pivotal" confidence="HIGH"/>
|
||||
</add>
|
||||
</hint>
|
||||
<hint>
|
||||
<given>
|
||||
<evidence type="product" source="Manifest" name="Bundle-Name" value="Spring Security Core" confidence="MEDIUM"/>
|
||||
<evidence type="product" source="pom" name="artifactid" value="spring-security-core" confidence="HIGH"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="product" source="hint analyzer" name="product" value="springsource_spring_framework" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="SpringSource" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="vmware" confidence="HIGH"/>
|
||||
</add>
|
||||
</hint>
|
||||
<hint>
|
||||
<given>
|
||||
<evidence type="vendor" source="composer.lock" name="vendor" value="symfony" confidence="HIGHEST"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="sensiolabs" confidence="HIGHEST"/>
|
||||
</add>
|
||||
</hint>
|
||||
<hint>
|
||||
<given>
|
||||
<evidence type="vendor" source="composer.lock" name="vendor" value="zendframework" confidence="HIGHEST"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="zend" confidence="HIGHEST"/>
|
||||
</add>
|
||||
</hint>
|
||||
<hint>
|
||||
<given>
|
||||
<evidence type="product" source="composer.lock" name="product" value="zendframework" confidence="HIGHEST"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="zend_framework" confidence="HIGHEST"/>
|
||||
</add>
|
||||
</hint>
|
||||
<vendorDuplicatingHint value="sun" duplicate="oracle"/>
|
||||
<vendorDuplicatingHint value="oracle" duplicate="sun"/>
|
||||
<hints xmlns="https://jeremylong.github.io/DependencyCheck/dependency-hint.1.1.xsd">
|
||||
<hint>
|
||||
<given>
|
||||
<evidence type="product" source="Manifest" name="Implementation-Title" value="Spring Framework" confidence="HIGH"/>
|
||||
<evidence type="product" source="Manifest" name="Implementation-Title" value="org.springframework.core" confidence="HIGH"/>
|
||||
<evidence type="product" source="Manifest" name="Implementation-Title" value="spring-core" confidence="HIGH"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="product" source="hint analyzer" name="product" value="springsource_spring_framework" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="SpringSource" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="vmware" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="pivotal software" confidence="HIGH"/>
|
||||
</add>
|
||||
</hint>
|
||||
<hint>
|
||||
<given>
|
||||
<evidence type="product" source="jar" name="package name" value="springframework" confidence="LOW"/>
|
||||
<fileName contains="spring"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="product" source="hint analyzer" name="product" value="springsource_spring_framework" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="SpringSource" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="vmware" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="pivotal software" confidence="HIGH"/>
|
||||
</add>
|
||||
</hint>
|
||||
<hint>
|
||||
<given>
|
||||
<evidence type="product" source="jar" name="package name" value="springframework" confidence="LOW"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="product" source="hint analyzer" name="product" value="springsource_spring_framework" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="vmware" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="pivotal software" confidence="HIGH"/>
|
||||
</add>
|
||||
</hint>
|
||||
<hint>
|
||||
<given>
|
||||
<evidence type="product" source="Manifest" name="Bundle-Name" value="Spring Security Core" confidence="MEDIUM"/>
|
||||
<evidence type="product" source="pom" name="artifactid" value="spring-security-core" confidence="HIGH"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="product" source="hint analyzer" name="product" value="springsource_spring_framework" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="SpringSource" confidence="HIGH"/>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="vmware" confidence="HIGH"/>
|
||||
</add>
|
||||
</hint>
|
||||
<hint>
|
||||
<given>
|
||||
<evidence type="vendor" source="composer.lock" name="vendor" value="symfony" confidence="HIGHEST"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="sensiolabs" confidence="HIGHEST"/>
|
||||
</add>
|
||||
</hint>
|
||||
<hint>
|
||||
<given>
|
||||
<evidence type="vendor" source="composer.lock" name="vendor" value="zendframework" confidence="HIGHEST"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="zend" confidence="HIGHEST"/>
|
||||
</add>
|
||||
</hint>
|
||||
<hint>
|
||||
<given>
|
||||
<evidence type="product" source="composer.lock" name="product" value="zendframework" confidence="HIGHEST"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="vendor" source="hint analyzer" name="vendor" value="zend_framework" confidence="HIGHEST"/>
|
||||
</add>
|
||||
</hint>
|
||||
|
||||
<!-- begin hack for temporary patch of issue #534-->
|
||||
<hint>
|
||||
<given>
|
||||
<fileName regex="true" contains=".*hibernate-validator-5\.0\..*"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="version" source="hint" name="version" value="5.0" confidence="HIGHEST"/>
|
||||
</add>
|
||||
</hint>
|
||||
<hint>
|
||||
<given>
|
||||
<fileName regex="true" contains=".*hibernate-validator-5\.1\.[01].*"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="version" source="hint" name="version" value="5.1" confidence="HIGHEST"/>
|
||||
</add>
|
||||
</hint>
|
||||
<hint>
|
||||
<given>
|
||||
<fileName regex="true" contains=".*hibernate-validator-4\.1\..*"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="version" source="hint" name="version" value="4.1.0" confidence="HIGHEST"/>
|
||||
</add>
|
||||
</hint>
|
||||
<hint>
|
||||
<given>
|
||||
<fileName regex="true" contains=".*hibernate-validator-4\.2\.0.*"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="version" source="hint" name="version" value="4.2.0" confidence="HIGHEST"/>
|
||||
</add>
|
||||
</hint>
|
||||
<hint>
|
||||
<given>
|
||||
<fileName regex="true" contains=".*hibernate-validator-4\.3\.[01]\..*"/>
|
||||
</given>
|
||||
<add>
|
||||
<evidence type="version" source="hint" name="version" value="4.3.0" confidence="HIGHEST"/>
|
||||
</add>
|
||||
</hint>
|
||||
<!-- end hack for temporary patch of issue #534-->
|
||||
|
||||
|
||||
<vendorDuplicatingHint value="sun" duplicate="oracle"/>
|
||||
<vendorDuplicatingHint value="oracle" duplicate="sun"/>
|
||||
</hints>
|
||||
@@ -8,6 +8,8 @@
|
||||
<cpe>cpe:/a:mod_security:mod_security</cpe>
|
||||
<cpe>cpe:/a:springsource:spring_framework</cpe>
|
||||
<cpe>cpe:/a:vmware:springsource_spring_framework</cpe>
|
||||
<cpe>cpe:/a:pivotal:spring_framework</cpe>
|
||||
<cpe>cpe:/a:pivotal_software:spring_framework</cpe>
|
||||
</suppress>
|
||||
<suppress base="true">
|
||||
<notes><![CDATA[
|
||||
@@ -18,6 +20,7 @@
|
||||
<cpe>cpe:/a:springsource:spring_framework</cpe>
|
||||
<cpe>cpe:/a:vmware:springsource_spring_framework</cpe>
|
||||
<cpe>cpe:/a:pivotal:spring_framework</cpe>
|
||||
<cpe>cpe:/a:pivotal_software:spring_framework</cpe>
|
||||
</suppress>
|
||||
<suppress base="true">
|
||||
<notes><![CDATA[
|
||||
@@ -333,6 +336,8 @@
|
||||
<filePath regex="true">.*\.(jar|ear|war|pom)</filePath>
|
||||
<cpe>cpe:/a:pam:pam</cpe>
|
||||
<cpe>cpe:/a:pam_ssh:pam_ssh</cpe>
|
||||
<cpe>cpe:/a:sun:linux</cpe>
|
||||
<cpe>cpe:/a:sun:sunos</cpe>
|
||||
</suppress>
|
||||
<suppress base="true">
|
||||
<notes><![CDATA[
|
||||
@@ -410,7 +415,7 @@
|
||||
<notes><![CDATA[
|
||||
Aether false positive.
|
||||
]]></notes>
|
||||
<gav regex="true">org.eclipse.aether:aether.*</gav>
|
||||
<gav regex="true">org\.eclipse\.aether:aether.*</gav>
|
||||
<cpe>cpe:/a:eclipse:eclipse_ide</cpe>
|
||||
</suppress>
|
||||
<suppress base="true">
|
||||
@@ -420,4 +425,32 @@
|
||||
<filePath regex="true">.*\.(jar|ear|war|pom)</filePath>
|
||||
<cpe>cpe:/a:services_project:services</cpe>
|
||||
</suppress>
|
||||
<suppress base="true">
|
||||
<notes><![CDATA[
|
||||
jenkins-client false positives
|
||||
]]></notes>
|
||||
<gav regex="true">com\.offbytwo\.jenkins:jenkins-client:.*</gav>
|
||||
<cpe>cpe:/a:jenkins:jenkins</cpe>
|
||||
</suppress>
|
||||
<suppress base="true">
|
||||
<notes><![CDATA[
|
||||
xstream false positives
|
||||
]]></notes>
|
||||
<gav regex="true">^(?!com.thoughtworks).*xstream.*$</gav>
|
||||
<cpe>cpe:/a:x-stream:xstream</cpe>
|
||||
</suppress>
|
||||
<suppress base="true">
|
||||
<notes><![CDATA[
|
||||
false positive per issue #582
|
||||
]]></notes>
|
||||
<gav regex="true">^org\.glassfish\.jersey\.ext:jersey-proxy-client:.*$</gav>
|
||||
<cpe>cpe:/a:oracle:oracle_client</cpe>
|
||||
</suppress>
|
||||
<suppress base="true">
|
||||
<notes><![CDATA[
|
||||
file name: smiley-http-proxy-servlet-1.7.jar
|
||||
]]></notes>
|
||||
<gav regex="true">^org\.mitre\.dsmiley\.httpproxy:smiley-http-proxy-servlet:.*$</gav>
|
||||
<cpe>cpe:/a:shttp:shttp</cpe>
|
||||
</suppress>
|
||||
</suppressions>
|
||||
|
||||
@@ -4,7 +4,7 @@ autoupdate=true
|
||||
max.download.threads=3
|
||||
|
||||
# the url to obtain the current engine version from
|
||||
engine.version.url=http://jeremylong.github.io/DependencyCheck/current.txt
|
||||
engine.version.url=https://jeremylong.github.io/DependencyCheck/current.txt
|
||||
|
||||
#temp.directory defaults to System.getProperty("java.io.tmpdir")
|
||||
#temp.directory=[path to temp directory]
|
||||
@@ -62,7 +62,7 @@ cve.url-2.0.base=https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml.gz
|
||||
cve.cpe.startswith.filter=cpe:/a:
|
||||
|
||||
cpe.validfordays=30
|
||||
cpe.url=http://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.3.xml.gz
|
||||
cpe.url=https://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.3.xml.gz
|
||||
|
||||
# the URL for searching Nexus for SHA-1 hashes and whether it's enabled
|
||||
analyzer.nexus.enabled=true
|
||||
@@ -73,13 +73,14 @@ analyzer.nexus.proxy=true
|
||||
|
||||
# the URL for searching search.maven.org for SHA-1 and whether it's enabled
|
||||
analyzer.central.enabled=true
|
||||
analyzer.central.url=http://search.maven.org/solrsearch/select
|
||||
analyzer.central.url=https://search.maven.org/solrsearch/select
|
||||
|
||||
# the number of nested archives that will be searched.
|
||||
archive.scan.depth=3
|
||||
|
||||
# use HEAD (default) or GET as HTTP request method for query timestamp
|
||||
downloader.quick.query.timestamp=true
|
||||
downloader.tls.protocols=TLSv1,TLSv1.1,TLSv1.2,TLSv1.3
|
||||
|
||||
analyzer.experimental.enabled=false
|
||||
analyzer.jar.enabled=true
|
||||
@@ -96,6 +97,8 @@ analyzer.nuspec.enabled=true
|
||||
analyzer.openssl.enabled=true
|
||||
analyzer.central.enabled=true
|
||||
analyzer.nexus.enabled=false
|
||||
analyzer.cocoapods.enabled=true
|
||||
analyzer.swift.package.manager.enabled=true
|
||||
#whether the nexus analyzer uses the proxy
|
||||
analyzer.nexus.proxy=true
|
||||
|
||||
|
||||
@@ -0,0 +1,82 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<xs:schema id="hints"
|
||||
xmlns:xs="http://www.w3.org/2001/XMLSchema"
|
||||
elementFormDefault="qualified"
|
||||
targetNamespace="https://jeremylong.github.io/DependencyCheck/dependency-hint.1.1.xsd"
|
||||
xmlns:dc="https://jeremylong.github.io/DependencyCheck/dependency-hint.1.1.xsd">
|
||||
|
||||
<xs:simpleType name="givenType">
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:enumeration value="vendor"/>
|
||||
<xs:enumeration value="product"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
<xs:simpleType name="addType">
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:enumeration value="vendor"/>
|
||||
<xs:enumeration value="product"/>
|
||||
<xs:enumeration value="version"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
<xs:simpleType name="confidence">
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:enumeration value="HIGHEST"/>
|
||||
<xs:enumeration value="HIGH"/>
|
||||
<xs:enumeration value="MEDIUM"/>
|
||||
<xs:enumeration value="LOW"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
<xs:complexType name="givenEvidence">
|
||||
<xs:attribute name="type" use="required" type="dc:givenType"/>
|
||||
<xs:attribute name="source" use="required" type="xs:string"/>
|
||||
<xs:attribute name="name" use="required" type="xs:string"/>
|
||||
<xs:attribute name="value" use="required" type="xs:string"/>
|
||||
<xs:attribute name="confidence" use="required" type="dc:confidence"/>
|
||||
</xs:complexType>
|
||||
<xs:complexType name="addEvidence">
|
||||
<xs:attribute name="type" use="required" type="dc:addType"/>
|
||||
<xs:attribute name="source" use="required" type="xs:string"/>
|
||||
<xs:attribute name="name" use="required" type="xs:string"/>
|
||||
<xs:attribute name="value" use="required" type="xs:string"/>
|
||||
<xs:attribute name="confidence" use="required" type="dc:confidence"/>
|
||||
</xs:complexType>
|
||||
<xs:complexType name="fileName">
|
||||
<xs:attribute name="contains" use="required" type="xs:string"/>
|
||||
<xs:attribute name="regex" use="optional" type="xs:boolean" default="false"/>
|
||||
<xs:attribute name="caseSensitive" use="optional" type="xs:boolean" default="false"/>
|
||||
</xs:complexType>
|
||||
<xs:complexType name="given">
|
||||
<xs:choice minOccurs="1" maxOccurs="unbounded">
|
||||
<xs:element name="evidence" type="dc:givenEvidence"/>
|
||||
<xs:element name="fileName" type="dc:fileName"/>
|
||||
</xs:choice>
|
||||
</xs:complexType>
|
||||
<xs:complexType name="add">
|
||||
<xs:sequence minOccurs="1" maxOccurs="unbounded">
|
||||
<xs:element name="evidence" type="dc:addEvidence"/>
|
||||
</xs:sequence>
|
||||
</xs:complexType>
|
||||
<xs:complexType name="hint">
|
||||
<xs:sequence minOccurs="1" maxOccurs="1">
|
||||
<xs:element name="given" type="dc:given"/>
|
||||
<xs:element name="add" type="dc:add"/>
|
||||
</xs:sequence>
|
||||
</xs:complexType>
|
||||
<xs:complexType name="duplicatingHint">
|
||||
<xs:attribute name="value" use="required" type="xs:string"/>
|
||||
<xs:attribute name="duplicate" use="required" type="xs:string"/>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:element name="hints">
|
||||
<xs:complexType>
|
||||
<xs:sequence minOccurs="0" maxOccurs="unbounded">
|
||||
<xs:sequence minOccurs="0" maxOccurs="unbounded">
|
||||
<xs:element name="hint" type="dc:hint"/>
|
||||
</xs:sequence>
|
||||
<xs:sequence minOccurs="0" maxOccurs="unbounded">
|
||||
<xs:element name="vendorDuplicatingHint" type="dc:duplicatingHint"/>
|
||||
</xs:sequence>
|
||||
</xs:sequence>
|
||||
</xs:complexType>
|
||||
</xs:element>
|
||||
</xs:schema>
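The new dependency-hint.1.1.xsd allows "version" as a legal type for added evidence, which is what the hibernate-validator hints above rely on. A quick way to sanity-check a hints file against the schema with the standard JAXP validation API; the file paths below are placeholders, not project paths:

import java.io.File;
import javax.xml.XMLConstants;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;

public final class HintSchemaCheck {

    public static void main(String[] args) throws Exception {
        final SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
        final Schema schema = schemaFactory.newSchema(new File("dependency-hint.1.1.xsd"));
        final Validator validator = schema.newValidator();
        // Throws a SAXException with a line/column if the hints file violates the schema.
        validator.validate(new StreamSource(new File("dependency-hints.xml")));
        System.out.println("hints file is valid against dependency-hint.1.1.xsd");
    }
}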
@@ -83,16 +83,68 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
xml += $("#modal-text").text().replace(/\n/g,'\n ');
|
||||
xml += '\n</suppressions>';
|
||||
$('#modal-text').text(xml).focus().select();
|
||||
$('#modal-add-header').toggleClass('active');
|
||||
});
|
||||
});
|
||||
function copyText(name, sha1, type, val) {
|
||||
function suppressSwitchTo(switchTo) {
|
||||
$('#modal-suppress-change-to-sha1').toggleClass('active');
|
||||
$('#modal-suppress-change-to-gav').toggleClass('active');
|
||||
setCopyText($('#suppress-name').val(),
|
||||
switchTo,
|
||||
$('#suppress-'+switchTo).val(),
|
||||
$('#suppress-type').val(),
|
||||
$('#suppress-val').val());
|
||||
}
|
||||
function copyText(name, sha1, gav, type, val) {
|
||||
$('#suppress-name').val(name);
|
||||
$('#suppress-type').val(type);
|
||||
$('#suppress-val').val(val);
|
||||
$('#suppress-sha1').val(sha1);
|
||||
$('#suppress-gav').val(gav);
|
||||
if (gav=='') {
|
||||
if ($('#modal-suppress-change-to-gav').hasClass('active')) {
|
||||
$('#modal-suppress-change-to-gav').toggleClass('active');
|
||||
}
|
||||
if ($('#modal-suppress-change-to-sha1').hasClass('active')) {
|
||||
$('#modal-suppress-change-to-sha1').toggleClass('active');
|
||||
}
|
||||
setCopyText(name, 'sha1', sha1, type, val);
|
||||
} else {
|
||||
if ($('#modal-suppress-change-to-gav').hasClass('active')) {
|
||||
$('#modal-suppress-change-to-gav').toggleClass('active');
|
||||
}
|
||||
if (!$('#modal-suppress-change-to-sha1').hasClass('active')) {
|
||||
$('#modal-suppress-change-to-sha1').toggleClass('active');
|
||||
}
|
||||
setCopyText(name, 'gav', gav, type, val);
|
||||
}
|
||||
}
|
||||
function setCopyText(name, matchType, matchValue, suppressType, suppressVal) {
|
||||
xml = '<suppress>\n';
|
||||
xml += ' <notes><!'+'[CDATA[\n file name: ' + name + '\n ]]'+'></notes>\n';
|
||||
xml += ' <sha1>' + sha1 + '</sha1>\n';
|
||||
xml += ' <'+type+'>' + val + '</'+type+'>\n';
|
||||
if (matchType=='gav') {
|
||||
v = matchValue.match(/^[^:]+:[^:]+:/);
|
||||
if (v && v[0]) {
|
||||
xml += ' <'+matchType+' regex="true">^' + v[0].replace(/\./g,'\\.') + '.*$</'+matchType+'>\n';
|
||||
} else {
|
||||
xml += ' <'+matchType+'>' + matchValue + '</'+matchType+'>\n';
|
||||
}
|
||||
} else {
|
||||
xml += ' <'+matchType+'>' + matchValue + '</'+matchType+'>\n';
|
||||
}
|
||||
if (suppressType=='cpe') {
|
||||
v = suppressVal.match(/^cpe:\/a:[^:]+:[^:]+/);
|
||||
if (v && v[0]) {
|
||||
xml += ' <'+suppressType+'>' + v[0] + '</'+suppressType+'>\n';
|
||||
} else {
|
||||
xml += ' <'+suppressType+'>' + suppressVal + '</'+suppressType+'>\n';
|
||||
}
|
||||
} else {
|
||||
xml += ' <'+suppressType+'>' + suppressVal + '</'+suppressType+'>\n';
|
||||
}
|
||||
xml += '</suppress>';
|
||||
$('#modal-text').text(xml);
|
||||
$('#modal-content,#modal-background').toggleClass('active');
|
||||
$('#modal-content,#modal-background').addClass('active');
|
||||
$('#modal-text').focus();
|
||||
$('#modal-text').select();
|
||||
}
|
||||
@@ -150,6 +202,12 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
#modal-text:focus {
|
||||
outline: none;
|
||||
}
|
||||
.suppresstype {
|
||||
display: none;
|
||||
}
|
||||
.suppresstype.active {
|
||||
display: block;
|
||||
}
|
||||
.suppressedLabel {
|
||||
cursor: default;
|
||||
padding:1px;
|
||||
@@ -504,6 +562,11 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<div id="modal-background"></div>
|
||||
<div id="modal-content">
|
||||
<div>Press CTR-C to copy XML <a href="http://jeremylong.github.io/DependencyCheck/general/suppression.html" class="infolink" target="_blank" title="Help with suppressing false positives">[help]</a></div>
|
||||
<button onclick="suppressSwitchTo('gav')" id="modal-suppress-change-to-gav" class="modal-button suppresstype" title="Supress by Maven Group Artifact Version">Suppress By GAV</button>
|
||||
<button onclick="suppressSwitchTo('sha1')" id="modal-suppress-change-to-sha1" class="modal-button suppresstype" title="Supress by SHA1 hash">Suppress By SHA1</button><br/>
|
||||
<input type="hidden" id="suppress-name"/>
|
||||
<input type="hidden" id="suppress-type"/><input type="hidden" id="suppress-val"/>
|
||||
<input type="hidden" id="suppress-sha1"/><input type="hidden" id="suppress-gav"/>
|
||||
<textarea id="modal-text" cols="50" rows="10" readonly></textarea><br/>
|
||||
<button id="modal-add-header" title="Add the parent XML nodes to create the complete XML file that can be used to suppress this finding" class="modal-button">Complete XML Doc</button><button id="modal-close" class="modal-button-right">Close</button>
|
||||
</div>
|
||||
@@ -515,6 +578,10 @@ the reporting provided constitutes acceptance for use in an AS IS condition, and
|
||||
implied or otherwise, with regard to the analysis or its use. Any use of the tool and the reporting provided
|
||||
is at the user’s risk. In no event shall the copyright holder or OWASP be held liable for any damages whatsoever
|
||||
arising out of or in connection with the use of this tool, the analysis performed, or the resulting report.</p>
|
||||
<h3><a href="http://jeremylong.github.io/DependencyCheck/general/thereport.html" target="_bank">How to read the report</a> |
|
||||
<a href="http://jeremylong.github.io/DependencyCheck/general/suppression.html" target="_bank">Suppressing false positives</a> |
|
||||
Getting Help: <a href="https://groups.google.com/forum/#!forum/dependency-check" target="_blank">google group</a> |
|
||||
<a href="https://github.com/jeremylong/DependencyCheck/issues" target="_blank">github issues</a></h3>
|
||||
]]#
|
||||
<h2 class="">Project: $enc.html($applicationName)</h2>
|
||||
<div class="">
|
||||
@@ -577,8 +644,8 @@ arising out of or in connection with the use of this tool, the analysis performe
|
||||
#end
|
||||
<td data-sort-value="$sortValue">
|
||||
#set($sortValue="")
|
||||
#set($cpeSort=0)
|
||||
#foreach($id in $dependency.getIdentifiers())
|
||||
#set($cpeSort=0)
|
||||
#if ($id.type=="maven")
|
||||
#if ($mavenlink=="" || !$mavenlink.url)
|
||||
#set($mavenlink=$id)
|
||||
@@ -725,6 +792,12 @@ arising out of or in connection with the use of this tool, the analysis performe
|
||||
<ul><li><b>None</b></li></ul>
|
||||
#else ## ($dependency.getIdentifiers().size()>0)
|
||||
<ul>
|
||||
#set($suppressGav='')
|
||||
#foreach($id in $dependency.getIdentifiers())
|
||||
#if ($id.type=="maven")
|
||||
#set($suppressGav=$id.value)
|
||||
#end
|
||||
#end
|
||||
#foreach($id in $dependency.getIdentifiers())
|
||||
#if( $id.url )
|
||||
##yes, we are HTML Encoding the href. this is okay. We can't URL encode as we have to trust the analyzer here...
|
||||
@@ -737,7 +810,7 @@ arising out of or in connection with the use of this tool, the analysis performe
|
||||
#end
|
||||
#if ($id.type=="cpe")
|
||||
##yes, we are HTML Encoding into JavaScript... the escape utils don't have a JS Encode and I haven't written one yet
|
||||
<button class="copybutton" title="Generate Suppression XML for this CPE for this file" onclick="copyText('$enc.html($dependency.FileNameForJavaScript)', '$enc.html($dependency.Sha1sum)', 'cpe', '$enc.html($id.value)')">suppress</button>
|
||||
<button class="copybutton" title="Generate Suppression XML for this CPE for this file" onclick="copyText('$enc.html($dependency.FileNameForJavaScript)', '$enc.html($dependency.Sha1sum)', '$enc.html($suppressGav)', 'cpe', '$enc.html($id.value)')">suppress</button>
|
||||
#end
|
||||
#if ($id.description)
|
||||
<br/>$enc.html($id.description)
|
||||
@@ -753,7 +826,7 @@ arising out of or in connection with the use of this tool, the analysis performe
|
||||
<div id="content$cnt" class="subsectioncontent standardsubsection">
|
||||
#foreach($vuln in $dependency.getVulnerabilities())
|
||||
#set($vsctr=$vsctr+1)
|
||||
<p><b><a target="_blank" href="http://web.nvd.nist.gov/view/vuln/detail?vulnId=$enc.url($vuln.name)">$enc.html($vuln.name)</a></b> <button class="copybutton" title="Generate Suppression XML for this CCE for this file" onclick="copyText('$enc.html($dependency.FileNameForJavaScript)', '$enc.html($dependency.Sha1sum)', 'cve', '$enc.html($vuln.name)')">suppress</button></p>
|
||||
<p><b><a target="_blank" href="http://web.nvd.nist.gov/view/vuln/detail?vulnId=$enc.url($vuln.name)">$enc.html($vuln.name)</a></b> <button class="copybutton" title="Generate Suppression XML for this CCE for this file" onclick="copyText('$enc.html($dependency.FileNameForJavaScript)', '$enc.html($dependency.Sha1sum)', '$enc.html($suppressGav)', 'cve', '$enc.html($vuln.name)')">suppress</button></p>
|
||||
<p>Severity:
|
||||
#if ($vuln.cvssScore<4.0)
|
||||
Low
|
||||
|
||||
@@ -177,6 +177,11 @@ the reporting provided constitutes acceptance for use in an AS IS condition, and
implied or otherwise, with regard to the analysis or its use. Any use of the tool and the reporting provided
is at the user’s risk. In no event shall the copyright holder or OWASP be held liable for any damages whatsoever
arising out of or in connection with the use of this tool, the analysis performed, or the resulting report.</p>
<h3>About The Vulnerability Report | Getting Help: <a href="https://groups.google.com/forum/#!forum/dependency-check" target="_blank">google group</a> |
<a href="https://github.com/jeremylong/DependencyCheck/issues" target="_blank">github issues</a></h3>
<p>This report is intended to be a quick summary of findings. It is highly recommended that you use the full HTML
report to determine if any <a href="http://jeremylong.github.io/DependencyCheck/general/suppression.html">false positives</a>
have been reported. Additionally, the HTML report provides many features not found in the vulnerability report.</p>
]]#
<h2 class="sectionheader white">Vulnerability Report for $enc.html($applicationName)</h2>
<div class="sectioncontent">Report Generated On: $scanDate<br/><br/>
@@ -190,7 +195,7 @@ arising out of or in connection with the use of this tool, the analysis performe
#end
#end
Dependencies Scanned: $depCount<br/>
Vulnerable Dependencies: $vulnCount<br/><br/>
Vulnerable Dependencies: <span id="volnCount">$vulnCount</span><br/><br/>
<h2>Vulnerable Dependencies</h2>
#set($cnt=0)
<table id="vulnTable" class="lined">
@@ -222,10 +227,10 @@ arising out of or in connection with the use of this tool, the analysis performe
($vuln.cvssScore)
<td>#set($cnt=$cnt+1)
#if($dependency.getRelatedDependencies().size()>0)<span id="header$cnt" class="expandable collapsedList">#end
$enc.html($dependency.DisplayFileName)
<span title="$enc.html($dependency.FilePath)">$enc.html($dependency.DisplayFileName)</span>
#if($dependency.getRelatedDependencies().size()>0) </span><div id="content$cnt" class="hidden">#end
#foreach($related in $dependency.getRelatedDependencies())
$enc.html($related.DisplayFileName)<br/>
<span title="$enc.html($related.FilePath)">$enc.html($related.DisplayFileName)</span><br/>
#end
#if($dependency.getRelatedDependencies().size()>0)</div#end
</td>

@@ -0,0 +1,100 @@
package org.owasp.dependencycheck;

import mockit.Expectations;
import mockit.Mocked;
import mockit.Verifications;
import org.junit.Test;
import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
import org.owasp.dependencycheck.analyzer.HintAnalyzer;
import org.owasp.dependencycheck.dependency.Dependency;

import java.io.File;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

public class AnalysisTaskTest {

@Mocked
FileTypeAnalyzer fileTypeAnalyzer;

@Mocked
Dependency dependency;

@Mocked
Engine engine;

@Test
public void shouldAnalyzeReturnsTrueForNonFileTypeAnalyzers() {
AnalysisTask instance = new AnalysisTask(new HintAnalyzer(), null, null, null);
boolean shouldAnalyze = instance.shouldAnalyze();
assertTrue(shouldAnalyze);
}

@Test
public void shouldAnalyzeReturnsTrueIfTheFileTypeAnalyzersAcceptsTheDependency() {
final File dependencyFile = new File("");
new Expectations() {{
dependency.getActualFile();
result = dependencyFile;

fileTypeAnalyzer.accept(dependencyFile);
result = true;
}};

AnalysisTask analysisTask = new AnalysisTask(fileTypeAnalyzer, dependency, null, null);

boolean shouldAnalyze = analysisTask.shouldAnalyze();
assertTrue(shouldAnalyze);
}

@Test
public void shouldAnalyzeReturnsFalseIfTheFileTypeAnalyzerDoesNotAcceptTheDependency() {
final File dependencyFile = new File("");
new Expectations() {{
dependency.getActualFile();
result = dependencyFile;

fileTypeAnalyzer.accept(dependencyFile);
result = false;
}};

AnalysisTask analysisTask = new AnalysisTask(fileTypeAnalyzer, dependency, null, null);

boolean shouldAnalyze = analysisTask.shouldAnalyze();
assertFalse(shouldAnalyze);
}

@Test
public void taskAnalyzes() throws Exception {
final AnalysisTask analysisTask = new AnalysisTask(fileTypeAnalyzer, dependency, engine, null);
new Expectations(analysisTask) {{
analysisTask.shouldAnalyze();
result = true;
}};

analysisTask.call();

new Verifications() {{
fileTypeAnalyzer.analyze(dependency, engine);
times = 1;
}};
}

@Test
public void taskDoesNothingIfItShouldNotAnalyze() throws Exception {
final AnalysisTask analysisTask = new AnalysisTask(fileTypeAnalyzer, dependency, engine, null);
new Expectations(analysisTask) {{
analysisTask.shouldAnalyze();
result = false;
}};

analysisTask.call();

new Verifications() {{
fileTypeAnalyzer.analyze(dependency, engine);
times = 0;
}};
}
}
@@ -0,0 +1,96 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2016 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck;

import mockit.Expectations;
import mockit.Mocked;
import org.junit.Test;
import org.owasp.dependencycheck.analyzer.Analyzer;
import org.owasp.dependencycheck.analyzer.JarAnalyzer;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.ExceptionCollection;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
* @author Jeremy Long
*/
public class EngineTest extends BaseDBTestCase {

@Mocked
Analyzer analyzer;

@Mocked
AnalysisTask analysisTask;

/**
* Test of scanFile method, of class Engine.
*/
@Test
public void testScanFile() throws DatabaseException {
Engine instance = new Engine();
instance.addFileTypeAnalyzer(new JarAnalyzer());
File file = BaseTest.getResourceAsFile(this, "dwr.jar");
Dependency dwr = instance.scanFile(file);
file = BaseTest.getResourceAsFile(this, "org.mortbay.jmx.jar");
instance.scanFile(file);
assertEquals(2, instance.getDependencies().size());

file = BaseTest.getResourceAsFile(this, "dwr.jar");
Dependency secondDwr = instance.scanFile(file);

assertEquals(2, instance.getDependencies().size());
assertTrue(dwr == secondDwr);
}

@Test(expected = ExceptionCollection.class)
public void exceptionDuringAnalysisTaskExecutionIsFatal() throws DatabaseException, ExceptionCollection {
final ExecutorService executorService = Executors.newFixedThreadPool(3);
final Engine instance = new Engine();
final List<Throwable> exceptions = new ArrayList<Throwable>();

new Expectations() {{
analysisTask.call();
result = new IllegalStateException("Analysis task execution threw an exception");
}};

final List<AnalysisTask> failingAnalysisTask = new ArrayList<AnalysisTask>();
failingAnalysisTask.add(analysisTask);

new Expectations(instance) {{
instance.getExecutorService(analyzer);
result = executorService;

instance.getAnalysisTasks(analyzer, exceptions);
result = failingAnalysisTask;
}};

instance.executeAnalysisTasks(analyzer, exceptions);

assertTrue(executorService.isShutdown());
}
}
@@ -24,6 +24,7 @@ import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.junit.Assume;
import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeNotNull;
import org.junit.Before;
import org.junit.Test;
import org.owasp.dependencycheck.BaseTest;
@@ -61,6 +62,7 @@ public class AssemblyAnalyzerTest extends BaseTest {
analyzer = new AssemblyAnalyzer();
analyzer.accept(new File("test.dll")); // trick into "thinking it is active"
analyzer.initialize();
Assume.assumeTrue("Mono is not installed, skipping tests.", analyzer.buildArgumentList() == null);
} catch (Exception e) {
if (e.getMessage().contains("Could not execute .NET AssemblyAnalyzer")) {
LOGGER.warn("Exception setting up AssemblyAnalyzer. Tests will be incomplete");
@@ -81,7 +83,7 @@ public class AssemblyAnalyzerTest extends BaseTest {

@Test
public void testAnalysis() throws Exception {
//File f = new File(AssemblyAnalyzerTest.class.getClassLoader().getResource("GrokAssembly.exe").getPath());
assumeNotNull(analyzer.buildArgumentList());
File f = BaseTest.getResourceAsFile(this, "GrokAssembly.exe");
Dependency d = new Dependency(f);
analyzer.analyze(d, null);
@@ -104,7 +106,7 @@ public class AssemblyAnalyzerTest extends BaseTest {

@Test
public void testLog4Net() throws Exception {
//File f = new File(AssemblyAnalyzerTest.class.getClassLoader().getResource("log4net.dll").getPath());
assumeNotNull(analyzer.buildArgumentList());
File f = BaseTest.getResourceAsFile(this, "log4net.dll");

Dependency d = new Dependency(f);
@@ -116,9 +118,10 @@ public class AssemblyAnalyzerTest extends BaseTest {

@Test
public void testNonexistent() {
assumeNotNull(analyzer.buildArgumentList());

// Tweak the log level so the warning doesn't show in the console
String oldProp = System.getProperty(LOG_KEY, "info");
//File f = new File(AssemblyAnalyzerTest.class.getClassLoader().getResource("log4net.dll").getPath());
File f = BaseTest.getResourceAsFile(this, "log4net.dll");
File test = new File(f.getParent(), "nonexistent.dll");
Dependency d = new Dependency(test);

@@ -17,23 +17,31 @@
*/
package org.owasp.dependencycheck.analyzer;

import mockit.Mock;
import mockit.MockUp;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.owasp.dependencycheck.BaseDBTestCase;
import org.owasp.dependencycheck.BaseTest;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException;

import java.io.File;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.List;
import java.util.regex.Pattern;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.*;
import org.owasp.dependencycheck.BaseDBTestCase;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

/**
* Unit tests for CmakeAnalyzer.
@@ -150,4 +158,21 @@ public class CMakeAnalyzerTest extends BaseDBTestCase {
assertTrue("Expected version evidence to contain \"" + version + "\".",
result.getVersionEvidence().toString().contains(version));
}

@Test(expected = InitializationException.class)
public void analyzerIsDisabledInCaseOfMissingMessageDigest() throws InitializationException {
new MockUp<MessageDigest>() {
@Mock
MessageDigest getInstance(String ignore) throws NoSuchAlgorithmException {
throw new NoSuchAlgorithmException();
}
};

analyzer = new CMakeAnalyzer();
analyzer.setFilesMatched(true);
assertTrue(analyzer.isEnabled());
analyzer.initialize();

assertFalse(analyzer.isEnabled());
}
}

@@ -189,6 +189,7 @@ public class CPEAnalyzerIntegrationTest extends BaseDBTestCase {
instance.determineCPE(spring);
instance.determineCPE(spring3);
instance.close();

String expResult = "cpe:/a:apache:struts:2.1.2";
Identifier expIdentifier = new Identifier("cpe", expResult, expResult);

@@ -17,19 +17,25 @@
*/
package org.owasp.dependencycheck.analyzer;

import mockit.Mock;
import mockit.MockUp;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.owasp.dependencycheck.BaseDBTestCase;
import org.owasp.dependencycheck.BaseTest;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException;

import java.io.File;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.owasp.dependencycheck.BaseDBTestCase;

/**
* Unit tests for NodePackageAnalyzer.
@@ -96,4 +102,22 @@ public class ComposerLockAnalyzerTest extends BaseDBTestCase {
"composer.lock"));
analyzer.analyze(result, engine);
}

@Test(expected = InitializationException.class)
public void analyzerIsDisabledInCaseOfMissingMessageDigest() throws InitializationException {
new MockUp<MessageDigest>() {
@Mock
MessageDigest getInstance(String ignore) throws NoSuchAlgorithmException {
throw new NoSuchAlgorithmException();
}
};

analyzer = new ComposerLockAnalyzer();
analyzer.setFilesMatched(true);
assertTrue(analyzer.isEnabled());
analyzer.initialize();

assertFalse(analyzer.isEnabled());
}
}

@@ -17,17 +17,25 @@
*/
package org.owasp.dependencycheck.analyzer;

import static org.junit.Assert.assertEquals;
import mockit.Mocked;
import mockit.Verifications;
import org.junit.Test;
import org.owasp.dependencycheck.BaseTest;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.dependency.Dependency;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

/**
*
* @author Jeremy Long
*/
public class DependencyBundlingAnalyzerTest extends BaseTest {

@Mocked
Engine engineMock;

/**
* Test of getName method, of class DependencyBundlingAnalyzer.
*/
@@ -52,15 +60,27 @@ public class DependencyBundlingAnalyzerTest extends BaseTest {

/**
* Test of analyze method, of class DependencyBundlingAnalyzer.
* The actually passed dependency does not matter. The analyzer only runs once.
*/
@Test
public void testAnalyze() throws Exception {
// Dependency ignore = null;
// Engine engine = null;
// DependencyBundlingAnalyzer instance = new DependencyBundlingAnalyzer();
// instance.analyze(ignore, engine);
// // TODO review the generated test code and remove the default call to fail.
// fail("The test case is a prototype.");
DependencyBundlingAnalyzer instance = new DependencyBundlingAnalyzer();

// the actual dependency does not matter
assertFalse(instance.getAnalyzed());
instance.analyze(null, engineMock);

// the second runs basically does nothing
assertTrue(instance.getAnalyzed());
instance.analyze(null, engineMock);
instance.analyze(null, engineMock);
instance.analyze(null, engineMock);
assertTrue(instance.getAnalyzed());

new Verifications() {{
engineMock.getDependencies();
times = 2;
}};
}

/**
@@ -119,7 +139,5 @@ public class DependencyBundlingAnalyzerTest extends BaseTest {
expResult = true;
result = instance.firstPathIsShortest(left, right);
assertEquals(expResult, result);

}

}

@@ -117,7 +117,6 @@ public class RubyBundleAuditAnalyzerTest extends BaseDBTestCase {
final Engine engine = new Engine();
analyzer.analyze(result, engine);
int size = engine.getDependencies().size();

assertTrue(size >= 1);

Dependency dependency = engine.getDependencies().get(0);

@@ -0,0 +1,123 @@
package org.owasp.dependencycheck.analyzer;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.owasp.dependencycheck.BaseTest;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;

import java.io.File;

/**
* Unit tests for CocoaPodsAnalyzer.
*
* @author Bianca Jiang
*/
public class SwiftAnalyzersTest extends BaseTest {

/**
* The analyzer to test.
*/
CocoaPodsAnalyzer podsAnalyzer;
SwiftPackageManagerAnalyzer spmAnalyzer;

/**
* Correctly setup the analyzer for testing.
*
* @throws Exception thrown if there is a problem
*/
@Before
public void setUp() throws Exception {
podsAnalyzer = new CocoaPodsAnalyzer();
podsAnalyzer.setFilesMatched(true);
podsAnalyzer.initialize();

spmAnalyzer = new SwiftPackageManagerAnalyzer();
spmAnalyzer.setFilesMatched(true);
spmAnalyzer.initialize();
}

/**
* Cleanup the analyzer's temp files, etc.
*
* @throws Exception thrown if there is a problem
*/
@After
public void tearDown() throws Exception {
podsAnalyzer.close();
podsAnalyzer = null;

spmAnalyzer.close();
spmAnalyzer = null;
}

/**
* Test of getName method, of class CocoaPodsAnalyzer.
*/
@Test
public void testPodsGetName() {
assertThat(podsAnalyzer.getName(), is("CocoaPods Package Analyzer"));
}

/**
* Test of getName method, of class SwiftPackageManagerAnalyzer.
*/
@Test
public void testSPMGetName() {
assertThat(spmAnalyzer.getName(), is("SWIFT Package Manager Analyzer"));
}

/**
* Test of supportsFiles method, of class CocoaPodsAnalyzer.
*/
@Test
public void testPodsSupportsFiles() {
assertThat(podsAnalyzer.accept(new File("test.podspec")), is(true));
}

/**
* Test of supportsFiles method, of class SwiftPackageManagerAnalyzer.
*/
@Test
public void testSPMSupportsFiles() {
assertThat(spmAnalyzer.accept(new File("Package.swift")), is(true));
}

/**
* Test of analyze method, of class CocoaPodsAnalyzer.
*
* @throws AnalysisException is thrown when an exception occurs.
*/
@Test
public void testCocoaPodsAnalyzer() throws AnalysisException {
final Dependency result = new Dependency(BaseTest.getResourceAsFile(this,
"swift/cocoapods/EasyPeasy.podspec"));
podsAnalyzer.analyze(result, null);
final String vendorString = result.getVendorEvidence().toString();

assertThat(vendorString, containsString("Carlos Vidal"));
assertThat(vendorString, containsString("https://github.com/nakiostudio/EasyPeasy"));
assertThat(vendorString, containsString("MIT"));
assertThat(result.getProductEvidence().toString(), containsString("EasyPeasy"));
assertThat(result.getVersionEvidence().toString(), containsString("0.2.3"));
}

/**
* Test of analyze method, of class SwiftPackageManagerAnalyzer.
*
* @throws AnalysisException is thrown when an exception occurs.
*/
@Test
public void testSPMAnalyzer() throws AnalysisException {
final Dependency result = new Dependency(BaseTest.getResourceAsFile(this,
"swift/Gloss/Package.swift"));
spmAnalyzer.analyze(result, null);

assertThat(result.getProductEvidence().toString(), containsString("Gloss"));
}
}
@@ -26,6 +26,7 @@ import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.owasp.dependencycheck.BaseTest;
import org.owasp.dependencycheck.analyzer.NexusAnalyzer;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -39,7 +40,7 @@ public class NexusSearchTest extends BaseTest {
public void setUp() throws Exception {
String nexusUrl = Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL);
LOGGER.debug(nexusUrl);
searcher = new NexusSearch(new URL(nexusUrl));
searcher = new NexusSearch(new URL(nexusUrl), NexusAnalyzer.useProxy());
Assume.assumeTrue(searcher.preflightRequest());
}

@@ -40,12 +40,11 @@ public class NvdCveUpdaterIntegrationTest extends BaseTest {
// /**
//  * Test of update method, of class StandardUpdate.
//  */
// @Test
// public void testUpdate() throws Exception {
// StandardUpdate instance = getStandardUpdateTask();
// instance.update();
// //TODO make this an actual test
// }
@Test
public void testUpdate() throws Exception {
NvdCveUpdater instance = getUpdater();
instance.update();
}
/**
* Test of updatesNeeded method, of class StandardUpdate.
*/

@@ -1,69 +0,0 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2013 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.update.nvd;

import java.io.File;
import java.util.Calendar;
import org.junit.Before;
import org.junit.Test;
import org.owasp.dependencycheck.BaseTest;
import org.owasp.dependencycheck.data.update.NvdCveUpdater;
import org.owasp.dependencycheck.utils.Settings;

/**
*
* @author Jeremy Long
*/
public class NvdCveUpdaterIntegrationTest extends BaseTest {

@Before
public void setUp() throws Exception {
int year = Calendar.getInstance().get(Calendar.YEAR);
if (year <= 2014) {
//File f = new File(NvdCveUpdaterIntegrationTest.class.getClassLoader().getResource("nvdcve-2.0-2014.xml").getPath());
File f = BaseTest.getResourceAsFile(this, "nvdcve-2.0-2014.xml");
String baseURL = f.toURI().toURL().toString();
String modified12 = baseURL.replace("nvdcve-2.0-2014.xml", "nvdcve-modified.xml");
String modified20 = baseURL.replace("nvdcve-2.0-2014.xml", "nvdcve-2.0-modified.xml");
String full12 = baseURL.replace("nvdcve-2.0-2014.xml", "nvdcve-%d.xml");
String full20 = baseURL.replace("nvdcve-2.0-2014.xml", "nvdcve-2.0-%d.xml");
// cve.url-1.2.modified=http://nvd.nist.gov/download/nvdcve-modified.xml
// cve.url-2.0.modified=http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-modified.xml
// cve.startyear=2014
// cve.url-2.0.base=http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml
// cve.url-1.2.base=http://nvd.nist.gov/download/nvdcve-%d.xml

Settings.setString(Settings.KEYS.CVE_MODIFIED_12_URL, modified12);
Settings.setString(Settings.KEYS.CVE_MODIFIED_20_URL, modified20);
Settings.setString(Settings.KEYS.CVE_SCHEMA_1_2, full12);
Settings.setString(Settings.KEYS.CVE_SCHEMA_2_0, full20);
Settings.setString(Settings.KEYS.CVE_START_YEAR, "2014");
} else {
System.err.println("Consider updating the local data files to make the NvdCveUpdaterIntegrationTest perform faster");
}
}

/**
* Test of update method, of class NvdCveUpdater.
*/
@Test
public void testUpdate() throws Exception {
NvdCveUpdater instance = new NvdCveUpdater();
instance.update();
}
}
@@ -96,6 +96,19 @@ public class DependencyVersionTest extends BaseTest {
expResult = true;
result = instance.equals(obj);
assertEquals(expResult, result);

instance = new DependencyVersion("2.0.0");
obj = new DependencyVersion("2");
expResult = false;
result = instance.equals(obj);
assertEquals(expResult, result);

obj = new DependencyVersion("2.0");
expResult = true;
result = instance.equals(obj);
assertEquals(expResult, result);

}

/**

@@ -52,7 +52,7 @@ public class HintHandlerTest extends BaseTest {
@Test
public void testHandler() throws ParserConfigurationException, SAXNotRecognizedException, SAXNotSupportedException, SAXException, FileNotFoundException, UnsupportedEncodingException, IOException {
File file = BaseTest.getResourceAsFile(this, "hints.xml");
File schema = BaseTest.getResourceAsFile(this, "schema/dependency-hint.1.0.xsd");
File schema = BaseTest.getResourceAsFile(this, "schema/dependency-hint.1.1.xsd");
HintHandler handler = new HintHandler();

SAXParserFactory factory = SAXParserFactory.newInstance();

@@ -75,6 +75,7 @@ archive.scan.depth=3

# use HEAD (default) or GET as HTTP request method for query timestamp
downloader.quick.query.timestamp=true
downloader.tls.protocols=TLSv1,TLSv1.1,TLSv1.2,TLSv1.3

analyzer.experimental.enabled=true
analyzer.jar.enabled=true

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<hints xmlns="https://jeremylong.github.io/DependencyCheck/dependency-hint.1.0.xsd">
<hints xmlns="https://jeremylong.github.io/DependencyCheck/dependency-hint.1.1.xsd">
<hint>
<given>
<evidence type="product" source="product source" name="given product name" value="value" confidence="HIGH"/>

@@ -0,0 +1,17 @@
Pod::Spec.new do |s|
s.name = "Gloss"
s.version = "0.7.2"
s.summary = "A shiny JSON parsing library in Swift"
s.description = "A shiny JSON parsing library in Swift. Features include mapping JSON to objects, mapping objects to JSON, handling of nested objects and custom transformations."
s.homepage = "https://github.com/hkellaway/Gloss"
s.license = { :type => "MIT", :file => "LICENSE" }
s.author = { "Harlan Kellaway" => "hello@harlankellaway.com" }
s.social_media_url = "http://twitter.com/HarlanKellaway"
s.source = { :git => "https://github.com/hkellaway/Gloss.git", :tag => s.version.to_s }

s.platforms = { :ios => "8.0", :osx => "10.9", :tvos => "9.0", :watchos => "2.0" }
s.requires_arc = true

s.source_files = 'Sources/*.{swift}'

end
@@ -0,0 +1,30 @@
//
//  Package.swift
//  Gloss
//
//  Copyright (c) 2015 Harlan Kellaway
//
//  Permission is hereby granted, free of charge, to any person obtaining a copy
//  of this software and associated documentation files (the "Software"), to deal
//  in the Software without restriction, including without limitation the rights
//  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
//  copies of the Software, and to permit persons to whom the Software is
//  furnished to do so, subject to the following conditions:
//
//  The above copyright notice and this permission notice shall be included in
//  all copies or substantial portions of the Software.
//
//  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
//  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
//  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
//  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
//  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
//  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
//  THE SOFTWARE.
//

import PackageDescription

let package = Package(
name: "Gloss"
)
@@ -0,0 +1,25 @@
Pod::Spec.new do |s|
s.name = "EasyPeasy"
s.version = "0.2.3"
s.summary = "EasyPeasy is a Swift framework that eases the creation of
Autolayout constraints programmatically"
s.description = <<-DESC
EasyPeasy is a Swift framework that lets you create Autolayout constraints
programmatically without headaches and never ending boilerplate code. Besides the
basics, **EasyPeasy** resolves most of the constraint conflicts for you and lets
you attach to a constraint conditional closures that are evaluated before applying
a constraint, this lets you apply (or not) a constraint depending on platform, size
classes, orientation... or the state of your controller, easy peasy!
DESC
s.homepage = "https://github.com/nakiostudio/EasyPeasy"
s.license = 'MIT'
s.author = { "Carlos Vidal" => "nakioparkour@gmail.com" }
s.source = { :git => "https://github.com/nakiostudio/EasyPeasy.git", :tag => s.version.to_s }
s.social_media_url = 'https://twitter.com/carlostify'

s.platform = :ios, '8.0'
s.requires_arc = true

s.source_files = 'EasyPeasy/**/*'
s.frameworks = 'UIKit'
end
@@ -20,7 +20,7 @@ Copyright (c) 2013 Jeremy Long. All Rights Reserved.
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>1.4.2</version>
<version>1.4.4</version>
</parent>

<artifactId>dependency-check-maven</artifactId>
@@ -38,8 +38,11 @@ Copyright (c) 2013 Jeremy Long. All Rights Reserved.
</distributionManagement>
<!-- end copy -->
<properties>
<version.maven-plugin-plugin>3.4</version.maven-plugin-plugin>
<version.maven-plugin-plugin>3.5</version.maven-plugin-plugin>
</properties>
<prerequisites>
<maven>3.1</maven>
</prerequisites>
<build>
<resources>
<resource>
@@ -208,6 +211,10 @@ Copyright (c) 2013 Jeremy Long. All Rights Reserved.
<groupId>org.sonatype.plexus</groupId>
<artifactId>plexus-sec-dispatcher</artifactId>
</dependency>
<dependency>
<groupId>org.apache.maven.shared</groupId>
<artifactId>maven-dependency-tree</artifactId>
</dependency>
<dependency>
<groupId>org.jmockit</groupId>
<artifactId>jmockit</artifactId>

@@ -19,10 +19,8 @@ package org.owasp.dependencycheck.maven;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import org.apache.maven.plugin.MojoExecutionException;
@@ -32,10 +30,7 @@ import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.MavenProject;
import org.owasp.dependencycheck.analyzer.DependencyBundlingAnalyzer;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.ExceptionCollection;
import org.owasp.dependencycheck.exception.ReportException;
import org.owasp.dependencycheck.utils.Settings;
@@ -49,18 +44,13 @@ import org.owasp.dependencycheck.utils.Settings;
@Mojo(
name = "aggregate",
defaultPhase = LifecyclePhase.VERIFY,
/*aggregator = true,*/
aggregator = true,
threadSafe = false,
requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME,
requiresOnline = true
)
public class AggregateMojo extends BaseDependencyCheckMojo {

/**
* The key to store aggregate exception in the root Maven execution context.
*/
private static final String AGGREGATE_EXCEPTIONS = "AggregateExceptions";

/**
* Executes the aggregate dependency-check goal. This runs dependency-check
* and generates the subsequent reports.
@@ -72,108 +62,78 @@ public class AggregateMojo extends BaseDependencyCheckMojo {
*/
@Override
public void runCheck() throws MojoExecutionException, MojoFailureException {
final MavenEngine engine = generateDataFile();
final MavenEngine engine = loadEngine();
if (engine == null) {
return;
}

if (getProject() == getLastProject()) {
//ensure that the .ser file was created for each.
for (MavenProject current : getReactorProjects()) {
final File dataFile = getDataFile(current);
if (dataFile == null && !skipProject(current)) { //dc was never run on this project. write the ser to the target.
getLog().error(String.format("Module '%s' did not execute dependency-check; an attempt will be made to perform "
+ "the check but dependencies may be missed resulting in false negatives.", current.getName()));
generateDataFile(engine, current);
}
}
for (MavenProject current : getReactorProjects()) {
List<Dependency> dependencies = readDataFile(current);
if (dependencies == null) {
dependencies = new ArrayList<Dependency>();
}
final Set<MavenProject> childProjects = getDescendants(current);
for (MavenProject reportOn : childProjects) {
final List<Dependency> childDeps = readDataFile(reportOn);
if (childDeps != null && !childDeps.isEmpty()) {
if (getLog().isDebugEnabled()) {
getLog().debug(String.format("Adding %d dependencies from %s", childDeps.size(), reportOn.getName()));
}
dependencies.addAll(childDeps);
} else if (getLog().isDebugEnabled()) {
getLog().debug(String.format("No dependencies read for %s", reportOn.getName()));
}
}
engine.getDependencies().clear();
engine.getDependencies().addAll(dependencies);
final DependencyBundlingAnalyzer bundler = new DependencyBundlingAnalyzer();
try {
if (getLog().isDebugEnabled()) {
getLog().debug(String.format("Dependency count pre-bundler: %s", engine.getDependencies().size()));
}
bundler.analyze(null, engine);
if (getLog().isDebugEnabled()) {
getLog().debug(String.format("Dependency count post-bundler: %s", engine.getDependencies().size()));
}
} catch (AnalysisException ex) {
getLog().warn("An error occurred grouping the dependencies; duplicate entries may exist in the report", ex);
getLog().debug("Bundling Exception", ex);
}
ExceptionCollection exCol = scanArtifacts(getProject(), engine);

File outputDir = getCorrectOutputDirectory(current);
if (outputDir == null) {
//in some regards we shouldn't be writting this, but we are anyway.
//we shouldn't write this because nothing is configured to generate this report.
outputDir = new File(current.getBuild().getDirectory());
for (MavenProject childProject : getDescendants(this.getProject())) {
final ExceptionCollection ex = scanArtifacts(childProject, engine);
if (ex != null) {
if (exCol == null) {
exCol = ex;
}
try {
writeReports(engine, current, outputDir);
} catch (ReportException ex) {
ExceptionCollection exCol = (ExceptionCollection) engine.getExecutionRoot().getContextValue(AGGREGATE_EXCEPTIONS);
if (exCol == null) {
exCol = new ExceptionCollection("Error writing aggregate report", ex);
} else {
exCol.addException(ex);
}
if (this.isFailOnError()) {
throw new MojoExecutionException("One or more exceptions occured during dependency-check analysis", exCol);
} else {
getLog().debug("One or more exceptions occured during dependency-check analysis", exCol);
}
exCol.getExceptions().addAll(ex.getExceptions());
if (ex.isFatal()) {
exCol.setFatal(true);
}
}
}

try {
engine.analyzeDependencies();
} catch (ExceptionCollection ex) {
if (exCol == null) {
exCol = ex;
} else if (ex.isFatal()) {
exCol.setFatal(true);
exCol.getExceptions().addAll(ex.getExceptions());
}
if (exCol.isFatal()) {
final String msg = String.format("Fatal exception(s) analyzing %s", getProject().getName());
if (this.isFailOnError()) {
throw new MojoExecutionException(msg, exCol);
}
getLog().error(msg);
if (getLog().isDebugEnabled()) {
getLog().debug(exCol);
}
return;
} else {
final String msg = String.format("Exception(s) analyzing %s", getProject().getName());
if (getLog().isDebugEnabled()) {
getLog().debug(msg, exCol);
}
}
}
File outputDir = getCorrectOutputDirectory(this.getProject());
if (outputDir == null) {
//in some regards we shouldn't be writting this, but we are anyway.
//we shouldn't write this because nothing is configured to generate this report.
outputDir = new File(this.getProject().getBuild().getDirectory());
}
try {
writeReports(engine, this.getProject(), outputDir);
} catch (ReportException ex) {
if (exCol == null) {
exCol = new ExceptionCollection("Error writing aggregate report", ex);
} else {
exCol.addException(ex);
}
if (this.isFailOnError()) {
throw new MojoExecutionException("One or more exceptions occurred during dependency-check analysis", exCol);
} else {
getLog().debug("One or more exceptions occurred during dependency-check analysis", exCol);
}
}
showSummary(this.getProject(), engine.getDependencies());
checkForFailure(engine.getDependencies());
engine.cleanup();
Settings.cleanup();
}

/**
* Gets the last project in the reactor - taking into account skipped
* projects.
*
* @return the last project in the reactor
*/
private MavenProject getLastProject() {
for (int x = getReactorProjects().size() - 1; x >= 0; x--) {
final MavenProject p = getReactorProjects().get(x);
if (!skipProject(p)) {
return p;
}
}
return null;
}

/**
* Tests if the project is being skipped in the Maven site report.
*
* @param project a project in the reactor
* @return true if the project is skipped; otherwise false
*/
private boolean skipProject(MavenProject project) {
final String skip = (String) project.getProperties().get("maven.site.skip");
return "true".equalsIgnoreCase(skip) && isGeneratingSite();
}

/**
* Returns a set containing all the descendant projects of the given
* project.
@@ -264,16 +224,15 @@ public class AggregateMojo extends BaseDependencyCheckMojo {
}

/**
* Initializes the engine, runs a scan, and writes the serialized
* dependencies to disk.
* Initializes the engine.
*
* @return the MavenEngine used to execute dependency-check
* @throws MojoExecutionException thrown if there is an exception running
* the mojo
* the Mojo
* @throws MojoFailureException thrown if dependency-check is configured to
* fail the build if severe CVEs are identified.
*/
protected MavenEngine generateDataFile() throws MojoExecutionException, MojoFailureException {
protected MavenEngine loadEngine() throws MojoExecutionException, MojoFailureException {
MavenEngine engine = null;
try {
engine = initializeEngine();
@@ -281,64 +240,12 @@ public class AggregateMojo extends BaseDependencyCheckMojo {
if (getLog().isDebugEnabled()) {
getLog().debug("Database connection error", ex);
}
final String msg = "An exception occured connecting to the local database. Please see the log file for more details.";
final String msg = "An exception occurred connecting to the local database. Please see the log file for more details.";
if (this.isFailOnError()) {
throw new MojoExecutionException(msg, ex);
}
getLog().error(msg, ex);
return null;
}
return generateDataFile(engine, getProject());
}

/**
* Runs dependency-check's MavenEngine and writes the serialized
* dependencies to disk.
*
* @param engine the MavenEngine to use when scanning.
* @param project the project to scan and generate the data file for
* @return the MavenEngine used to execute dependency-check
* @throws MojoExecutionException thrown if there is an exception running
* the mojo
* @throws MojoFailureException thrown if dependency-check is configured to
* fail the build if severe CVEs are identified.
*/
protected MavenEngine generateDataFile(MavenEngine engine, MavenProject project) throws MojoExecutionException, MojoFailureException {
if (getLog().isDebugEnabled()) {
getLog().debug(String.format("Begin Scanning: %s", project.getName()));
}
engine.getDependencies().clear();
engine.resetFileTypeAnalyzers();
scanArtifacts(project, engine);
try {
engine.analyzeDependencies();
} catch (ExceptionCollection ex) {
ExceptionCollection col = (ExceptionCollection) engine.getExecutionRoot().getContextValue(AGGREGATE_EXCEPTIONS);
if (col == null) {
col = ex;
} else if (ex.isFatal()) {
col.setFatal(true);
col.getExceptions().addAll(ex.getExceptions());
}
if (col.isFatal()) {
final String msg = String.format("Fatal exception(s) analyzing %s", project.getName());
if (this.isFailOnError()) {
throw new MojoExecutionException(msg, ex);
}
getLog().error(msg, col);
return null;
} else {
final String msg = String.format("Exception(s) analyzing %s", project.getName());
if (getLog().isDebugEnabled()) {
getLog().debug(msg, ex);
}
engine.getExecutionRoot().setContextValue(AGGREGATE_EXCEPTIONS, col);
}
}
final File target = new File(project.getBuild().getDirectory());
writeDataFile(project, target, engine.getDependencies());
showSummary(project, engine.getDependencies());
checkForFailure(engine.getDependencies());
return engine;
}

@@ -27,7 +27,7 @@ import java.io.InputStream;
|
||||
import java.io.ObjectOutputStream;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import org.apache.maven.artifact.Artifact;
|
||||
import org.eclipse.aether.artifact.Artifact;
|
||||
import org.apache.maven.doxia.sink.Sink;
|
||||
import org.apache.maven.plugin.AbstractMojo;
|
||||
import org.apache.maven.plugin.MojoExecutionException;
|
||||
@@ -39,6 +39,16 @@ import org.apache.maven.reporting.MavenReport;
|
||||
import org.apache.maven.reporting.MavenReportException;
|
||||
import org.apache.maven.settings.Proxy;
|
||||
import org.apache.maven.settings.Server;
|
||||
import org.apache.maven.shared.dependency.graph.DependencyGraphBuilder;
|
||||
import org.apache.maven.shared.dependency.graph.DependencyGraphBuilderException;
|
||||
import org.apache.maven.shared.dependency.graph.DependencyNode;
|
||||
import org.eclipse.aether.RepositorySystem;
|
||||
import org.eclipse.aether.RepositorySystemSession;
|
||||
import org.eclipse.aether.artifact.DefaultArtifact;
|
||||
import org.eclipse.aether.repository.RemoteRepository;
|
||||
import org.eclipse.aether.resolution.ArtifactRequest;
|
||||
import org.eclipse.aether.resolution.ArtifactResolutionException;
|
||||
import org.eclipse.aether.resolution.ArtifactResult;
|
||||
import org.owasp.dependencycheck.data.nexus.MavenArtifact;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
@@ -47,6 +57,7 @@ import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Identifier;
|
||||
import org.owasp.dependencycheck.dependency.Vulnerability;
|
||||
import org.owasp.dependencycheck.exception.ExceptionCollection;
|
||||
import org.owasp.dependencycheck.exception.ReportException;
|
||||
import org.owasp.dependencycheck.reporting.ReportGenerator;
|
||||
import org.owasp.dependencycheck.utils.ExpectedOjectInputStream;
|
||||
@@ -102,6 +113,30 @@ public abstract class BaseDependencyCheckMojo extends AbstractMojo implements Ma
|
||||
*/
|
||||
@Parameter(readonly = true, required = true, property = "reactorProjects")
|
||||
private List<MavenProject> reactorProjects;
|
||||
/**
|
||||
* The entry point to Aether, i.e. the component doing all the work.
|
||||
*/
|
||||
@Component
|
||||
private RepositorySystem repoSystem;
|
||||
|
||||
/**
|
||||
* The current repository/network configuration of Maven.
|
||||
*/
|
||||
@Parameter(defaultValue = "${repositorySystemSession}", readonly = true)
|
||||
private RepositorySystemSession repoSession;
|
||||
|
||||
/**
|
||||
* The project's remote repositories to use for the resolution of plug-ins
|
||||
* and their dependencies.
|
||||
*/
|
||||
@Parameter(defaultValue = "${project.remotePluginRepositories}", readonly = true)
|
||||
private List<RemoteRepository> remoteRepos;
|
||||
|
||||
/**
|
||||
* Component within Maven to build the dependency graph.
|
||||
*/
|
||||
@Component
|
||||
private DependencyGraphBuilder dependencyGraphBuilder;
|
||||
|
||||
/**
|
||||
* The output directory. This generally maps to "target".
|
||||
@@ -171,6 +206,13 @@ public abstract class BaseDependencyCheckMojo extends AbstractMojo implements Ma
|
||||
*/
|
||||
@Parameter(property = "suppressionFile", defaultValue = "", required = false)
|
||||
private String suppressionFile;
|
||||
|
||||
/**
|
||||
* The path to the hints file.
|
||||
*/
|
||||
@Parameter(property = "hintsFile", defaultValue = "", required = false)
|
||||
private String hintsFile;
|
||||
|
||||
/**
|
||||
* Flag indicating whether or not to show a summary in the output.
|
||||
*/
|
||||
@@ -553,32 +595,121 @@ public abstract class BaseDependencyCheckMojo extends AbstractMojo implements Ma
|
||||
*
|
||||
* @param project the project to scan the dependencies of
|
||||
* @param engine the engine to use to scan the dependencies
|
||||
* @return a collection of exceptions that may have occurred while resolving
|
||||
* and scanning the dependencies
|
||||
*/
|
||||
protected void scanArtifacts(MavenProject project, MavenEngine engine) {
|
||||
for (Artifact a : project.getArtifacts()) {
|
||||
protected ExceptionCollection scanArtifacts(MavenProject project, MavenEngine engine) {
|
||||
// <editor-fold defaultstate="collapsed" desc="old implementation">
|
||||
/*
|
||||
for (Artifact a : project.getArtifacts()) {
|
||||
if (excludeFromScan(a)) {
|
||||
continue;
|
||||
continue;
|
||||
}
|
||||
final List<Dependency> deps = engine.scan(a.getFile().getAbsoluteFile());
|
||||
if (deps != null) {
|
||||
if (deps.size() == 1) {
|
||||
final Dependency d = deps.get(0);
|
||||
if (d != null) {
|
||||
final MavenArtifact ma = new MavenArtifact(a.getGroupId(), a.getArtifactId(), a.getVersion());
|
||||
d.addAsEvidence("pom", ma, Confidence.HIGHEST);
|
||||
d.addProjectReference(project.getName());
|
||||
if (getLog().isDebugEnabled()) {
|
||||
getLog().debug(String.format("Adding project reference %s on dependency %s", project.getName(),
|
||||
d.getDisplayFileName()));
|
||||
if (deps.size() == 1) {
|
||||
final Dependency d = deps.get(0);
|
||||
if (d != null) {
|
||||
final MavenArtifact ma = new MavenArtifact(a.getGroupId(), a.getArtifactId(), a.getVersion());
|
||||
d.addAsEvidence("pom", ma, Confidence.HIGHEST);
|
||||
d.addProjectReference(project.getName());
                    if (getLog().isDebugEnabled()) {
                        getLog().debug(String.format("Adding project reference %s on dependency %s", project.getName(),
                                d.getDisplayFileName()));
                    }
                }
            } else if (getLog().isDebugEnabled()) {
                final String msg = String.format("More then 1 dependency was identified in first pass scan of '%s:%s:%s'",
                        a.getGroupId(), a.getArtifactId(), a.getVersion());
                getLog().debug(msg);
            }
        }
    }
    */
    // </editor-fold>
        try {
            final DependencyNode dn = dependencyGraphBuilder.buildDependencyGraph(project, null, reactorProjects);
            return collectDependencies(engine, project, dn.getChildren());
        } catch (DependencyGraphBuilderException ex) {
            final String msg = String.format("Unable to build dependency graph on project %s", project.getName());
            getLog().debug(msg, ex);
            return new ExceptionCollection(msg, ex);
        }
    }

    /**
     * Resolves the projects artifacts using Aether and scans the resulting
     * dependencies.
     *
     * @param engine the core dependency-check engine
     * @param project the project being scanned
     * @param nodes the list of dependency nodes, generally obtained via the
     * DependencyGraphBuilder
     * @return a collection of exceptions that may have occurred while resolving
     * and scanning the dependencies
     */
    private ExceptionCollection collectDependencies(MavenEngine engine, MavenProject project, List<DependencyNode> nodes) {
        ExceptionCollection exCol = null;
        for (DependencyNode dependencyNode : nodes) {
            exCol = collectDependencies(engine, project, dependencyNode.getChildren());
            if (excludeFromScan(dependencyNode.getArtifact().getScope())) {
                continue;
            }
            final ArtifactRequest request = new ArtifactRequest();
            request.setArtifact(new DefaultArtifact(dependencyNode.getArtifact().getId()));
            request.setRepositories(remoteRepos);
            try {
                final ArtifactResult result = repoSystem.resolveArtifact(repoSession, request);
                if (result.isResolved() && result.getArtifact() != null && result.getArtifact().getFile() != null) {
                    final List<Dependency> deps = engine.scan(result.getArtifact().getFile().getAbsoluteFile(),
                            project.getName() + ":" + dependencyNode.getArtifact().getScope());
                    if (deps != null) {
                        if (deps.size() == 1) {
                            final Dependency d = deps.get(0);
                            if (d != null) {
                                final Artifact a = result.getArtifact();
                                final MavenArtifact ma = new MavenArtifact(a.getGroupId(), a.getArtifactId(), a.getVersion());
                                d.addAsEvidence("pom", ma, Confidence.HIGHEST);
                                if (getLog().isDebugEnabled()) {
                                    getLog().debug(String.format("Adding project reference %s on dependency %s",
                                            project.getName(), d.getDisplayFileName()));
                                }
                            }
                        } else if (getLog().isDebugEnabled()) {
                            final String msg = String.format("More then 1 dependency was identified in first pass scan of '%s' in project %s",
                                    dependencyNode.getArtifact().getId(), project.getName());
                            getLog().debug(msg);
                        }
                    } else {
                        final String msg = String.format("Error resolving '%s' in project %s",
                                dependencyNode.getArtifact().getId(), project.getName());
                        if (exCol == null) {
                            exCol = new ExceptionCollection();
                        }
                        getLog().error(msg);
                        for (Exception ex : result.getExceptions()) {
                            exCol.addException(ex);
                        }
                    }
                } else if (getLog().isDebugEnabled()) {
                    final String msg = String.format("More then 1 dependency was identified in first pass scan of '%s:%s:%s'",
                            a.getGroupId(), a.getArtifactId(), a.getVersion());
                } else {
                    final String msg = String.format("Unable to resolve '%s' in project %s",
                            dependencyNode.getArtifact().getId(), project.getName());
                    getLog().debug(msg);
                    if (exCol == null) {
                        exCol = new ExceptionCollection();
                    }
                    for (Exception ex : result.getExceptions()) {
                        exCol.addException(ex);
                    }
                }
            } catch (ArtifactResolutionException ex) {
                if (exCol == null) {
                    exCol = new ExceptionCollection();
                }
                exCol.addException(ex);
            }
        }
        return exCol;
    }

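Illustrative sketch, not part of the diff: collectDependencies above resolves each dependency node by building an ArtifactRequest and handing it to the injected Aether RepositorySystem, then feeds the resolved file to the dependency-check engine. The standalone helper below shows just the resolution step, assuming the org.eclipse.aether flavor of the API and the same repoSystem, repoSession and remoteRepos components the mojo injects; the helper name resolveLocalFile is hypothetical.

    import java.io.File;
    import java.util.List;
    import org.eclipse.aether.RepositorySystem;
    import org.eclipse.aether.RepositorySystemSession;
    import org.eclipse.aether.artifact.DefaultArtifact;
    import org.eclipse.aether.repository.RemoteRepository;
    import org.eclipse.aether.resolution.ArtifactRequest;
    import org.eclipse.aether.resolution.ArtifactResolutionException;
    import org.eclipse.aether.resolution.ArtifactResult;

    // Hypothetical helper (assumption, not plugin code): resolves "group:artifact:version"
    // to its file in the local repository using the same Aether calls as collectDependencies.
    final class ResolveSketch {
        static File resolveLocalFile(RepositorySystem repoSystem, RepositorySystemSession repoSession,
                List<RemoteRepository> remoteRepos, String coordinates) throws ArtifactResolutionException {
            final ArtifactRequest request = new ArtifactRequest();
            request.setArtifact(new DefaultArtifact(coordinates));
            request.setRepositories(remoteRepos);
            final ArtifactResult result = repoSystem.resolveArtifact(repoSession, request);
            if (result.isResolved() && result.getArtifact() != null && result.getArtifact().getFile() != null) {
                return result.getArtifact().getFile().getAbsoluteFile();
            }
            // the mojo instead records result.getExceptions() in an ExceptionCollection
            return null;
        }
    }
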
    /**
@@ -669,8 +800,7 @@ public abstract class BaseDependencyCheckMojo extends AbstractMojo implements Ma
     */
    protected MavenEngine initializeEngine() throws DatabaseException {
        populateSettings();
        return new MavenEngine(this.project,
                this.reactorProjects);
        return new MavenEngine(this.project, this.reactorProjects);
    }

    /**
@@ -725,6 +855,7 @@ public abstract class BaseDependencyCheckMojo extends AbstractMojo implements Ma

        Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
        Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
        Settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);

        //File Type Analyzer Settings
        Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_JAR_ENABLED, jarAnalyzerEnabled);
@@ -835,18 +966,18 @@ public abstract class BaseDependencyCheckMojo extends AbstractMojo implements Ma
     * Tests is the artifact should be included in the scan (i.e. is the
     * dependency in a scope that is being scanned).
     *
     * @param a the Artifact to test
     * @param scope the scope of the artifact to test
     * @return <code>true</code> if the artifact is in an excluded scope;
     * otherwise <code>false</code>
     */
    protected boolean excludeFromScan(Artifact a) {
        if (skipTestScope && Artifact.SCOPE_TEST.equals(a.getScope())) {
    protected boolean excludeFromScan(String scope) {
        if (skipTestScope && org.apache.maven.artifact.Artifact.SCOPE_TEST.equals(scope)) {
            return true;
        }
        if (skipProvidedScope && Artifact.SCOPE_PROVIDED.equals(a.getScope())) {
        if (skipProvidedScope && org.apache.maven.artifact.Artifact.SCOPE_PROVIDED.equals(scope)) {
            return true;
        }
        if (skipRuntimeScope && !Artifact.SCOPE_RUNTIME.equals(a.getScope())) {
        if (skipRuntimeScope && !org.apache.maven.artifact.Artifact.SCOPE_RUNTIME.equals(scope)) {
            return true;
        }
        return false;
@@ -1133,4 +1264,5 @@ public abstract class BaseDependencyCheckMojo extends AbstractMojo implements Ma
        return ret;
    }
    //</editor-fold>

}

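Illustrative sketch, not part of the diff: the refactored excludeFromScan above filters on the scope string rather than the Artifact, so the same check can be applied to project artifacts and to dependency-graph nodes. The self-contained snippet below mirrors that filter with the skip flags passed in as parameters (hypothetical names; the mojo reads them from its own configuration fields), using the literal values that the Maven Artifact.SCOPE_* constants resolve to.

    // Standalone mirror of the scope filter; the flags are parameters here only for illustration.
    final class ScopeFilterSketch {
        static boolean excludeFromScan(String scope, boolean skipTestScope, boolean skipProvidedScope, boolean skipRuntimeScope) {
            if (skipTestScope && "test".equals(scope)) {
                return true;
            }
            if (skipProvidedScope && "provided".equals(scope)) {
                return true;
            }
            if (skipRuntimeScope && !"runtime".equals(scope)) {
                return true;
            }
            return false;
        }

        public static void main(String[] args) {
            // With only skipTestScope enabled, test dependencies are excluded and compile dependencies are kept.
            System.out.println(excludeFromScan("test", true, false, false));    // true
            System.out.println(excludeFromScan("compile", true, false, false)); // false
        }
    }
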
@@ -55,7 +55,7 @@ public class CheckMojo extends BaseDependencyCheckMojo {
    public boolean canGenerateReport() {
        boolean isCapable = false;
        for (Artifact a : getProject().getArtifacts()) {
            if (!excludeFromScan(a)) {
            if (!excludeFromScan(a.getScope())) {
                isCapable = true;
                break;
            }
@@ -81,23 +81,22 @@ public class CheckMojo extends BaseDependencyCheckMojo {
            if (getLog().isDebugEnabled()) {
                getLog().debug("Database connection error", ex);
            }
            final String msg = "An exception occured connecting to the local database. Please see the log file for more details.";
            final String msg = "An exception occurred connecting to the local database. Please see the log file for more details.";
            if (this.isFailOnError()) {
                throw new MojoExecutionException(msg, ex);
            }
            getLog().error(msg);
        }
        if (engine != null) {
            scanArtifacts(getProject(), engine);
            ExceptionCollection exCol = scanArtifacts(getProject(), engine);
            if (engine.getDependencies().isEmpty()) {
                getLog().info("No dependencies were identified that could be analyzed by dependency-check");
            } else {
                ExceptionCollection exCol = null;
                try {
                    engine.analyzeDependencies();
                } catch (ExceptionCollection ex) {
                    if (this.isFailOnError() && ex.isFatal()) {
                        throw new MojoExecutionException("One or more exceptions occured during analysis", ex);
                        throw new MojoExecutionException("One or more exceptions occurred during analysis", ex);
                    }
                    exCol = ex;
                }
@@ -113,11 +112,11 @@ public class CheckMojo extends BaseDependencyCheckMojo {
                    }
                }
            }
            writeDataFile(getProject(), null, engine.getDependencies());
            //writeDataFile(getProject(), null, engine.getDependencies());
            showSummary(getProject(), engine.getDependencies());
            checkForFailure(engine.getDependencies());
            if (exCol != null && this.isFailOnError()) {
                throw new MojoExecutionException("One or more exceptions occured during dependency-check analysis", exCol);
                throw new MojoExecutionException("One or more exceptions occurred during dependency-check analysis", exCol);
            }
        }
    }

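Illustrative sketch, not part of the diff: the CheckMojo change above folds scan and analysis problems into a single ExceptionCollection, still writes the summary and report, and only fails the build when failOnError is set. The self-contained snippet below shows that aggregation pattern, with a plain exception list standing in for dependency-check's ExceptionCollection.

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.maven.plugin.MojoExecutionException;

    // Stand-in for the failOnError aggregation pattern; the names here are illustrative only.
    final class FailOnErrorSketch {
        static void runChecks(List<Runnable> steps, boolean failOnError) throws MojoExecutionException {
            final List<Exception> collected = new ArrayList<Exception>();
            for (Runnable step : steps) {
                try {
                    step.run();        // e.g. scanning artifacts, then analyzing dependencies
                } catch (RuntimeException ex) {
                    collected.add(ex); // remember the problem but keep going so reports are still written
                }
            }
            if (!collected.isEmpty() && failOnError) {
                throw new MojoExecutionException("One or more exceptions occurred during dependency-check analysis",
                        collected.get(0));
            }
        }
    }
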
@@ -71,13 +71,13 @@ public class UpdateMojo extends BaseDependencyCheckMojo {
            if (getLog().isDebugEnabled()) {
                getLog().debug("Database connection error", ex);
            }
            final String msg = "An exception occured connecting to the local database. Please see the log file for more details.";
            final String msg = "An exception occurred connecting to the local database. Please see the log file for more details.";
            if (this.isFailOnError()) {
                throw new MojoExecutionException(msg, ex);
            }
            getLog().error(msg);
        } catch (UpdateException ex) {
            final String msg = "An exception occured while downloading updates. Please see the log file for more details.";
            final String msg = "An exception occurred while downloading updates. Please see the log file for more details.";
            if (this.isFailOnError()) {
                throw new MojoExecutionException(msg, ex);
            }

@@ -1,335 +0,0 @@
/*
 * This file is part of dependency-check-ant.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Copyright (c) 2015 The OWASP Foundation. All Rights Reserved.
 */
package org.owasp.dependencycheck.maven.slf4j;

import org.apache.maven.plugin.logging.Log;
import org.slf4j.helpers.MarkerIgnoringBase;
import org.slf4j.helpers.MessageFormatter;

/**
 * Created on 6/14/15.
 *
 * @author colezlaw
 */
public class MavenLoggerAdapter extends MarkerIgnoringBase {

    /**
     * A reference to the Maven log.
     */
    private final Log log;

    /**
     * Creates a new Maven Logger Adapter.
     *
     * @param log the Maven log
     */
    public MavenLoggerAdapter(Log log) {
        super();
        this.log = log;
    }

    /**
     * Returns true if trace is enabled.
     *
     * @return whether or not trace is enabled
     */
    @Override
    public boolean isTraceEnabled() {
        if (log != null) {
            return log.isDebugEnabled();
        }
        return true;
    }

    @Override
    public void trace(String msg) {
        if (log != null) {
            log.debug(msg);
        } else {
            System.out.println(msg);
        }
    }

    @Override
    public void trace(String format, Object arg) {
        final String message = MessageFormatter.format(format, arg).getMessage();
        if (log != null) {
            log.debug(message);
        } else {
            System.out.println(message);
        }
    }

    @Override
    public void trace(String format, Object arg1, Object arg2) {
        final String message = MessageFormatter.format(format, arg1, arg2).getMessage();
        if (log != null) {
            log.debug(message);
        } else {
            System.out.println(message);
        }
    }

    @Override
    public void trace(String format, Object... arguments) {
        final String message = MessageFormatter.format(format, arguments).getMessage();
        if (log != null) {
            log.debug(message);
        } else {
            System.out.println(message);
        }
    }

    @Override
    public void trace(String msg, Throwable t) {
        if (log != null) {
            log.debug(msg, t);
        } else {
            System.out.println(msg);
            t.printStackTrace();
        }
    }

    @Override
    public boolean isDebugEnabled() {
        if (log != null) {
            return log.isDebugEnabled();
        }
        return true;
    }

    @Override
    public void debug(String msg) {
        if (log != null) {
            log.debug(msg);
        } else {
            System.out.println(msg);
        }
    }

    @Override
    public void debug(String format, Object arg) {
        final String message = MessageFormatter.format(format, arg).getMessage();
        if (log != null) {
            log.debug(message);
        } else {
            System.out.println(message);
        }
    }

    @Override
    public void debug(String format, Object arg1, Object arg2) {
        final String message = MessageFormatter.format(format, arg1, arg2).getMessage();
        if (log != null) {
            log.debug(message);
        } else {
            System.out.println(message);
        }
    }

    @Override
    public void debug(String format, Object... arguments) {
        final String message = MessageFormatter.format(format, arguments).getMessage();
        if (log != null) {
            log.debug(message);
        } else {
            System.out.println(message);
        }
    }

    @Override
    public void debug(String msg, Throwable t) {
        if (log != null) {
            log.debug(msg, t);
        } else {
            System.out.println(msg);
            t.printStackTrace();
        }
    }

    @Override
    public boolean isInfoEnabled() {
        if (log != null) {
            return log.isInfoEnabled();
        }
        return true;
    }

    @Override
    public void info(String msg) {
        if (log != null) {
            log.info(msg);
        } else {
            System.out.println(msg);
        }
    }

    @Override
    public void info(String format, Object arg) {
        final String message = MessageFormatter.format(format, arg).getMessage();
        if (log != null) {
            log.info(message);
        } else {
            System.out.println(message);
        }
    }

    @Override
    public void info(String format, Object arg1, Object arg2) {
        final String message = MessageFormatter.format(format, arg1, arg2).getMessage();
        if (log != null) {
            log.info(message);
        } else {
            System.out.println(message);
        }
    }

    @Override
    public void info(String format, Object... arguments) {
        final String message = MessageFormatter.format(format, arguments).getMessage();
        if (log != null) {
            log.info(message);
        } else {
            System.out.println(message);
        }
    }

    @Override
    public void info(String msg, Throwable t) {
        if (log != null) {
            log.info(msg, t);
        } else {
            System.out.println(msg);
            t.printStackTrace();
        }
    }

    @Override
    public boolean isWarnEnabled() {
        if (log != null) {
            return log.isWarnEnabled();
        }
        return true;
    }

    @Override
    public void warn(String msg) {
        if (log != null) {
            log.warn(msg);
        } else {
            System.out.println(msg);
        }
    }

    @Override
    public void warn(String format, Object arg) {
        final String message = MessageFormatter.format(format, arg).getMessage();
        if (log != null) {
            log.warn(message);
        } else {
            System.out.println(message);
        }
    }

    @Override
    public void warn(String format, Object arg1, Object arg2) {
        final String message = MessageFormatter.format(format, arg1, arg2).getMessage();
        if (log != null) {
            log.warn(message);
        } else {
            System.out.println(message);
        }
    }

    @Override
    public void warn(String format, Object... arguments) {
        final String message = MessageFormatter.format(format, arguments).getMessage();
        if (log != null) {
            log.warn(message);
        } else {
            System.out.println(message);
        }
    }

    @Override
    public void warn(String msg, Throwable t) {
        if (log != null) {
            log.warn(msg, t);
        } else {
            System.out.println(msg);
            t.printStackTrace();
        }
    }

    @Override
    public boolean isErrorEnabled() {
        if (log != null) {
            return log.isErrorEnabled();
        }
        return true;
    }

    @Override
    public void error(String msg) {
        if (log != null) {
            log.error(msg);
        } else {
            System.out.println(msg);
        }
    }

    @Override
    public void error(String format, Object arg) {
        final String message = MessageFormatter.format(format, arg).getMessage();
        if (log != null) {
            log.error(message);
        } else {
            System.out.println(message);
        }
    }

    @Override
    public void error(String format, Object arg1, Object arg2) {
        final String message = MessageFormatter.format(format, arg1, arg2).getMessage();
        if (log != null) {
            log.error(message);
        } else {
            System.out.println(message);
        }
    }

    @Override
    public void error(String format, Object... arguments) {
        final String message = MessageFormatter.format(format, arguments).getMessage();
        if (log != null) {
            log.error(message);
        } else {
            System.out.println(message);
        }
    }

    @Override
    public void error(String msg, Throwable t) {
        if (log != null) {
            log.error(msg, t);
        } else {
            System.out.println(msg);
            t.printStackTrace();
        }
    }
}

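For context on the file removed above: the adapter bridged slf4j calls to the Maven Log by pre-formatting parameterized messages with org.slf4j.helpers.MessageFormatter and falling back to System.out when no Maven log was available. A minimal standalone illustration of that formatting call, not part of the diff:

    import org.slf4j.helpers.FormattingTuple;
    import org.slf4j.helpers.MessageFormatter;

    // Shows the slf4j helper the deleted adapter relied on to expand "{}" placeholders.
    final class FormatterSketch {
        public static void main(String[] args) {
            final FormattingTuple tuple = MessageFormatter.format("Scanned {} of {} dependencies", 3, 10);
            System.out.println(tuple.getMessage()); // prints: Scanned 3 of 10 dependencies
        }
    }
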
@@ -1,56 +0,0 @@
/*
 * This file is part of dependency-check-ant.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Copyright (c) 2015 The OWASP Foundation. All Rights Reserved.
 */
package org.owasp.dependencycheck.maven.slf4j;

import org.apache.maven.plugin.logging.Log;
import org.slf4j.ILoggerFactory;
import org.slf4j.Logger;

/**
 * Created on 6/14/15.
 *
 * @author colezlaw
 */
public class MavenLoggerFactory implements ILoggerFactory {

    /**
     * A reference to the Maven log adapter.
     */
    private final MavenLoggerAdapter mavenLoggerAdapter;

    /**
     * Constructs a new logger factory.
     *
     * @param log a reference to the Maven log
     */
    public MavenLoggerFactory(Log log) {
        super();
        this.mavenLoggerAdapter = new MavenLoggerAdapter(log);
    }

    /**
     * Returns the Maven Logger Adapter.
     *
     * @param name ignored in this implementation
     * @return the maven logger adapter
     */
    @Override
    public Logger getLogger(String name) {
        return mavenLoggerAdapter;
    }
}
@@ -1,4 +0,0 @@
/**
 * This package contains the the slf4j adapter that wraps the maven logger.
 */
package org.owasp.dependencycheck.maven.slf4j;
Some files were not shown because too many files have changed in this diff.