Mirror of https://github.com/ysoftdevs/DependencyCheck.git (synced 2026-01-14 15:53:36 +01:00)
Merge pull request #877 from jeremylong/dependency-updates
Thread Safety Updates
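The pattern running through the whole diff is that the static Settings singleton is replaced by a Settings instance that the caller creates and passes into Engine, the Ant tasks, and CliParser. A minimal before/after sketch, using only calls that appear in the changes below:

    // Before: every caller (and every worker thread) had to initialize the static singleton
    Settings.initialize();
    try (Engine engine = new Engine()) {
        engine.doUpdates();
    } finally {
        Settings.cleanup(true);
    }

    // After: the caller owns a Settings instance and hands it to the Engine
    Settings settings = new Settings();
    try (Engine engine = new Engine(settings)) {
        engine.doUpdates();
    } finally {
        settings.cleanup(true);
    }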
.travis.yml (19 changed lines)
@@ -29,12 +29,12 @@ matrix:
env:
- JDK="JDK7"
script:
- if [ ! -z "$TRAVIS_TAG" ]; then travis_wait 20 mvn install site site:stage -DreleaseTesting; else travis_wait 15 mvn install -DreleaseTesting; fi
- if [ ! -z "$TRAVIS_TAG" ]; then travis_wait 35 mvn install site site:stage -DreleaseTesting; else travis_wait 35 mvn install -DreleaseTesting; fi
- jdk: oraclejdk8
env:
- JDK="JDK8"
script:
- travis_wait 15 mvn install -DreleaseTesting
- travis_wait 35 mvn install -DreleaseTesting

after_success:
- if [ "$JDK" == "JDK8" ]; then
@@ -43,6 +43,21 @@ after_success:
./coverity_scan.sh;
fi;

after_failure:
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/617-hierarchical-cross-deps/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/618-aggregator-purge/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/618-aggregator-update-only/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/629-jackson-dataformat/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/690-threadsafety/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/710-pom-parse-error/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/729-system-scope-resolved/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/729-system-scope-skipped/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/730-multiple-suppression-files/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/730-multiple-suppression-files-configs/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/815-broken-suppression-aggregate/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/846-site-plugin/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/false-positives/build.log

deploy:
- provider: script
script: mvn --settings .travis.settings.xml source:jar javadoc:jar package deploy -DskipTests=true -X
@@ -20,7 +20,7 @@ Copyright (c) 2017 - Jeremy Long. All Rights Reserved.
|
||||
<parent>
|
||||
<groupId>org.owasp</groupId>
|
||||
<artifactId>dependency-check-parent</artifactId>
|
||||
<version>2.1.2-SNAPSHOT</version>
|
||||
<version>3.0.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
<name>Dependency-Check Build-Reporting</name>
|
||||
<artifactId>build-reporting</artifactId>
|
||||
|
||||
@@ -20,7 +20,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
|
||||
<parent>
|
||||
<groupId>org.owasp</groupId>
|
||||
<artifactId>dependency-check-parent</artifactId>
|
||||
<version>2.1.2-SNAPSHOT</version>
|
||||
<version>3.0.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>dependency-check-ant</artifactId>
|
||||
|
||||
@@ -20,6 +20,7 @@ package org.owasp.dependencycheck.taskdefs;
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import javax.annotation.concurrent.NotThreadSafe;
|
||||
|
||||
import org.apache.tools.ant.BuildException;
|
||||
import org.apache.tools.ant.Project;
|
||||
@@ -46,6 +47,7 @@ import org.slf4j.impl.StaticLoggerBinder;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@NotThreadSafe
|
||||
public class Check extends Update {
|
||||
|
||||
/**
|
||||
@@ -161,6 +163,7 @@ public class Check extends Update {
|
||||
/**
|
||||
* Suppression file paths.
|
||||
*/
|
||||
@SuppressWarnings("CanBeFinal")
|
||||
private List<String> suppressionFiles = new ArrayList<>();
|
||||
|
||||
/**
|
||||
@@ -948,7 +951,7 @@ public class Check extends Update {
|
||||
dealWithReferences();
|
||||
validateConfiguration();
|
||||
populateSettings();
|
||||
try (Engine engine = new Engine(Check.class.getClassLoader())) {
|
||||
try (Engine engine = new Engine(Check.class.getClassLoader(), getSettings())) {
|
||||
if (isUpdateOnly()) {
|
||||
log("Deprecated 'UpdateOnly' property set; please use the UpdateTask instead", Project.MSG_WARN);
|
||||
try {
|
||||
@@ -998,8 +1001,6 @@ public class Check extends Update {
|
||||
throw new BuildException(msg, ex);
|
||||
}
|
||||
log(msg, ex, Project.MSG_ERR);
|
||||
} finally {
|
||||
Settings.cleanup(true);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1028,33 +1029,33 @@ public class Check extends Update {
|
||||
@Override
|
||||
protected void populateSettings() throws BuildException {
|
||||
super.populateSettings();
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate);
|
||||
Settings.setArrayIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFiles.toArray(new String[suppressionFiles.size()]));
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, enableExperimental);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_JAR_ENABLED, jarAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, pyDistributionAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED, pyPackageAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, rubygemsAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, opensslAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_CMAKE_ENABLED, cmakeAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, swiftPackageManagerAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, cocoapodsAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED, bundleAuditAnalyzerEnabled);
|
||||
Settings.setStringIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH, bundleAuditPath);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, autoconfAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, composerAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, nodeAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NSP_PACKAGE_ENABLED, nspAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NUSPEC_ENABLED, nuspecAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, centralAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NEXUS_ENABLED, nexusAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, archiveAnalyzerEnabled);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED, assemblyAnalyzerEnabled);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, zipExtensions);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate);
|
||||
getSettings().setArrayIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFiles.toArray(new String[suppressionFiles.size()]));
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, enableExperimental);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_JAR_ENABLED, jarAnalyzerEnabled);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, pyDistributionAnalyzerEnabled);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED, pyPackageAnalyzerEnabled);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, rubygemsAnalyzerEnabled);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, opensslAnalyzerEnabled);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_CMAKE_ENABLED, cmakeAnalyzerEnabled);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, swiftPackageManagerAnalyzerEnabled);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, cocoapodsAnalyzerEnabled);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED, bundleAuditAnalyzerEnabled);
|
||||
getSettings().setStringIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH, bundleAuditPath);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, autoconfAnalyzerEnabled);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, composerAnalyzerEnabled);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, nodeAnalyzerEnabled);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_NSP_PACKAGE_ENABLED, nspAnalyzerEnabled);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_NUSPEC_ENABLED, nuspecAnalyzerEnabled);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, centralAnalyzerEnabled);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_NEXUS_ENABLED, nexusAnalyzerEnabled);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, archiveAnalyzerEnabled);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED, assemblyAnalyzerEnabled);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
|
||||
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, zipExtensions);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1065,7 +1066,7 @@ public class Check extends Update {
|
||||
* @throws BuildException thrown if a CVSS score is found that is higher
|
||||
* than the threshold set
|
||||
*/
|
||||
private void checkForFailure(List<Dependency> dependencies) throws BuildException {
|
||||
private void checkForFailure(Dependency[] dependencies) throws BuildException {
|
||||
final StringBuilder ids = new StringBuilder();
|
||||
for (Dependency d : dependencies) {
|
||||
for (Vulnerability v : d.getVulnerabilities()) {
|
||||
@@ -1092,7 +1093,7 @@ public class Check extends Update {
|
||||
*
|
||||
* @param dependencies a list of dependency objects
|
||||
*/
|
||||
private void showSummary(List<Dependency> dependencies) {
|
||||
private void showSummary(Dependency[] dependencies) {
|
||||
final StringBuilder summary = new StringBuilder();
|
||||
for (Dependency d : dependencies) {
|
||||
boolean firstEntry = true;
|
||||
|
||||
@@ -37,21 +37,35 @@ public class Purge extends Task {
|
||||
* The properties file location.
|
||||
*/
|
||||
private static final String PROPERTIES_FILE = "task.properties";
|
||||
/**
|
||||
* The configured settings.
|
||||
*/
|
||||
private Settings settings;
|
||||
|
||||
/**
|
||||
* The location of the data directory that contains
|
||||
*/
|
||||
private String dataDirectory = null;
|
||||
/**
|
||||
* Indicates if dependency-check should fail the build if an exception
|
||||
* occurs.
|
||||
*/
|
||||
private boolean failOnError = true;
|
||||
|
||||
/**
|
||||
* Construct a new DependencyCheckTask.
|
||||
*/
|
||||
public Purge() {
|
||||
super();
|
||||
|
||||
// Call this before Dependency Check Core starts logging anything - this way, all SLF4J messages from
|
||||
// core end up coming through this tasks logger
|
||||
StaticLoggerBinder.getSingleton().setTask(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* The location of the data directory that contains
|
||||
*/
|
||||
private String dataDirectory = null;
|
||||
public Settings getSettings() {
|
||||
return settings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the value of dataDirectory.
|
||||
@@ -71,12 +85,6 @@ public class Purge extends Task {
|
||||
this.dataDirectory = dataDirectory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Indicates if dependency-check should fail the build if an exception
|
||||
* occurs.
|
||||
*/
|
||||
private boolean failOnError = true;
|
||||
|
||||
/**
|
||||
* Get the value of failOnError.
|
||||
*
|
||||
@@ -106,7 +114,7 @@ public class Purge extends Task {
|
||||
populateSettings();
|
||||
File db;
|
||||
try {
|
||||
db = new File(Settings.getDataDirectory(), "dc.h2.db");
|
||||
db = new File(settings.getDataDirectory(), "dc.h2.db");
|
||||
if (db.exists()) {
|
||||
if (db.delete()) {
|
||||
log("Database file purged; local copy of the NVD has been removed", Project.MSG_INFO);
|
||||
@@ -131,7 +139,7 @@ public class Purge extends Task {
|
||||
}
|
||||
log(msg, Project.MSG_ERR);
|
||||
} finally {
|
||||
Settings.cleanup(true);
|
||||
settings.cleanup(true);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -143,9 +151,9 @@ public class Purge extends Task {
|
||||
* @throws BuildException thrown if the properties file cannot be read.
|
||||
*/
|
||||
protected void populateSettings() throws BuildException {
|
||||
Settings.initialize();
|
||||
settings = new Settings();
|
||||
try (InputStream taskProperties = this.getClass().getClassLoader().getResourceAsStream(PROPERTIES_FILE)) {
|
||||
Settings.mergeProperties(taskProperties);
|
||||
settings.mergeProperties(taskProperties);
|
||||
} catch (IOException ex) {
|
||||
final String msg = "Unable to load the dependency-check ant task.properties file.";
|
||||
if (this.failOnError) {
|
||||
@@ -154,13 +162,13 @@ public class Purge extends Task {
|
||||
log(msg, ex, Project.MSG_WARN);
|
||||
}
|
||||
if (dataDirectory != null) {
|
||||
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
|
||||
settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
|
||||
} else {
|
||||
final File jarPath = new File(Purge.class.getProtectionDomain().getCodeSource().getLocation().getPath());
|
||||
final File base = jarPath.getParentFile();
|
||||
final String sub = Settings.getString(Settings.KEYS.DATA_DIRECTORY);
|
||||
final String sub = settings.getString(Settings.KEYS.DATA_DIRECTORY);
|
||||
final File dataDir = new File(base, sub);
|
||||
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
|
||||
settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
|
||||
}
|
||||
}
|
||||
}
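With Purge holding its own Settings field, the task lifecycle becomes: populateSettings() creates the instance and merges task.properties, execute() reads it, and the finally block releases it. A condensed sketch of that flow, built only from the calls shown in this hunk and the execute() hunk above:

    // inside Purge.execute(), condensed
    populateSettings();                          // settings = new Settings(); settings.mergeProperties(taskProperties);
    try {
        final File db = new File(settings.getDataDirectory(), "dc.h2.db");
        // delete the local H2 copy of the NVD if it exists
    } finally {
        settings.cleanup(true);                  // release this task's Settings instance
    }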
|
||||
|
||||
@@ -385,7 +385,7 @@ public class Update extends Purge {
|
||||
@Override
|
||||
public void execute() throws BuildException {
|
||||
populateSettings();
|
||||
try (Engine engine = new Engine(Update.class.getClassLoader())) {
|
||||
try (Engine engine = new Engine(Update.class.getClassLoader(), getSettings())) {
|
||||
try {
|
||||
engine.doUpdates();
|
||||
} catch (UpdateException ex) {
|
||||
@@ -400,8 +400,6 @@ public class Update extends Purge {
|
||||
throw new BuildException(msg, ex);
|
||||
}
|
||||
log(msg, Project.MSG_ERR);
|
||||
} finally {
|
||||
Settings.cleanup(true);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -415,23 +413,23 @@ public class Update extends Purge {
|
||||
@Override
|
||||
protected void populateSettings() throws BuildException {
|
||||
super.populateSettings();
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUsername);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPassword);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUsername);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPassword);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base);
|
||||
if (cveValidForHours != null) {
|
||||
if (cveValidForHours >= 0) {
|
||||
Settings.setInt(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours);
|
||||
getSettings().setInt(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours);
|
||||
} else {
|
||||
throw new BuildException("Invalid setting: `cpeValidForHours` must be 0 or greater");
|
||||
}
|
||||
|
||||
@@ -48,7 +48,7 @@ public class StaticLoggerBinder implements LoggerFactoryBinder {
|
||||
*
|
||||
* @return the StaticLoggerBinder singleton
|
||||
*/
|
||||
public static final StaticLoggerBinder getSingleton() {
|
||||
public static StaticLoggerBinder getSingleton() {
|
||||
return SINGLETON;
|
||||
}
|
||||
|
||||
|
||||
@@ -21,13 +21,12 @@ import java.io.File;
|
||||
|
||||
import org.apache.tools.ant.BuildException;
|
||||
import org.apache.tools.ant.BuildFileRule;
|
||||
import org.junit.After;
|
||||
import org.apache.tools.ant.types.LogLevel;
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.owasp.dependencycheck.BaseDBTestCase;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
@@ -35,7 +34,7 @@ import static org.junit.Assert.assertTrue;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class DependencyCheckTaskTest {
|
||||
public class DependencyCheckTaskTest extends BaseDBTestCase {
|
||||
|
||||
@Rule
|
||||
public BuildFileRule buildFileRule = new BuildFileRule();
|
||||
@@ -44,18 +43,11 @@ public class DependencyCheckTaskTest {
|
||||
public ExpectedException expectedException = ExpectedException.none();
|
||||
|
||||
@Before
|
||||
@Override
|
||||
public void setUp() throws Exception {
|
||||
Settings.initialize();
|
||||
BaseDBTestCase.ensureDBExists();
|
||||
super.setUp();
|
||||
final String buildFile = this.getClass().getClassLoader().getResource("build.xml").getPath();
|
||||
buildFileRule.configureProject(buildFile);
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
//no cleanup...
|
||||
//executeTarget("cleanup");
|
||||
Settings.cleanup(true);
|
||||
buildFileRule.configureProject(buildFile, LogLevel.VERBOSE.getLevel());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -125,6 +117,18 @@ public class DependencyCheckTaskTest {
|
||||
|
||||
// WHEN executing the ant task
|
||||
buildFileRule.executeTarget(antTaskName);
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println(buildFileRule.getError());
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println(buildFileRule.getFullLog());
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println("----------------------------------------------------------");
|
||||
|
||||
// THEN the ant task executed without error
|
||||
final File report = new File("target/suppression-report.html");
|
||||
|
||||
@@ -20,7 +20,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
|
||||
<parent>
|
||||
<groupId>org.owasp</groupId>
|
||||
<artifactId>dependency-check-parent</artifactId>
|
||||
<version>2.1.2-SNAPSHOT</version>
|
||||
<version>3.0.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>dependency-check-cli</artifactId>
|
||||
|
||||
@@ -53,6 +53,10 @@ public class App {
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(App.class);
|
||||
/**
|
||||
* The configured settings.
|
||||
*/
|
||||
private Settings settings = null;
|
||||
|
||||
/**
|
||||
* The main method for the application.
|
||||
@@ -61,17 +65,28 @@ public class App {
|
||||
*/
|
||||
public static void main(String[] args) {
|
||||
int exitCode = 0;
|
||||
try {
|
||||
Settings.initialize();
|
||||
final App app = new App();
|
||||
exitCode = app.run(args);
|
||||
LOGGER.debug("Exit code: {}", exitCode);
|
||||
} finally {
|
||||
Settings.cleanup(true);
|
||||
}
|
||||
final App app = new App();
|
||||
exitCode = app.run(args);
|
||||
LOGGER.debug("Exit code: {}", exitCode);
|
||||
System.exit(exitCode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds the App object.
|
||||
*/
|
||||
public App() {
|
||||
settings = new Settings();
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds the App object; this method is used for testing.
|
||||
*
|
||||
* @param settings the configured settings
|
||||
*/
|
||||
protected App(Settings settings) {
|
||||
this.settings = settings;
|
||||
}
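This protected constructor exists so tests can inject a pre-built Settings instance rather than touching global state; the reworked AppTest further down uses it roughly like this:

    // test-side sketch (see AppTest below); getSettings() comes from the new BaseTest base class
    final App instance = new App(getSettings());
    final String result = instance.ensureCanonicalPath("../*.jar");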
|
||||
|
||||
/**
|
||||
* Main CLI entry-point into the application.
|
||||
*
|
||||
@@ -80,7 +95,7 @@ public class App {
|
||||
*/
|
||||
public int run(String[] args) {
|
||||
int exitCode = 0;
|
||||
final CliParser cli = new CliParser();
|
||||
final CliParser cli = new CliParser(settings);
|
||||
|
||||
try {
|
||||
cli.parse(args);
|
||||
@@ -109,10 +124,11 @@ public class App {
|
||||
LOGGER.error(ex.getMessage());
|
||||
LOGGER.debug("Error loading properties file", ex);
|
||||
exitCode = -4;
|
||||
return exitCode;
|
||||
}
|
||||
File db;
|
||||
try {
|
||||
db = new File(Settings.getDataDirectory(), Settings.getString(Settings.KEYS.DB_FILE_NAME, "dc.h2.db"));
|
||||
db = new File(settings.getDataDirectory(), settings.getString(Settings.KEYS.DB_FILE_NAME, "dc.h2.db"));
|
||||
if (db.exists()) {
|
||||
if (db.delete()) {
|
||||
LOGGER.info("Database file purged; local copy of the NVD has been removed");
|
||||
@@ -127,6 +143,8 @@ public class App {
|
||||
} catch (IOException ex) {
|
||||
LOGGER.error("Unable to delete the database");
|
||||
exitCode = -7;
|
||||
} finally {
|
||||
settings.cleanup();
|
||||
}
|
||||
}
|
||||
} else if (cli.isGetVersion()) {
|
||||
@@ -138,6 +156,7 @@ public class App {
|
||||
LOGGER.error(ex.getMessage());
|
||||
LOGGER.debug("Error loading properties file", ex);
|
||||
exitCode = -4;
|
||||
return exitCode;
|
||||
}
|
||||
try {
|
||||
runUpdateOnly();
|
||||
@@ -147,6 +166,8 @@ public class App {
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.error(ex.getMessage());
|
||||
exitCode = -9;
|
||||
} finally {
|
||||
settings.cleanup();
|
||||
}
|
||||
} else if (cli.isRunScan()) {
|
||||
try {
|
||||
@@ -155,6 +176,7 @@ public class App {
|
||||
LOGGER.error(ex.getMessage());
|
||||
LOGGER.debug("Error loading properties file", ex);
|
||||
exitCode = -4;
|
||||
return exitCode;
|
||||
}
|
||||
try {
|
||||
final String[] scanFiles = cli.getScanFiles();
|
||||
@@ -183,6 +205,8 @@ public class App {
|
||||
for (Throwable e : ex.getExceptions()) {
|
||||
LOGGER.error(e.getMessage());
|
||||
}
|
||||
} finally {
|
||||
settings.cleanup();
|
||||
}
|
||||
} else {
|
||||
cli.printHelp();
|
||||
@@ -221,7 +245,7 @@ public class App {
|
||||
final List<String> antStylePaths = getPaths(files);
|
||||
final Set<File> paths = scanAntStylePaths(antStylePaths, symLinkDepth, excludes);
|
||||
|
||||
engine = new Engine();
|
||||
engine = new Engine(settings);
|
||||
engine.scan(paths);
|
||||
|
||||
ExceptionCollection exCol = null;
|
||||
@@ -250,7 +274,7 @@ public class App {
|
||||
return determineReturnCode(engine, cvssFailScore);
|
||||
} finally {
|
||||
if (engine != null) {
|
||||
engine.cleanup();
|
||||
engine.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -288,7 +312,6 @@ public class App {
|
||||
* @param excludes an array of ant style excludes
|
||||
* @return returns the set of identified files
|
||||
* @throws InvalidScanPathException thrown when the scan path is invalid
|
||||
* @throws IllegalStateException
|
||||
*/
|
||||
private Set<File> scanAntStylePaths(List<String> antStylePaths, int symLinkDepth, String[] excludes)
|
||||
throws InvalidScanPathException {
|
||||
@@ -359,7 +382,7 @@ public class App {
|
||||
* connection to the database could not be established
|
||||
*/
|
||||
private void runUpdateOnly() throws UpdateException, DatabaseException {
|
||||
try (Engine engine = new Engine()) {
|
||||
try (Engine engine = new Engine(settings)) {
|
||||
engine.doUpdates();
|
||||
}
|
||||
}
|
||||
@@ -401,7 +424,7 @@ public class App {
|
||||
|
||||
if (propertiesFile != null) {
|
||||
try {
|
||||
Settings.mergeProperties(propertiesFile);
|
||||
settings.mergeProperties(propertiesFile);
|
||||
} catch (FileNotFoundException ex) {
|
||||
throw new InvalidSettingException("Unable to find properties file '" + propertiesFile.getPath() + "'", ex);
|
||||
} catch (IOException ex) {
|
||||
@@ -413,65 +436,65 @@ public class App {
|
||||
// on the command line. This is true of other boolean values set below not using the setBooleanIfNotNull.
|
||||
final boolean nexusUsesProxy = cli.isNexusUsesProxy();
|
||||
if (dataDirectory != null) {
|
||||
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
|
||||
settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
|
||||
} else if (System.getProperty("basedir") != null) {
|
||||
final File dataDir = new File(System.getProperty("basedir"), "data");
|
||||
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
|
||||
settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
|
||||
} else {
|
||||
final File jarPath = new File(App.class.getProtectionDomain().getCodeSource().getLocation().getPath());
|
||||
final File base = jarPath.getParentFile();
|
||||
final String sub = Settings.getString(Settings.KEYS.DATA_DIRECTORY);
|
||||
final String sub = settings.getString(Settings.KEYS.DATA_DIRECTORY);
|
||||
final File dataDir = new File(base, sub);
|
||||
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
|
||||
settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
|
||||
}
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUser);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPass);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);
|
||||
Settings.setIntIfNotNull(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours);
|
||||
settings.setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUser);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPass);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);
|
||||
settings.setIntIfNotNull(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours);
|
||||
|
||||
Settings.setArrayIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFiles);
|
||||
settings.setArrayIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFiles);
|
||||
|
||||
//File Type Analyzer Settings
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, experimentalEnabled);
|
||||
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, experimentalEnabled);
|
||||
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_JAR_ENABLED, !cli.isJarDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, !cli.isArchiveDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, !cli.isPythonDistributionDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED, !cli.isPythonPackageDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, !cli.isAutoconfDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_CMAKE_ENABLED, !cli.isCmakeDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NUSPEC_ENABLED, !cli.isNuspecDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED, !cli.isAssemblyDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED, !cli.isBundleAuditDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, !cli.isOpenSSLDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, !cli.isComposerDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, !cli.isNodeJsDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NSP_PACKAGE_ENABLED, !cli.isNspDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, !cli.isSwiftPackageAnalyzerDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, !cli.isCocoapodsAnalyzerDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, !cli.isRubyGemspecDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, !cli.isCentralDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, !cli.isNexusDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_JAR_ENABLED, !cli.isJarDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, !cli.isArchiveDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, !cli.isPythonDistributionDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED, !cli.isPythonPackageDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, !cli.isAutoconfDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_CMAKE_ENABLED, !cli.isCmakeDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_NUSPEC_ENABLED, !cli.isNuspecDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED, !cli.isAssemblyDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED, !cli.isBundleAuditDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, !cli.isOpenSSLDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, !cli.isComposerDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, !cli.isNodeJsDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_NSP_PACKAGE_ENABLED, !cli.isNspDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, !cli.isSwiftPackageAnalyzerDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, !cli.isCocoapodsAnalyzerDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, !cli.isRubyGemspecDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, !cli.isCentralDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, !cli.isNexusDisabled());
|
||||
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH, cli.getPathToBundleAudit());
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, additionalZipExtensions);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH, cli.getPathToBundleAudit());
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, additionalZipExtensions);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
|
||||
if (cveBase12 != null && !cveBase12.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.CVE_SCHEMA_1_2, cveBase12);
|
||||
Settings.setString(Settings.KEYS.CVE_SCHEMA_2_0, cveBase20);
|
||||
Settings.setString(Settings.KEYS.CVE_MODIFIED_12_URL, cveMod12);
|
||||
Settings.setString(Settings.KEYS.CVE_MODIFIED_20_URL, cveMod20);
|
||||
settings.setString(Settings.KEYS.CVE_SCHEMA_1_2, cveBase12);
|
||||
settings.setString(Settings.KEYS.CVE_SCHEMA_2_0, cveBase20);
|
||||
settings.setString(Settings.KEYS.CVE_MODIFIED_12_URL, cveMod12);
|
||||
settings.setString(Settings.KEYS.CVE_MODIFIED_20_URL, cveMod20);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -53,6 +53,19 @@ public final class CliParser {
|
||||
* Indicates whether the arguments are valid.
|
||||
*/
|
||||
private boolean isValid = true;
|
||||
/**
|
||||
* The configured settings.
|
||||
*/
|
||||
private final Settings settings;
|
||||
|
||||
/**
|
||||
* Constructs a new CLI Parser object with the configured settings.
|
||||
*
|
||||
* @param settings the configured settings
|
||||
*/
|
||||
public CliParser(Settings settings) {
|
||||
this.settings = settings;
|
||||
}
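Because the parser now reads its defaults from the injected Settings (see hasDisableOption below), callers construct it with the same Settings instance they later hand to the Engine; App.run in this diff does exactly that:

    // caller-side sketch mirroring App.run
    final CliParser cli = new CliParser(settings);
    cli.parse(args);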
|
||||
|
||||
/**
|
||||
* Parses the arguments passed in and captures the results for later use.
|
||||
@@ -582,7 +595,7 @@ public final class CliParser {
|
||||
private boolean hasDisableOption(String argument, String setting) {
|
||||
if (line == null || !line.hasOption(argument)) {
|
||||
try {
|
||||
return !Settings.getBoolean(setting);
|
||||
return !settings.getBoolean(setting);
|
||||
} catch (InvalidSettingException ise) {
|
||||
LOGGER.warn("Invalid property setting '{}' defaulting to false", setting);
|
||||
return false;
|
||||
@@ -801,7 +814,7 @@ public final class CliParser {
|
||||
// still honor the property if it's set.
|
||||
if (line == null || !line.hasOption(ARGUMENT.NEXUS_USES_PROXY)) {
|
||||
try {
|
||||
return Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY);
|
||||
return settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY);
|
||||
} catch (InvalidSettingException ise) {
|
||||
return true;
|
||||
}
|
||||
@@ -823,10 +836,10 @@ public final class CliParser {
|
||||
final String helpMsg = String.format("%n%s"
|
||||
+ " can be used to identify if there are any known CVE vulnerabilities in libraries utilized by an application. "
|
||||
+ "%s will automatically update required data from the Internet, such as the CVE and CPE data files from nvd.nist.gov.%n%n",
|
||||
Settings.getString("application.name", "DependencyCheck"),
|
||||
Settings.getString("application.name", "DependencyCheck"));
|
||||
settings.getString("application.name", "DependencyCheck"),
|
||||
settings.getString("application.name", "DependencyCheck"));
|
||||
|
||||
formatter.printHelp(Settings.getString("application.name", "DependencyCheck"),
|
||||
formatter.printHelp(settings.getString("application.name", "DependencyCheck"),
|
||||
helpMsg,
|
||||
options,
|
||||
"",
|
||||
@@ -1054,8 +1067,8 @@ public final class CliParser {
|
||||
*/
|
||||
public void printVersionInfo() {
|
||||
final String version = String.format("%s version %s",
|
||||
Settings.getString(Settings.KEYS.APPLICATION_NAME, "dependency-check"),
|
||||
Settings.getString(Settings.KEYS.APPLICATION_VERSION, "Unknown"));
|
||||
settings.getString(Settings.KEYS.APPLICATION_NAME, "dependency-check"),
|
||||
settings.getString(Settings.KEYS.APPLICATION_VERSION, "Unknown"));
|
||||
System.out.println(version);
|
||||
}
|
||||
|
||||
|
||||
@@ -30,8 +30,6 @@ import java.util.Map;
|
||||
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.cli.UnrecognizedOptionException;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
@@ -42,7 +40,7 @@ import org.owasp.dependencycheck.utils.Settings.KEYS;
|
||||
/**
|
||||
* Tests for the {@link AppTest} class.
|
||||
*/
|
||||
public class AppTest {
|
||||
public class AppTest extends BaseTest {
|
||||
|
||||
/**
|
||||
* Test rule for asserting exceptions and their contents.
|
||||
@@ -50,29 +48,13 @@ public class AppTest {
|
||||
@Rule
|
||||
public ExpectedException expectedException = ExpectedException.none();
|
||||
|
||||
/**
|
||||
* Initialize the {@link Settings} singleton.
|
||||
*/
|
||||
@Before
|
||||
public void setUp() {
|
||||
Settings.initialize();
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean the {@link Settings} singleton.
|
||||
*/
|
||||
@After
|
||||
public void tearDown() {
|
||||
Settings.cleanup();
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of ensureCanonicalPath method, of class App.
|
||||
*/
|
||||
@Test
|
||||
public void testEnsureCanonicalPath() {
|
||||
String file = "../*.jar";
|
||||
App instance = new App();
|
||||
App instance = new App(getSettings());
|
||||
String result = instance.ensureCanonicalPath(file);
|
||||
assertFalse(result.contains(".."));
|
||||
assertTrue(result.endsWith("*.jar"));
|
||||
@@ -85,7 +67,7 @@ public class AppTest {
|
||||
|
||||
/**
|
||||
* Assert that boolean properties can be set on the CLI and parsed into the
|
||||
* {@link Settings} singleton.
|
||||
* {@link Settings}.
|
||||
*
|
||||
* @throws Exception the unexpected {@link Exception}.
|
||||
*/
|
||||
@@ -165,13 +147,13 @@ public class AppTest {
|
||||
String[] args = {"-P", prop.getAbsolutePath(), "--suppression", "another-file.xml"};
|
||||
|
||||
// WHEN parsing the CLI arguments
|
||||
final CliParser cli = new CliParser();
|
||||
final CliParser cli = new CliParser(getSettings());
|
||||
cli.parse(args);
|
||||
final App classUnderTest = new App();
|
||||
final App classUnderTest = new App(getSettings());
|
||||
classUnderTest.populateSettings(cli);
|
||||
|
||||
// THEN the suppression file is set in the settings singleton for use in the application core
|
||||
assertThat("Expected the suppression file to be set in the Settings singleton", Settings.getString(KEYS.SUPPRESSION_FILE), is("another-file.xml"));
|
||||
// THEN the suppression file is set in the settings for use in the application core
|
||||
assertThat("Expected the suppression file to be set in the Settings", getSettings().getString(KEYS.SUPPRESSION_FILE), is("another-file.xml"));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -188,31 +170,25 @@ public class AppTest {
|
||||
String[] args = {"-P", prop.getAbsolutePath(), "--suppression", "first-file.xml", "another-file.xml"};
|
||||
|
||||
// WHEN parsing the CLI arguments
|
||||
final CliParser cli = new CliParser();
|
||||
final CliParser cli = new CliParser(getSettings());
|
||||
cli.parse(args);
|
||||
final App classUnderTest = new App();
|
||||
final App classUnderTest = new App(getSettings());
|
||||
classUnderTest.populateSettings(cli);
|
||||
|
||||
// THEN the suppression file is set in the settings singleton for use in the application core
|
||||
assertThat("Expected the suppression files to be set in the Settings singleton with a separator", Settings.getString(KEYS.SUPPRESSION_FILE), is("first-file.xml,another-file.xml"));
|
||||
// THEN the suppression file is set in the settings for use in the application core
|
||||
assertThat("Expected the suppression files to be set in the Settings with a separator", getSettings().getString(KEYS.SUPPRESSION_FILE), is("first-file.xml,another-file.xml"));
|
||||
}
|
||||
|
||||
private boolean testBooleanProperties(String[] args, Map<String, Boolean> expected) throws URISyntaxException, FileNotFoundException, ParseException, InvalidSettingException {
|
||||
Settings.initialize();
|
||||
try {
|
||||
final CliParser cli = new CliParser();
|
||||
cli.parse(args);
|
||||
App instance = new App();
|
||||
instance.populateSettings(cli);
|
||||
boolean results = true;
|
||||
for (Map.Entry<String, Boolean> entry : expected.entrySet()) {
|
||||
results &= Settings.getBoolean(entry.getKey()) == entry.getValue();
|
||||
}
|
||||
|
||||
return results;
|
||||
} finally {
|
||||
Settings.cleanup();
|
||||
this.reloadSettings();
|
||||
final CliParser cli = new CliParser(getSettings());
|
||||
cli.parse(args);
|
||||
App instance = new App(getSettings());
|
||||
instance.populateSettings(cli);
|
||||
boolean results = true;
|
||||
for (Map.Entry<String, Boolean> entry : expected.entrySet()) {
|
||||
results &= getSettings().getBoolean(entry.getKey()) == entry.getValue();
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -0,0 +1,62 @@
/*
* Copyright 2014 OWASP.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.owasp.dependencycheck;

import org.junit.After;
import org.junit.Before;
import org.owasp.dependencycheck.utils.Settings;

/**
*
* @author Jeremy Long
*/
public class BaseTest {

/**
* The configured settings.
*/
private Settings settings;

/**
* Initialize the {@link Settings}.
*/
@Before
public void setUp() {
settings = new Settings();
}

/**
* Clean the {@link Settings}.
*/
@After
public void tearDown() {
settings.cleanup(true);
}

/**
* Returns the settings for the test cases.
*
* @return
*/
protected Settings getSettings() {
return settings;
}

protected void reloadSettings() {
tearDown();
setUp();
}
}
@@ -33,17 +33,7 @@ import org.owasp.dependencycheck.utils.Settings;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class CliParserTest {
|
||||
|
||||
@BeforeClass
|
||||
public static void setUpClass() throws Exception {
|
||||
Settings.initialize();
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDownClass() throws Exception {
|
||||
Settings.cleanup(true);
|
||||
}
|
||||
public class CliParserTest extends BaseTest {
|
||||
|
||||
/**
|
||||
* Test of parse method, of class CliParser.
|
||||
@@ -59,7 +49,7 @@ public class CliParserTest {
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(baos));
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
instance.parse(args);
|
||||
|
||||
Assert.assertFalse(instance.isGetVersion());
|
||||
@@ -78,7 +68,7 @@ public class CliParserTest {
|
||||
String[] args = {"-help"};
|
||||
PrintStream out = System.out;
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
instance.parse(args);
|
||||
|
||||
Assert.assertFalse(instance.isGetVersion());
|
||||
@@ -96,7 +86,7 @@ public class CliParserTest {
|
||||
|
||||
String[] args = {"-version"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
instance.parse(args);
|
||||
Assert.assertTrue(instance.isGetVersion());
|
||||
Assert.assertFalse(instance.isGetHelp());
|
||||
@@ -114,7 +104,7 @@ public class CliParserTest {
|
||||
|
||||
String[] args = {"--failOnCVSS"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
try {
|
||||
instance.parse(args);
|
||||
} catch (ParseException ex) {
|
||||
@@ -135,7 +125,7 @@ public class CliParserTest {
|
||||
|
||||
String[] args = {"--failOnCVSS","bad"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
instance.parse(args);
|
||||
Assert.assertEquals("Default should be 11", 11, instance.getFailOnCVSS());
|
||||
Assert.assertFalse(instance.isGetVersion());
|
||||
@@ -153,7 +143,7 @@ public class CliParserTest {
|
||||
|
||||
String[] args = {"--failOnCVSS","6"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
instance.parse(args);
|
||||
Assert.assertEquals(6, instance.getFailOnCVSS());
|
||||
Assert.assertFalse(instance.isGetVersion());
|
||||
@@ -178,7 +168,7 @@ public class CliParserTest {
|
||||
System.setOut(new PrintStream(baos_out));
|
||||
System.setErr(new PrintStream(baos_err));
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
|
||||
try {
|
||||
instance.parse(args);
|
||||
@@ -200,7 +190,7 @@ public class CliParserTest {
|
||||
|
||||
String[] args = {"-scan"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
|
||||
try {
|
||||
instance.parse(args);
|
||||
@@ -223,7 +213,7 @@ public class CliParserTest {
|
||||
|
||||
String[] args = {"-scan", "jar.that.does.not.exist", "-app", "test"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
try {
|
||||
instance.parse(args);
|
||||
} catch (FileNotFoundException ex) {
|
||||
@@ -245,7 +235,7 @@ public class CliParserTest {
|
||||
File path = new File(this.getClass().getClassLoader().getResource("checkSumTest.file").toURI().getPath());
|
||||
String[] args = {"-scan", path.getCanonicalPath(), "-out", "./", "-app", "test"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
instance.parse(args);
|
||||
|
||||
Assert.assertEquals(path.getCanonicalPath(), instance.getScanFiles()[0]);
|
||||
@@ -267,7 +257,7 @@ public class CliParserTest {
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(baos));
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
instance.printVersionInfo();
|
||||
try {
|
||||
baos.flush();
|
||||
@@ -296,7 +286,7 @@ public class CliParserTest {
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(baos));
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
String[] args = {"-h"};
|
||||
instance.parse(args);
|
||||
instance.printHelp();
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
autoupdate=false
|
||||
|
||||
somethingmadeup=test
|
||||
analyzer.experimental.enabled=false
|
||||
analyzer.jar.enabled=true
|
||||
analyzer.archive.enabled=true
|
||||
|
||||
@@ -20,7 +20,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<parent>
|
||||
<groupId>org.owasp</groupId>
|
||||
<artifactId>dependency-check-parent</artifactId>
|
||||
<version>2.1.2-SNAPSHOT</version>
|
||||
<version>3.0.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>dependency-check-core</artifactId>
|
||||
|
||||
@@ -21,12 +21,12 @@ import org.owasp.dependencycheck.analyzer.Analyzer;
|
||||
import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.concurrent.Callable;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* Task to support parallelism of dependency-check analysis. Analyses a single
|
||||
@@ -34,6 +34,7 @@ import java.util.concurrent.Callable;
|
||||
*
|
||||
* @author Stefan Neuhaus
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class AnalysisTask implements Callable<Void> {
|
||||
|
||||
/**
|
||||
@@ -57,10 +58,6 @@ public class AnalysisTask implements Callable<Void> {
|
||||
* The list of exceptions that may occur during analysis.
|
||||
*/
|
||||
private final List<Throwable> exceptions;
|
||||
/**
|
||||
* A reference to the global settings object.
|
||||
*/
|
||||
private final Settings settings;
|
||||
|
||||
/**
|
||||
* Creates a new analysis task.
|
||||
@@ -70,16 +67,12 @@ public class AnalysisTask implements Callable<Void> {
|
||||
* @param engine the dependency-check engine
|
||||
* @param exceptions exceptions that occur during analysis will be added to
|
||||
* this collection of exceptions
|
||||
* @param settings a reference to the global settings object; this is
|
||||
* necessary so that when the thread is started the dependencies have a
|
||||
* correct reference to the global settings.
|
||||
*/
|
||||
AnalysisTask(Analyzer analyzer, Dependency dependency, Engine engine, List<Throwable> exceptions, Settings settings) {
|
||||
AnalysisTask(Analyzer analyzer, Dependency dependency, Engine engine, List<Throwable> exceptions) {
|
||||
this.analyzer = analyzer;
|
||||
this.dependency = dependency;
|
||||
this.engine = engine;
|
||||
this.exceptions = exceptions;
|
||||
this.settings = settings;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -89,26 +82,20 @@ public class AnalysisTask implements Callable<Void> {
|
||||
*/
|
||||
@Override
|
||||
public Void call() {
|
||||
try {
|
||||
Settings.setInstance(settings);
|
||||
|
||||
if (shouldAnalyze()) {
|
||||
LOGGER.debug("Begin Analysis of '{}' ({})", dependency.getActualFilePath(), analyzer.getName());
|
||||
try {
|
||||
analyzer.analyze(dependency, engine);
|
||||
} catch (AnalysisException ex) {
|
||||
LOGGER.warn("An error occurred while analyzing '{}' ({}).", dependency.getActualFilePath(), analyzer.getName());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
} catch (Throwable ex) {
|
||||
LOGGER.warn("An unexpected error occurred during analysis of '{}' ({}): {}",
|
||||
dependency.getActualFilePath(), analyzer.getName(), ex.getMessage());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
}
|
||||
if (shouldAnalyze()) {
|
||||
LOGGER.debug("Begin Analysis of '{}' ({})", dependency.getActualFilePath(), analyzer.getName());
|
||||
try {
|
||||
analyzer.analyze(dependency, engine);
|
||||
} catch (AnalysisException ex) {
|
||||
LOGGER.warn("An error occurred while analyzing '{}' ({}).", dependency.getActualFilePath(), analyzer.getName());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
} catch (Throwable ex) {
|
||||
LOGGER.warn("An unexpected error occurred during analysis of '{}' ({}): {}",
|
||||
dependency.getActualFilePath(), analyzer.getName(), ex.getMessage());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
}
|
||||
} finally {
|
||||
Settings.cleanup(false);
|
||||
}
|
||||
return null;
|
||||
}
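The deleted try/finally is the heart of the thread-safety fix: because the Engine now owns the Settings instance, a worker task no longer has to install a copy of the settings singleton on its own thread (Settings.setInstance) and tear it down again (Settings.cleanup(false)). Submitting a task reduces to the plain constructor shown above, roughly:

    // sketch based on the updated constructor; the executor variable name is illustrative
    final AnalysisTask task = new AnalysisTask(analyzer, dependency, engine, exceptions);
    final Future<Void> result = executorService.submit(task);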
|
||||
@@ -123,7 +110,6 @@ public class AnalysisTask implements Callable<Void> {
final FileTypeAnalyzer fileTypeAnalyzer = (FileTypeAnalyzer) analyzer;
return fileTypeAnalyzer.accept(dependency.getActualFile());
}

return true;
}
}

@@ -42,6 +42,7 @@ import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -57,8 +58,14 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import javax.annotation.concurrent.NotThreadSafe;
import org.owasp.dependencycheck.exception.H2DBLockException;
import org.owasp.dependencycheck.utils.H2DBLock;

//CSOFF: AvoidStarImport
import static org.owasp.dependencycheck.analyzer.AnalysisPhase.*;
//CSON: AvoidStarImport

/**
* Scans files, directories, etc. for Dependencies. Analyzers are loaded and
@@ -68,6 +75,7 @@ import static org.owasp.dependencycheck.analyzer.AnalysisPhase.*;
*
* @author Jeremy Long
*/
@NotThreadSafe
public class Engine implements FileFilter, AutoCloseable {

/**
@@ -150,6 +158,10 @@ public class Engine implements FileFilter, AutoCloseable {
* The list of dependencies.
*/
private final List<Dependency> dependencies = Collections.synchronizedList(new ArrayList<Dependency>());
/**
* The external view of the dependency list.
*/
private Dependency[] dependenciesExternalView = null;
/**
* A Map of analyzers grouped by Analysis phase.
*/
@@ -170,7 +182,7 @@ public class Engine implements FileFilter, AutoCloseable {
* The ClassLoader to use when dynamically loading Analyzer and Update
* services.
*/
private ClassLoader serviceClassLoader;
private final ClassLoader serviceClassLoader;
/**
* A reference to the database.
*/
@@ -179,30 +191,38 @@ public class Engine implements FileFilter, AutoCloseable {
* The Logger for use throughout the class.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(Engine.class);
/**
* The configured settings.
*/
private final Settings settings;

/**
* Creates a new {@link Mode#STANDALONE} Engine.
*
* @param settings reference to the configured settings
*/
public Engine() {
this(Mode.STANDALONE);
public Engine(Settings settings) {
this(Mode.STANDALONE, settings);
}

/**
* Creates a new Engine.
*
* @param mode the mode of operation
* @param settings reference to the configured settings
*/
public Engine(Mode mode) {
this(Thread.currentThread().getContextClassLoader(), mode);
public Engine(Mode mode, Settings settings) {
this(Thread.currentThread().getContextClassLoader(), mode, settings);
}

/**
* Creates a new {@link Mode#STANDALONE} Engine.
*
* @param serviceClassLoader a reference to the class loader being used
* @param settings reference to the configured settings
*/
public Engine(ClassLoader serviceClassLoader) {
this(serviceClassLoader, Mode.STANDALONE);
public Engine(ClassLoader serviceClassLoader, Settings settings) {
this(serviceClassLoader, Mode.STANDALONE, settings);
}

/**
@@ -210,8 +230,10 @@ public class Engine implements FileFilter, AutoCloseable {
*
* @param serviceClassLoader a reference to the class loader being used
* @param mode the mode of the engine
* @param settings reference to the configured settings
*/
public Engine(ClassLoader serviceClassLoader, Mode mode) {
public Engine(ClassLoader serviceClassLoader, Mode mode, Settings settings) {
this.settings = settings;
this.serviceClassLoader = serviceClassLoader;
this.mode = mode;
initializeEngine();
@@ -225,30 +247,22 @@ public class Engine implements FileFilter, AutoCloseable {
* database
*/
protected final void initializeEngine() {
if (mode.isDatabseRequired()) {
ConnectionFactory.initialize();
}
loadAnalyzers();
}

/**
* Properly cleans up resources allocated during analysis.
*/
public void cleanup() {
@Override
public void close() {
if (mode.isDatabseRequired()) {
if (database != null) {
database.close();
database = null;
}
ConnectionFactory.cleanup();
}
}

@Override
public void close() {
cleanup();
}
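Because the Engine now receives its Settings through the constructor and implements AutoCloseable, callers can scope both to a single scan. A minimal usage sketch follows; the property values and the LOGGER reference are assumptions for illustration, not code from this commit.

// Editorial sketch -- one Settings and one Engine per scan, closed deterministically.
final Settings settings = new Settings();
settings.setBoolean(Settings.KEYS.AUTO_UPDATE, true);
try (Engine engine = new Engine(settings)) {
    engine.doUpdates(); // refresh the cached vulnerability data before analysis
} catch (UpdateException | DatabaseException ex) {
    LOGGER.warn("Update failed; continuing with local data", ex);
} finally {
    settings.cleanup(true); // release the per-instance temp/data resources
}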
/**
* Loads the analyzers specified in the configuration file (or system
* properties).
@@ -260,10 +274,16 @@ public class Engine implements FileFilter, AutoCloseable {
for (AnalysisPhase phase : mode.getPhases()) {
analyzers.put(phase, new ArrayList<Analyzer>());
}

final AnalyzerService service = new AnalyzerService(serviceClassLoader);
boolean loadExperimental = false;
try {
loadExperimental = settings.getBoolean(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, false);
} catch (InvalidSettingException ex) {
LOGGER.trace("Experimental setting not configured; defaulting to false");
}
final AnalyzerService service = new AnalyzerService(serviceClassLoader, loadExperimental);
final List<Analyzer> iterator = service.getAnalyzers(mode.getPhases());
for (Analyzer a : iterator) {
a.initialize(this.settings);
analyzers.get(a.getAnalysisPhase()).add(a);
if (a instanceof FileTypeAnalyzer) {
this.fileTypeAnalyzers.add((FileTypeAnalyzer) a);
@@ -282,18 +302,44 @@ public class Engine implements FileFilter, AutoCloseable {
}

/**
* Get the dependencies identified. The returned list is a reference to the
* engine's synchronized list. <b>You must synchronize on the returned
* list</b> when you modify and iterate over it from multiple threads. E.g.
* this holds for analyzers supporting parallel processing during their
* analysis phase.
* Adds a dependency.
*
* @param dependency the dependency to add
*/
public synchronized void addDependency(Dependency dependency) {
dependencies.add(dependency);
dependenciesExternalView = null;
}

/**
* Sorts the dependency list.
*/
public synchronized void sortDependencies() {
//TODO - is this actually necessary????
Collections.sort(dependencies);
dependenciesExternalView = null;
}

/**
* Removes the dependency.
*
* @param dependency the dependency to remove.
*/
public synchronized void removeDependency(Dependency dependency) {
dependencies.remove(dependency);
dependenciesExternalView = null;
}

/**
* Returns a copy of the dependencies as an array.
*
* @return the dependencies identified
* @see Collections#synchronizedList(List)
* @see Analyzer#supportsParallelProcessing()
*/
public synchronized List<Dependency> getDependencies() {
return dependencies;
public synchronized Dependency[] getDependencies() {
if (dependenciesExternalView == null) {
dependenciesExternalView = dependencies.toArray(new Dependency[dependencies.size()]);
}
return dependenciesExternalView;
}
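A short sketch of what the new accessors give callers: mutation goes through the synchronized add/remove methods, while getDependencies() hands back a cached snapshot array that can be iterated without external locking. The engine, dependency, and LOGGER variables below are assumed context, not code from this commit.

// Editorial sketch -- the snapshot array is invalidated and rebuilt whenever
// addDependency/removeDependency/setDependencies run.
engine.addDependency(dependency); // 'dependency' obtained elsewhere, e.g. from scanFile
for (Dependency d : engine.getDependencies()) {
    // no synchronized(...) block needed on this snapshot, unlike the old List view
    LOGGER.debug("identified dependency: {}", d.getActualFilePath());
}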
/**
@@ -304,6 +350,7 @@ public class Engine implements FileFilter, AutoCloseable {
public synchronized void setDependencies(List<Dependency> dependencies) {
this.dependencies.clear();
this.dependencies.addAll(dependencies);
dependenciesExternalView = null;
}

/**
@@ -540,7 +587,7 @@ public class Engine implements FileFilter, AutoCloseable {
* @return the scanned dependency
* @since v1.4.4
*/
protected Dependency scanFile(File file, String projectReference) {
protected synchronized Dependency scanFile(File file, String projectReference) {
Dependency dependency = null;
if (file.isFile()) {
if (accept(file)) {
@@ -550,31 +597,31 @@ public class Engine implements FileFilter, AutoCloseable {
}
final String sha1 = dependency.getSha1sum();
boolean found = false;
synchronized (dependencies) {
if (sha1 != null) {
for (Dependency existing : dependencies) {
if (sha1.equals(existing.getSha1sum())) {
found = true;
if (projectReference != null) {
existing.addProjectReference(projectReference);
}
if (existing.getActualFilePath() != null && dependency.getActualFilePath() != null
&& !existing.getActualFilePath().equals(dependency.getActualFilePath())) {
existing.addRelatedDependency(dependency);
} else {
dependency = existing;
}
break;

if (sha1 != null) {
for (Dependency existing : dependencies) {
if (sha1.equals(existing.getSha1sum())) {
found = true;
if (projectReference != null) {
existing.addProjectReference(projectReference);
}
if (existing.getActualFilePath() != null && dependency.getActualFilePath() != null
&& !existing.getActualFilePath().equals(dependency.getActualFilePath())) {
existing.addRelatedDependency(dependency);
} else {
dependency = existing;
}
break;
}
}
if (!found) {
dependencies.add(dependency);
}
}
} else {
LOGGER.debug("Path passed to scanFile(File) is not a file that can be scanned by dependency-check: {}. Skipping the file.", file);
if (!found) {
dependencies.add(dependency);
dependenciesExternalView = null;
}
}
} else {
LOGGER.debug("Path passed to scanFile(File) is not a file that can be scanned by dependency-check: {}. Skipping the file.", file);
}
return dependency;
}
@@ -662,15 +709,14 @@ public class Engine implements FileFilter, AutoCloseable {
}
boolean autoUpdate = true;
try {
autoUpdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
autoUpdate = settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
} catch (InvalidSettingException ex) {
LOGGER.debug("Invalid setting for auto-update; using true.");
exceptions.add(ex);
}
if (autoUpdate) {
try {
database = CveDB.getInstance();
doUpdates();
doUpdates(true);
} catch (UpdateException ex) {
exceptions.add(ex);
LOGGER.warn("Unable to update Cached Web DataSource, using local "
@@ -681,10 +727,10 @@ public class Engine implements FileFilter, AutoCloseable {
}
} else {
try {
if (ConnectionFactory.isH2Connection() && !ConnectionFactory.h2DataFileExists()) {
if (ConnectionFactory.isH2Connection(settings) && !ConnectionFactory.h2DataFileExists(settings)) {
throw new ExceptionCollection(new NoDataException("Autoupdate is disabled and the database does not exist"), true);
} else {
database = CveDB.getInstance();
openDatabase(true, true);
}
} catch (IOException ex) {
throw new ExceptionCollection(new DatabaseException("Autoupdate is disabled and unable to connect to the database"), true);
@@ -735,13 +781,11 @@ public class Engine implements FileFilter, AutoCloseable {
* @param exceptions the collection of exceptions to collect
* @return a collection of analysis tasks
*/
protected List<AnalysisTask> getAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) {
protected synchronized List<AnalysisTask> getAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) {
final List<AnalysisTask> result = new ArrayList<>();
synchronized (dependencies) {
for (final Dependency dependency : dependencies) {
final AnalysisTask task = new AnalysisTask(analyzer, dependency, this, exceptions, Settings.getInstance());
result.add(task);
}
for (final Dependency dependency : dependencies) {
final AnalysisTask task = new AnalysisTask(analyzer, dependency, this, exceptions);
result.add(task);
}
return result;
}
@@ -766,14 +810,14 @@ public class Engine implements FileFilter, AutoCloseable {
/**
* Initializes the given analyzer.
*
* @param analyzer the analyzer to initialize
* @param analyzer the analyzer to prepare
* @throws InitializationException thrown when there is a problem
* initializing the analyzer
*/
protected void initializeAnalyzer(Analyzer analyzer) throws InitializationException {
try {
LOGGER.debug("Initializing {}", analyzer.getName());
analyzer.initialize();
analyzer.prepare(this);
} catch (InitializationException ex) {
LOGGER.error("Exception occurred initializing {}.", analyzer.getName());
LOGGER.debug("", ex);
@@ -816,21 +860,126 @@ public class Engine implements FileFilter, AutoCloseable {
* @throws UpdateException thrown if the operation fails
*/
public void doUpdates() throws UpdateException {
doUpdates(false);
}

/**
* Cycles through the cached web data sources and calls update on all of
* them.
*
* @param remainOpen whether or not the database connection should remain
* open
* @throws UpdateException thrown if the operation fails
*/
public void doUpdates(boolean remainOpen) throws UpdateException {
if (mode.isDatabseRequired()) {
LOGGER.info("Checking for updates");
final long updateStart = System.currentTimeMillis();
final UpdateService service = new UpdateService(serviceClassLoader);
final Iterator<CachedWebDataSource> iterator = service.getDataSources();
while (iterator.hasNext()) {
final CachedWebDataSource source = iterator.next();
source.update();
H2DBLock dblock = null;
try {
if (ConnectionFactory.isH2Connection(settings)) {
dblock = new H2DBLock(settings);
LOGGER.debug("locking for update");
dblock.lock();
}
openDatabase(false, false);
LOGGER.info("Checking for updates");
final long updateStart = System.currentTimeMillis();
final UpdateService service = new UpdateService(serviceClassLoader);
final Iterator<CachedWebDataSource> iterator = service.getDataSources();
while (iterator.hasNext()) {
final CachedWebDataSource source = iterator.next();
source.update(this);
}
database.close();
database = null;
LOGGER.info("Check for updates complete ({} ms)", System.currentTimeMillis() - updateStart);
if (remainOpen) {
openDatabase(true, false);
}
} catch (H2DBLockException ex) {
throw new UpdateException("Unable to obtain an exclusive lock on the H2 database to perform updates", ex);
} finally {
if (dblock != null) {
dblock.release();
}
}
LOGGER.info("Check for updates complete ({} ms)", System.currentTimeMillis() - updateStart);
} else {
LOGGER.info("Skipping update check in evidence collection mode.");
}
}
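The locking idiom used above generalizes to any work that must not run concurrently against the H2 database. The fragment below restates it as a hedged sketch for reuse; the exception message and the elided work are illustrative, while the H2DBLock and ConnectionFactory calls mirror those shown in this diff.

// Editorial sketch -- serialize H2 access across threads/JVMs, always releasing in finally.
H2DBLock lock = null;
try {
    if (ConnectionFactory.isH2Connection(settings)) {
        lock = new H2DBLock(settings);
        lock.lock();
    }
    // ... perform work that must not run concurrently against the H2 database ...
} catch (H2DBLockException ex) {
    throw new UpdateException("Unable to obtain the H2 database lock", ex);
} finally {
    if (lock != null) {
        lock.release();
    }
}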
/**
* <p>
* This method is only public for unit/integration testing. This method
* should not be called by any integration that uses
* dependency-check-core.</p>
* <p>
* Opens the database connection.</p>
*/
public void openDatabase() {
openDatabase(false, true);
}

/**
* <p>
* This method is only public for unit/integration testing. This method
* should not be called by any integration that uses
* dependency-check-core.</p>
* <p>
* Opens the database connection; if readOnly is true a copy of the database
* will be made.</p>
*
* @param readOnly whether or not the database connection should be readonly
* @param lockRequired whether or not a lock needs to be acquired when
* opening the database
*/
public void openDatabase(boolean readOnly, boolean lockRequired) {
if (mode.isDatabseRequired() && database == null) {
//needed to apply any required schema changes
database = new CveDB(settings);
if (readOnly
&& ConnectionFactory.isH2Connection(settings)
&& settings.getString(Settings.KEYS.DB_CONNECTION_STRING).contains("file:%s")) {
H2DBLock lock = null;
try {
final File db = ConnectionFactory.getH2DataFile(settings);
if (db.isFile()) {
database.close();
if (lockRequired) {
lock = new H2DBLock(settings);
lock.lock();
}
LOGGER.debug("copying database");
final File temp = settings.getTempDirectory();
final File tempDB = new File(temp, db.getName());
Files.copy(db.toPath(), tempDB.toPath());
LOGGER.debug("copying complete '{}'", temp.toPath());
settings.setString(Settings.KEYS.DATA_DIRECTORY, temp.getPath());
final String connStr = settings.getString(Settings.KEYS.DB_CONNECTION_STRING);
settings.setString(Settings.KEYS.DB_CONNECTION_STRING, connStr + "ACCESS_MODE_DATA=r");
database = new CveDB(settings);
}
} catch (IOException ex) {
LOGGER.debug("Unable to open db in read only mode", ex);
} catch (H2DBLockException ex) {
LOGGER.debug("Failed to obtain lock - unable to open db in read only mode", ex);
} finally {
if (lock != null) {
lock.release();
}
}
}
}
}

/**
* Returns a reference to the database.
*
* @return a reference to the database
*/
public CveDB getDatabase() {
return this.database;
}

/**
* Returns a full list of all of the analyzers. This is useful for reporting
* which analyzers were used.
@@ -876,6 +1025,15 @@ public class Engine implements FileFilter, AutoCloseable {
return this.fileTypeAnalyzers;
}

/**
* Returns the configured settings.
*
* @return the configured settings
*/
public Settings getSettings() {
return settings;
}

/**
* Adds a file type analyzer. This has been added solely to assist in unit
* testing the Engine.
@@ -932,7 +1090,7 @@ public class Engine implements FileFilter, AutoCloseable {
throw new UnsupportedOperationException("Cannot generate report in evidence collection mode.");
}
final DatabaseProperties prop = database.getDatabaseProperties();
final ReportGenerator r = new ReportGenerator(applicationName, groupId, artifactId, version, dependencies, getAnalyzers(), prop);
final ReportGenerator r = new ReportGenerator(applicationName, groupId, artifactId, version, dependencies, getAnalyzers(), prop, settings);
try {
r.write(outputDir.getAbsolutePath(), format);
} catch (ReportException ex) {

@@ -20,6 +20,7 @@ package org.owasp.dependencycheck.agent;
import java.io.File;
import java.io.IOException;
import java.util.List;
import javax.annotation.concurrent.NotThreadSafe;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
@@ -61,6 +62,7 @@ import org.slf4j.LoggerFactory;
* @author Steve Springett
*/
@SuppressWarnings("unused")
@NotThreadSafe
public class DependencyCheckScanAgent {

//<editor-fold defaultstate="collapsed" desc="private fields">
@@ -103,8 +105,8 @@ public class DependencyCheckScanAgent {
*/
private boolean autoUpdate = true;
/**
* Sets whether the data directory should be updated without performing a scan.
* Default is false.
* Sets whether the data directory should be updated without performing a
* scan. Default is false.
*/
private boolean updateOnly = false;
/**
@@ -215,8 +217,12 @@ public class DependencyCheckScanAgent {
*/
private String pathToMono;
/**
* The path to optional dependency-check properties file. This will be
* used to side-load additional user-defined properties.
* The configured settings.
*/
private Settings settings;
/**
* The path to optional dependency-check properties file. This will be used
* to side-load additional user-defined properties.
* {@link Settings#mergeProperties(String)}
*/
private String propertiesFilePath;
@@ -872,7 +878,7 @@ public class DependencyCheckScanAgent {
populateSettings();
final Engine engine;
try {
engine = new Engine();
engine = new Engine(settings);
} catch (DatabaseException ex) {
throw new ExceptionCollection(ex, true);
}
@@ -912,20 +918,19 @@ public class DependencyCheckScanAgent {
* proxy server, port, and connection timeout.
*/
private void populateSettings() {
Settings.initialize();
settings = new Settings();
if (dataDirectory != null) {
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
} else {
final File jarPath = new File(DependencyCheckScanAgent.class.getProtectionDomain().getCodeSource().getLocation().getPath());
final File base = jarPath.getParentFile();
final String sub = Settings.getString(Settings.KEYS.DATA_DIRECTORY);
final String sub = settings.getString(Settings.KEYS.DATA_DIRECTORY);
final File dataDir = new File(base, sub);
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
}

if (propertiesFilePath != null) {
try {
Settings.mergeProperties(propertiesFilePath);
settings.mergeProperties(propertiesFilePath);
LOGGER.info("Successfully loaded user-defined properties");
} catch (IOException e) {
LOGGER.error("Unable to merge user-defined properties", e);
@@ -933,30 +938,29 @@ public class DependencyCheckScanAgent {
}
}

LOGGER.info("Populating settings");
Settings.setBoolean(Settings.KEYS.AUTO_UPDATE, autoUpdate);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUsername);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPassword);
Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, centralAnalyzerEnabled);
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_CENTRAL_URL, centralUrl);
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, nexusAnalyzerEnabled);
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
Settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, zipExtensions);
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified);
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified);
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base);
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base);
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
settings.setBoolean(Settings.KEYS.AUTO_UPDATE, autoUpdate);
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUsername);
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPassword);
settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, centralAnalyzerEnabled);
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_CENTRAL_URL, centralUrl);
settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, nexusAnalyzerEnabled);
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, zipExtensions);
settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified);
settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified);
settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base);
settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base);
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
}
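The thread-safety change visible above is that the static Settings singleton is gone: each scan builds its own Settings instance and hands it to its own Engine, so two scans in the same JVM no longer clobber each other's configuration. A minimal sketch, with placeholder property values and a LOGGER assumed from the surrounding class:

// Editorial sketch -- an isolated scan; another thread may do the same with its own Settings.
final Settings scanSettings = new Settings();
scanSettings.setBoolean(Settings.KEYS.AUTO_UPDATE, false);
scanSettings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, "suppression.xml");
try (Engine engine = new Engine(scanSettings)) {
    // scan inputs and analyze with this engine only
} catch (DatabaseException ex) {
    LOGGER.error("Unable to connect to the dependency-check database", ex);
} finally {
    scanSettings.cleanup(true);
}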
/**
@@ -988,9 +992,9 @@ public class DependencyCheckScanAgent {
}
throw new ScanAgentException("One or more exceptions occurred during analysis; please see the debug log for more details.", ex);
} finally {
Settings.cleanup(true);
settings.cleanup(true);
if (engine != null) {
engine.cleanup();
engine.close();
}
}
return engine;
@@ -1004,7 +1008,7 @@ public class DependencyCheckScanAgent {
* @throws org.owasp.dependencycheck.exception.ScanAgentException thrown if
* there is an exception executing the scan.
*/
private void checkForFailure(List<Dependency> dependencies) throws ScanAgentException {
private void checkForFailure(Dependency[] dependencies) throws ScanAgentException {
final StringBuilder ids = new StringBuilder();
for (Dependency d : dependencies) {
boolean addName = true;
@@ -1024,7 +1028,6 @@ public class DependencyCheckScanAgent {
final String msg = String.format("%n%nDependency-Check Failure:%n"
+ "One or more dependencies were identified with vulnerabilities that have a CVSS score greater than '%.1f': %s%n"
+ "See the dependency-check report for more details.%n%n", failBuildOnCVSS, ids.toString());

throw new ScanAgentException(msg);
}
}
@@ -1035,7 +1038,7 @@ public class DependencyCheckScanAgent {
*
* @param dependencies a list of dependency objects
*/
private void showSummary(List<Dependency> dependencies) {
private void showSummary(Dependency[] dependencies) {
final StringBuilder summary = new StringBuilder();
for (Dependency d : dependencies) {
boolean firstEntry = true;

@@ -23,15 +23,17 @@ import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
import javax.annotation.concurrent.ThreadSafe;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Base class for analyzers to avoid code duplication of initialize and close as
* Base class for analyzers to avoid code duplication of prepare and close as
* most analyzers do not need these methods.
*
* @author Jeremy Long
*/
@ThreadSafe
public abstract class AbstractAnalyzer implements Analyzer {

/**
@@ -42,6 +44,10 @@ public abstract class AbstractAnalyzer implements Analyzer {
* A flag indicating whether or not the analyzer is enabled.
*/
private volatile boolean enabled = true;
/**
* The configured settings.
*/
private Settings settings;

/**
* Get the value of enabled.
@@ -63,41 +69,57 @@ public abstract class AbstractAnalyzer implements Analyzer {
}

/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
* Returns the configured settings.
*
* @return the key for the analyzer's enabled property
* @return the configured settings
*/
protected abstract String getAnalyzerEnabledSettingKey();

/**
* Analyzes a given dependency. If the dependency is an archive, such as a
* WAR or EAR, the contents are extracted, scanned, and added to the list of
* dependencies within the engine.
*
* @param dependency the dependency to analyze
* @param engine the engine scanning
* @throws AnalysisException thrown if there is an analysis exception
*/
protected abstract void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException;

/**
* Initializes a given Analyzer. This will be skipped if the analyzer is
* disabled.
*
* @throws InitializationException thrown if there is an exception
*/
protected void initializeAnalyzer() throws InitializationException {
protected Settings getSettings() {
return settings;
}

/**
* Closes a given Analyzer. This will be skipped if the analyzer is
* Initializes the analyzer with the configured settings.
*
* @param settings the configured settings to use
*/
@Override
public void initialize(Settings settings) {
this.settings = settings;
}

/**
* Initialize the abstract analyzer.
*
* @param engine a reference to the dependency-check engine
* @throws InitializationException thrown if there is an exception
*/
@Override
public final void prepare(Engine engine) throws InitializationException {
final String key = getAnalyzerEnabledSettingKey();
try {
this.setEnabled(settings.getBoolean(key, true));
} catch (InvalidSettingException ex) {
final String msg = String.format("Invalid setting for property '%s'", key);
LOGGER.warn(msg);
LOGGER.debug(msg, ex);
}

if (isEnabled()) {
prepareAnalyzer(engine);
} else {
LOGGER.debug("{} has been disabled", getName());
}
}

/**
* Prepares a given Analyzer. This will be skipped if the analyzer is
* disabled.
*
* @throws Exception thrown if there is an exception
* @param engine a reference to the dependency-check engine
* @throws InitializationException thrown if there is an exception
*/
protected void closeAnalyzer() throws Exception {
// Intentionally empty, analyzer will override this if they must close a resource.
protected void prepareAnalyzer(Engine engine) throws InitializationException {
// Intentionally empty, analyzer will override this if they must prepare anything.
}

/**
@@ -117,26 +139,15 @@ public abstract class AbstractAnalyzer implements Analyzer {
}

/**
* The initialize method does nothing for this Analyzer.
* Analyzes a given dependency. If the dependency is an archive, such as a
* WAR or EAR, the contents are extracted, scanned, and added to the list of
* dependencies within the engine.
*
* @throws InitializationException thrown if there is an exception
* @param dependency the dependency to analyze
* @param engine the engine scanning
* @throws AnalysisException thrown if there is an analysis exception
*/
@Override
public final void initialize() throws InitializationException {
final String key = getAnalyzerEnabledSettingKey();
try {
this.setEnabled(Settings.getBoolean(key, true));
} catch (InvalidSettingException ex) {
LOGGER.warn("Invalid setting for property '{}'", key);
LOGGER.debug("", ex);
}

if (isEnabled()) {
initializeAnalyzer();
} else {
LOGGER.debug("{} has been disabled", getName());
}
}
protected abstract void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException;

/**
* The close method does nothing for this Analyzer.
@@ -150,6 +161,16 @@ public abstract class AbstractAnalyzer implements Analyzer {
}
}

/**
* Closes a given Analyzer. This will be skipped if the analyzer is
* disabled.
*
* @throws Exception thrown if there is an exception
*/
protected void closeAnalyzer() throws Exception {
// Intentionally empty, analyzer will override this if they must close a resource.
}

/**
* The default is to support parallel processing.
*
@@ -157,8 +178,15 @@ public abstract class AbstractAnalyzer implements Analyzer {
*/
@Override
public boolean supportsParallelProcessing() {
//temporarily removing parallel processing from all analyzers until further examination of thread safety occurs.
//return true;
return false;
return true;
}

/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
protected abstract String getAnalyzerEnabledSettingKey();

}
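To illustrate the new lifecycle from an implementer's point of view, a concrete analyzer only has to supply the abstract pieces; AbstractAnalyzer stores the Settings from initialize() and gates prepareAnalyzer()/analyzeDependency() on the enabled flag. The class name, phase choice, and settings key below are hypothetical, not part of this change set.

// Hypothetical example analyzer -- a minimal sketch built on the API shown above.
public class ExampleEvidenceAnalyzer extends AbstractAnalyzer {

    @Override
    public String getName() {
        return "Example Evidence Analyzer";
    }

    @Override
    public AnalysisPhase getAnalysisPhase() {
        return AnalysisPhase.INITIAL;
    }

    @Override
    protected String getAnalyzerEnabledSettingKey() {
        return "analyzer.example.enabled"; // assumed custom property key
    }

    @Override
    protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
        // per-dependency work goes here; configuration is read via getSettings()
    }
}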
@@ -0,0 +1,125 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2017 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;

import java.util.HashSet;
import java.util.Set;
import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* <p>
* This analyzer ensures dependencies that should be grouped together, to remove
* excess noise from the report, are grouped. An example would be Spring, Spring
* Beans, Spring MVC, etc. If they are all for the same version and have the
* same relative path then these should be grouped into a single dependency
* under the core/main library.</p>
* <p>
* Note, this grouping only works on dependencies with identified CVE
* entries</p>
*
* @author Jeremy Long
*/
@ThreadSafe
public abstract class AbstractDependencyComparingAnalyzer extends AbstractAnalyzer {

/**
* The Logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractDependencyComparingAnalyzer.class);

/**
* a flag indicating if this analyzer has run. This analyzer only runs once.
*/
private boolean analyzed = false;

/**
* Returns a flag indicating if this analyzer has run. This analyzer only
* runs once. Note this is currently only used in the unit tests.
*
* @return a flag indicating if this analyzer has run. This analyzer only
* runs once
*/
protected synchronized boolean getAnalyzed() {
return analyzed;
}

/**
* Does not support parallel processing as it only runs once and then
* operates on <em>all</em> dependencies.
*
* @return whether or not parallel processing is enabled
* @see #analyze(Dependency, Engine)
*/
@Override
public final boolean supportsParallelProcessing() {
return false;
}

/**
* Analyzes a set of dependencies. If they have been found to have the same
* base path and the same set of identifiers they are likely related. The
* related dependencies are bundled into a single reportable item.
*
* @param ignore this analyzer ignores the dependency being analyzed
* @param engine the engine that is scanning the dependencies
* @throws AnalysisException is thrown if there is an error reading the JAR
* file.
*/
@Override
protected synchronized void analyzeDependency(Dependency ignore, Engine engine) throws AnalysisException {
if (!analyzed) {
analyzed = true;
final Set<Dependency> dependenciesToRemove = new HashSet<>();

final Dependency[] dependencies = engine.getDependencies();
if (dependencies.length < 2) {
return;
}
for (int x = 0; x < dependencies.length - 1; x++) {
final Dependency dependency = dependencies[x];
if (!dependenciesToRemove.contains(dependency)) {
for (int y = x + 1; y < dependencies.length; y++) {
final Dependency nextDependency = dependencies[y];
if (evaluateDependencies(dependency, nextDependency, dependenciesToRemove)) {
break;
}
}
}
}
for (Dependency d : dependenciesToRemove) {
engine.removeDependency(d);
}
}
}

/**
* Evaluates the dependencies
*
* @param dependency a dependency to compare
* @param nextDependency a dependency to compare
* @param dependenciesToRemove a set of dependencies that will be removed
* @return true if a dependency is removed; otherwise false
*/
protected abstract boolean evaluateDependencies(final Dependency dependency,
final Dependency nextDependency, final Set<Dependency> dependenciesToRemove);
}
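A sketch of what a subclass's evaluateDependencies hook might look like; the grouping criterion used here (identical SHA-1) is purely illustrative and is not the heuristic dependency-check ships.

// Hypothetical subclass hook -- merge dependencies whose file content hashes match.
@Override
protected boolean evaluateDependencies(final Dependency dependency,
        final Dependency nextDependency, final Set<Dependency> dependenciesToRemove) {
    if (dependency.getSha1sum() != null
            && dependency.getSha1sum().equals(nextDependency.getSha1sum())) {
        dependency.addRelatedDependency(nextDependency);
        dependenciesToRemove.add(nextDependency);
        return true;
    }
    return false;
}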
@@ -25,6 +25,8 @@ import java.io.FileFilter;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.exception.InitializationException;

/**
@@ -33,6 +35,7 @@ import org.owasp.dependencycheck.exception.InitializationException;
*
* @author Jeremy Long
*/
@ThreadSafe
public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implements FileTypeAnalyzer {

//<editor-fold defaultstate="collapsed" desc="Field definitions, getters, and setters ">
@@ -45,16 +48,6 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
*/
private boolean filesMatched = false;

/**
* Get the value of filesMatched. A flag indicating whether the scan
* included any file types this analyzer supports.
*
* @return the value of filesMatched
*/
protected boolean isFilesMatched() {
return filesMatched;
}

/**
* Set the value of filesMatched. A flag indicating whether the scan
* included any file types this analyzer supports.
@@ -70,13 +63,14 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
/**
* Initializes the analyzer.
*
* @param engine a reference to the dependency-check engine
* @throws InitializationException thrown if there is an exception during
* initialization
*/
@Override
protected final void initializeAnalyzer() throws InitializationException {
protected final void prepareAnalyzer(Engine engine) throws InitializationException {
if (filesMatched) {
initializeFileTypeAnalyzer();
prepareFileTypeAnalyzer(engine);
} else {
this.setEnabled(false);
}
@@ -99,12 +93,13 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
protected abstract FileFilter getFileFilter();

/**
* Initializes the file type analyzer.
* Prepares the file type analyzer for dependency analysis.
*
* @param engine a reference to the dependency-check engine
* @throws InitializationException thrown if there is an exception during
* initialization
*/
protected abstract void initializeFileTypeAnalyzer() throws InitializationException;
protected abstract void prepareFileTypeAnalyzer(Engine engine) throws InitializationException;

//</editor-fold>
/**
@@ -135,7 +130,7 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
* constructs a new Set that can be used in a final static declaration.</p>
* <p>
* This implementation was copied from
* http://stackoverflow.com/questions/2041778/initialize-java-hashset-values-by-construction</p>
* http://stackoverflow.com/questions/2041778/prepare-java-hashset-values-by-construction</p>
*
* @param strings a list of strings to add to the set.
* @return a Set of strings.

@@ -22,10 +22,11 @@ import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collections;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;
import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
@@ -47,6 +48,7 @@ import org.xml.sax.SAXException;
*
* @author Jeremy Long
*/
@ThreadSafe
public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {

/**
@@ -56,15 +58,15 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
/**
* The list of suppression rules
*/
private List<SuppressionRule> rules;
private SuppressionRule[] rules = null;

/**
* Get the number of suppression rules.
*
* @return the number of suppression rules
*/
protected synchronized int getRuleCount() {
return rules.size();
protected int getRuleCount() {
return rules.length;
}

/**
@@ -77,22 +79,25 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
}

/**
* The initialize method loads the suppression XML file.
* The prepare method loads the suppression XML file.
*
* @param engine a reference to the dependency-check engine
* @throws InitializationException thrown if there is an exception
*/
@Override
public void initializeAnalyzer() throws InitializationException {
try {
loadSuppressionData();
} catch (SuppressionParseException ex) {
throw new InitializationException("Error initializing the suppression analyzer: " + ex.getLocalizedMessage(), ex);
public synchronized void prepareAnalyzer(Engine engine) throws InitializationException {
if (rules == null) {
try {
rules = loadSuppressionData();
} catch (SuppressionParseException ex) {
throw new InitializationException("Error initializing the suppression analyzer: " + ex.getLocalizedMessage(), ex);
}
}
}

@Override
protected synchronized void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
if (rules == null || rules.size() <= 0) {
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
if (rules == null || rules.length <= 0) {
return;
}
for (final SuppressionRule rule : rules) {
@@ -101,55 +106,59 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
}

/**
* Loads all the suppression rules files configured in the {@link Settings}
* singleton.
* Loads all the suppression rules files configured in the {@link Settings}.
*
* @return the array of rules that were loaded
* @throws SuppressionParseException thrown if the XML cannot be parsed.
*/
private synchronized void loadSuppressionData() throws SuppressionParseException {
private SuppressionRule[] loadSuppressionData() throws SuppressionParseException {
List<SuppressionRule> ruleList;
final SuppressionParser parser = new SuppressionParser();
try {
final InputStream in = FileUtils.getResourceAsStream("dependencycheck-base-suppression.xml");
rules = Collections.synchronizedList(parser.parseSuppressionRules(in));
ruleList = parser.parseSuppressionRules(in);
} catch (SAXException ex) {
throw new SuppressionParseException("Unable to parse the base suppression data file", ex);
}
final String[] suppressionFilePaths = Settings.getArray(Settings.KEYS.SUPPRESSION_FILE);
if (suppressionFilePaths == null || suppressionFilePaths.length == 0) {
return;
final String[] suppressionFilePaths = getSettings().getArray(Settings.KEYS.SUPPRESSION_FILE);
if (suppressionFilePaths != null && suppressionFilePaths.length > 0) {
// Load all the suppression file paths
for (final String suppressionFilePath : suppressionFilePaths) {
ruleList.addAll(loadSuppressionFile(parser, suppressionFilePath));
}
}

// Load all the suppression file paths
for (final String suppressionFilePath : suppressionFilePaths) {
loadSuppressionFile(parser, suppressionFilePath);
}
LOGGER.debug("{} suppression rules were loaded.", rules.size());
LOGGER.debug("{} suppression rules were loaded.", ruleList.size());
return ruleList.toArray(new SuppressionRule[ruleList.size()]);
}

/**
* Load a single suppression rules file from the path provided using the
* parser provided.
*
* @param parser the parser to use for loading the file.
* @param suppressionFilePath the path to load.
* @param parser the parser to use for loading the file
* @param suppressionFilePath the path to load
* @return the list of loaded suppression rules
* @throws SuppressionParseException thrown if the suppression file cannot
* be loaded and parsed.
*/
private synchronized void loadSuppressionFile(final SuppressionParser parser, final String suppressionFilePath) throws SuppressionParseException {
private List<SuppressionRule> loadSuppressionFile(final SuppressionParser parser,
final String suppressionFilePath) throws SuppressionParseException {
LOGGER.debug("Loading suppression rules from '{}'", suppressionFilePath);

final List<SuppressionRule> list = new ArrayList<>();
File file = null;
boolean deleteTempFile = false;
try {
final Pattern uriRx = Pattern.compile("^(https?|file)\\:.*", Pattern.CASE_INSENSITIVE);
if (uriRx.matcher(suppressionFilePath).matches()) {
deleteTempFile = true;
file = FileUtils.getTempFile("suppression", "xml");
file = getSettings().getTempFile("suppression", "xml");
final URL url = new URL(suppressionFilePath);
final Downloader downloader = new Downloader(getSettings());
try {
Downloader.fetchFile(url, file, false);
downloader.fetchFile(url, file, false);
} catch (DownloadFailedException ex) {
Downloader.fetchFile(url, file, true);
LOGGER.trace("Failed download - first attempt", ex);
downloader.fetchFile(url, file, true);
}
} else {
file = new File(suppressionFilePath);
@@ -158,7 +167,7 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
try (InputStream suppressionsFromClasspath = FileUtils.getResourceAsStream(suppressionFilePath)) {
if (suppressionsFromClasspath != null) {
deleteTempFile = true;
file = FileUtils.getTempFile("suppression", "xml");
file = getSettings().getTempFile("suppression", "xml");
try {
org.apache.commons.io.FileUtils.copyInputStreamToFile(suppressionsFromClasspath, file);
} catch (IOException ex) {
@@ -175,7 +184,7 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
throw new SuppressionParseException(msg);
}
try {
rules.addAll(parser.parseSuppressionRules(file));
list.addAll(parser.parseSuppressionRules(file));
} catch (SuppressionParseException ex) {
LOGGER.warn("Unable to parse suppression xml file '{}'", file.getPath());
LOGGER.warn(ex.getMessage());
@@ -195,6 +204,7 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
FileUtils.delete(file);
}
}
return list;
}

/**

@@ -21,11 +21,22 @@ import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.Settings;

/**
* <p>
* An interface that defines an Analyzer that is used to identify Dependencies.
* An analyzer will collect information about the dependency in the form of
* Evidence.
* Evidence.</p>
* <p>
* When the {@link org.owasp.dependencycheck.Engine} executes it will load the
* analyzers and call the methods in the following order:</p>
* <ol>
* <li>{@link #initialize(org.owasp.dependencycheck.utils.Settings)}</li>
* <li>{@link #prepare(org.owasp.dependencycheck.Engine)}</li>
* <li>{@link #analyze(org.owasp.dependencycheck.dependency.Dependency, org.owasp.dependencycheck.Engine)}</li>
* <li>{@link #close()}</li>
* </ol>
*
* @author Jeremy Long
*/
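Read as a contract, that ordering means the engine (or an integration driving analyzers directly) exercises each analyzer roughly as in the hedged sketch below; checked-exception handling is omitted and the loop body is an assumption, not the engine's exact code.

// Editorial sketch of the documented call order for a single analyzer instance.
analyzer.initialize(settings);            // 1. hand the analyzer its configuration
analyzer.prepare(engine);                 // 2. one-time preparation (may disable the analyzer)
if (analyzer.isEnabled()) {
    for (Dependency dependency : engine.getDependencies()) {
        analyzer.analyze(dependency, engine);   // 3. per-dependency analysis
    }
}
analyzer.close();                         // 4. release any resources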
@@ -60,13 +71,21 @@ public interface Analyzer {
|
||||
AnalysisPhase getAnalysisPhase();
|
||||
|
||||
/**
|
||||
* The initialize method is called (once) prior to the analyze method being
|
||||
* Initializes the analyzer with the configured settings.
|
||||
*
|
||||
* @param settings the configured settings
|
||||
*/
|
||||
void initialize(Settings settings);
|
||||
|
||||
/**
|
||||
* The prepare method is called (once) prior to the analyze method being
|
||||
* called on all of the dependencies.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException is thrown if an exception occurs
|
||||
* initializing the analyzer.
|
||||
*/
|
||||
void initialize() throws InitializationException;
|
||||
void prepare(Engine engine) throws InitializationException;
|
||||
|
||||
/**
|
||||
* The close method is called after all of the dependencies have been
|
||||
@@ -77,16 +96,20 @@ public interface Analyzer {
|
||||
void close() throws Exception;
|
||||
|
||||
/**
|
||||
* Returns whether multiple instances of the same type of analyzer can run in parallel.
|
||||
* Note that running analyzers of different types in parallel is not supported at all.
|
||||
* Returns whether multiple instances of the same type of analyzer can run
|
||||
* in parallel. Note that running analyzers of different types in parallel
|
||||
* is not supported at all.
|
||||
*
|
||||
* @return {@code true} if the analyzer supports parallel processing, {@code false} else
|
||||
* @return {@code true} if the analyzer supports parallel processing,
|
||||
* {@code false} else
|
||||
*/
|
||||
boolean supportsParallelProcessing();
|
||||
|
||||
/**
|
||||
* Get the value of enabled.
|
||||
*
|
||||
* @return the value of enabled
|
||||
*/
|
||||
boolean isEnabled();
|
||||
|
||||
}
|
||||
|
||||
@@ -18,21 +18,21 @@
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import static java.util.Arrays.asList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.ServiceLoader;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* The Analyzer Service Loader. This class loads all services that implement
|
||||
* org.owasp.dependencycheck.analyzer.Analyzer.
|
||||
* {@link org.owasp.dependencycheck.analyzer.Analyzer}.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class AnalyzerService {
|
||||
|
||||
/**
|
||||
@@ -44,14 +44,20 @@ public class AnalyzerService {
|
||||
* The service loader for analyzers.
|
||||
*/
|
||||
private final ServiceLoader<Analyzer> service;
|
||||
/**
|
||||
* The configured settings.
|
||||
*/
|
||||
private final boolean loadExperimental;
|
||||
|
||||
/**
|
||||
* Creates a new instance of AnalyzerService.
|
||||
*
|
||||
* @param classLoader the ClassLoader to use when dynamically loading
|
||||
* Analyzer and Update services
|
||||
* @param loadExperimental whether or not to load the experimental analyzers
|
||||
*/
|
||||
public AnalyzerService(ClassLoader classLoader) {
|
||||
public AnalyzerService(ClassLoader classLoader, boolean loadExperimental) {
|
||||
this.loadExperimental = loadExperimental;
|
||||
service = ServiceLoader.load(Analyzer.class, classLoader);
|
||||
}
|
||||
|
||||
@@ -85,18 +91,12 @@ public class AnalyzerService {
|
||||
private List<Analyzer> getAnalyzers(List<AnalysisPhase> phases) {
|
||||
final List<Analyzer> analyzers = new ArrayList<>();
|
||||
final Iterator<Analyzer> iterator = service.iterator();
|
||||
boolean experimentalEnabled = false;
|
||||
try {
|
||||
experimentalEnabled = Settings.getBoolean(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, false);
|
||||
} catch (InvalidSettingException ex) {
|
||||
LOGGER.error("invalid experimental setting", ex);
|
||||
}
|
||||
while (iterator.hasNext()) {
|
||||
final Analyzer a = iterator.next();
|
||||
if (!phases.contains(a.getAnalysisPhase())) {
|
||||
continue;
|
||||
}
|
||||
if (!experimentalEnabled && a.getClass().isAnnotationPresent(Experimental.class)) {
|
||||
if (!loadExperimental && a.getClass().isAnnotationPresent(Experimental.class)) {
|
||||
continue;
|
||||
}
|
||||
LOGGER.debug("Loaded Analyzer {}", a.getName());
|
||||
|
||||
@@ -28,6 +28,8 @@ import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.concurrent.ThreadSafe;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
@@ -61,6 +63,7 @@ import org.slf4j.LoggerFactory;
*
* @author Jeremy Long
*/
@ThreadSafe
public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {

/**
@@ -71,7 +74,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
* The count of directories created during analysis. This is used for
* creating temporary directories.
*/
private static int dirCount = 0;
private static final AtomicInteger DIRECTORY_COUNT = new AtomicInteger(0);
/**
* The parent directory for the individual directories per archive.
*/
@@ -80,21 +83,11 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
* The max scan depth that the analyzer will recursively extract nested
* archives.
*/
private static final int MAX_SCAN_DEPTH = Settings.getInt("archive.scan.depth", 3);
private int maxScanDepth;
/**
* Tracks the current scan/extraction depth for nested archives.
* The file filter used to filter supported files.
*/
private int scanDepth = 0;

//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/**
* The name of the analyzer.
*/
private static final String ANALYZER_NAME = "Archive Analyzer";
/**
* The phase that this analyzer is intended to run in.
*/
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INITIAL;
private FileFilter fileFilter = null;
/**
* The set of things we can handle with Zip methods
*/
@@ -106,35 +99,41 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
*/
private static final Set<String> EXTENSIONS = newHashSet("tar", "gz", "tgz", "bz2", "tbz2");

static {
final String additionalZipExt = Settings.getString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS);
if (additionalZipExt != null) {
final String[] ext = additionalZipExt.split("\\s*,\\s*");
Collections.addAll(KNOWN_ZIP_EXT, ext);
}
EXTENSIONS.addAll(KNOWN_ZIP_EXT);
}

/**
* Detects files with extensions to remove from the engine's collection of
* dependencies.
*/
private static final FileFilter REMOVE_FROM_ANALYSIS = FileFilterBuilder.newInstance()
.addExtensions("zip", "tar", "gz", "tgz", "bz2", "tbz2").build();

/**
* The file filter used to filter supported files.
*/
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build();

/**
* Detects files with .zip extension.
*/
private static final FileFilter ZIP_FILTER = FileFilterBuilder.newInstance().addExtensions("zip").build();

//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/**
* The name of the analyzer.
*/
private static final String ANALYZER_NAME = "Archive Analyzer";
/**
* The phase that this analyzer is intended to run in.
*/
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INITIAL;

/**
* Initializes the analyzer with the configured settings.
*
* @param settings the configured settings to use
*/
@Override
public void initialize(Settings settings) {
super.initialize(settings);
initializeSettings();
}

@Override
protected FileFilter getFileFilter() {
return FILTER;
return fileFilter;
}

/**
@@ -170,15 +169,16 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
}

/**
* The initialize method does nothing for this Analyzer.
* The prepare method does nothing for this Analyzer.
*
* @param engine a reference to the dependency-check engine
* @throws InitializationException is thrown if there is an exception
* deleting or creating temporary files
*/
@Override
public void initializeFileTypeAnalyzer() throws InitializationException {
public void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
try {
final File baseDir = Settings.getTempDirectory();
final File baseDir = getSettings().getTempDirectory();
tempFileLocation = File.createTempFile("check", "tmp", baseDir);
if (!tempFileLocation.delete()) {
setEnabled(false);
@@ -228,7 +228,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
*/
@Override
public boolean supportsParallelProcessing() {
return false;
return true;
}

/**
@@ -242,6 +242,22 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
*/
@Override
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
extractAndAnalyze(dependency, engine, 0);
engine.sortDependencies();
}

/**
* Extracts the contents of the archive dependency and scans for additional
* dependencies.
*
* @param dependency the dependency being analyzed
* @param engine the engine doing the analysis
* @param scanDepth the current scan depth; extracctAndAnalyze is recursive
* and will, be default, only go 3 levels deep
* @throws AnalysisException thrown if there is a problem analyzing the
* dependencies
*/
private void extractAndAnalyze(Dependency dependency, Engine engine, int scanDepth) throws AnalysisException {
final File f = new File(dependency.getActualFilePath());
final File tmpDir = getNextTempDirectory();
extractFiles(f, tmpDir, engine);
@@ -261,14 +277,12 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
d.getFileName());
d.setFilePath(displayPath);
d.setFileName(displayName);
d.setProjectReferences(dependency.getProjectReferences());
d.addAllProjectReferences(dependency.getProjectReferences());

//TODO - can we get more evidence from the parent? EAR contains module name, etc.
//analyze the dependency (i.e. extract files) if it is a supported type.
if (this.accept(d.getActualFile()) && scanDepth < MAX_SCAN_DEPTH) {
scanDepth += 1;
analyze(d, engine);
scanDepth -= 1;
if (this.accept(d.getActualFile()) && scanDepth < maxScanDepth) {
extractAndAnalyze(d, engine, scanDepth + 1);
}
} else {
for (Dependency sub : dependencySet) {
@@ -288,9 +302,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
}
if (REMOVE_FROM_ANALYSIS.accept(dependency.getActualFile())) {
addDisguisedJarsToDependencies(dependency, engine);
engine.getDependencies().remove(dependency);
engine.removeDependency(dependency);
}
Collections.sort(engine.getDependencies());
}

/**
@@ -357,8 +370,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException thrown if unable to create temporary directory
*/
private File getNextTempDirectory() throws AnalysisException {
dirCount += 1;
final File directory = new File(tempFileLocation, String.valueOf(dirCount));
final File directory = new File(tempFileLocation, String.valueOf(DIRECTORY_COUNT.incrementAndGet()));
//getting an exception for some directories not being able to be created; might be because the directory already exists?
if (directory.exists()) {
return getNextTempDirectory();
@@ -603,4 +615,19 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
}
return isJar;
}

/**
* Initializes settings used by the scanning functions of the archive
* analyzer.
*/
private void initializeSettings() {
maxScanDepth = getSettings().getInt("archive.scan.depth", 3);
final String additionalZipExt = getSettings().getString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS);
if (additionalZipExt != null) {
final String[] ext = additionalZipExt.split("\\s*,\\s*");
Collections.addAll(KNOWN_ZIP_EXT, ext);
}
EXTENSIONS.addAll(KNOWN_ZIP_EXT);
fileFilter = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build();
}
}

@@ -28,7 +28,6 @@ import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Evidence;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.FileUtils;
import org.owasp.dependencycheck.utils.Settings;
@@ -43,9 +42,11 @@ import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.concurrent.ThreadSafe;
import javax.xml.parsers.ParserConfigurationException;
import org.owasp.dependencycheck.exception.InitializationException;
import org.apache.commons.lang3.SystemUtils;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.utils.XmlUtils;

/**
@@ -55,6 +56,7 @@ import org.owasp.dependencycheck.utils.XmlUtils;
* @author colezlaw
*
*/
@ThreadSafe
public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {

/**
@@ -91,8 +93,8 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
// Use file.separator as a wild guess as to whether this is Windows
final List<String> args = new ArrayList<>();
if (!SystemUtils.IS_OS_WINDOWS) {
if (Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH) != null) {
args.add(Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH));
if (getSettings().getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH) != null) {
args.add(getSettings().getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH));
} else if (isInPath("mono")) {
args.add("mono");
} else {
@@ -111,20 +113,16 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException if anything goes sideways
*/
@Override
public void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException {

public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
final File test = new File(dependency.getActualFilePath());
if (!test.isFile()) {
throw new AnalysisException(String.format("%s does not exist and cannot be analyzed by dependency-check",
dependency.getActualFilePath()));
}

if (grokAssemblyExe == null) {
LOGGER.warn("GrokAssembly didn't get deployed");
return;
}

final List<String> args = buildArgumentList();
if (args == null) {
LOGGER.warn("Assembly Analyzer was unable to execute");
@@ -172,20 +170,17 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {

final String version = xpath.evaluate("/assembly/version", doc);
if (version != null) {
dependency.getVersionEvidence().addEvidence(new Evidence("grokassembly", "version",
version, Confidence.HIGHEST));
dependency.addEvidence(EvidenceType.VERSION, "grokassembly", "version", version, Confidence.HIGHEST);
}

final String vendor = xpath.evaluate("/assembly/company", doc);
if (vendor != null) {
dependency.getVendorEvidence().addEvidence(new Evidence("grokassembly", "vendor",
vendor, Confidence.HIGH));
dependency.addEvidence(EvidenceType.VENDOR, "grokassembly", "vendor", vendor, Confidence.HIGH);
}

final String product = xpath.evaluate("/assembly/product", doc);
if (product != null) {
dependency.getProductEvidence().addEvidence(new Evidence("grokassembly", "product",
product, Confidence.HIGH));
dependency.addEvidence(EvidenceType.PRODUCT, "grokassembly", "product", product, Confidence.HIGH);
}

} catch (ParserConfigurationException pce) {
@@ -199,22 +194,21 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.error("----------------------------------------------------");
throw new AnalysisException("Couldn't parse Assembly Analyzer results (GrokAssembly)", saxe);
}
// This shouldn't happen

}

/**
* Initialize the analyzer. In this case, extract GrokAssembly.exe to a
* temporary location.
*
* @param engine a reference to the dependency-check engine
* @throws InitializationException thrown if anything goes wrong
*/
@Override
public void initializeFileTypeAnalyzer() throws InitializationException {
public void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
final File tempFile;
final File cfgFile;
try {
tempFile = File.createTempFile("GKA", ".exe", Settings.getTempDirectory());
tempFile = File.createTempFile("GKA", ".exe", getSettings().getTempDirectory());
cfgFile = new File(tempFile.getPath() + ".config");
} catch (IOException ex) {
setEnabled(false);

@@ -22,7 +22,6 @@ import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.UrlStringUtils;
@@ -33,6 +32,7 @@ import java.io.IOException;
import java.nio.charset.Charset;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.exception.InitializationException;

/**
@@ -168,15 +168,14 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
final String contents = getFileContents(actualFile);
if (!contents.isEmpty()) {
if (isOutputScript) {
extractConfigureScriptEvidence(dependency, name,
contents);
extractConfigureScriptEvidence(dependency, name, contents);
} else {
gatherEvidence(dependency, name, contents);
}
}
}
} else {
engine.getDependencies().remove(dependency);
engine.removeDependency(dependency);
}
}

@@ -195,17 +194,13 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
final String value = matcher.group(2);
if (!value.isEmpty()) {
if (variable.endsWith("NAME")) {
dependency.getProductEvidence().addEvidence(name, variable,
value, Confidence.HIGHEST);
dependency.addEvidence(EvidenceType.PRODUCT, name, variable, value, Confidence.HIGHEST);
} else if ("VERSION".equals(variable)) {
dependency.getVersionEvidence().addEvidence(name, variable,
value, Confidence.HIGHEST);
dependency.addEvidence(EvidenceType.VERSION, name, variable, value, Confidence.HIGHEST);
} else if ("BUGREPORT".equals(variable)) {
dependency.getVendorEvidence().addEvidence(name, variable,
value, Confidence.HIGH);
dependency.addEvidence(EvidenceType.VENDOR, name, variable, value, Confidence.HIGH);
} else if ("URL".equals(variable)) {
dependency.getVendorEvidence().addEvidence(name, variable,
value, Confidence.HIGH);
dependency.addEvidence(EvidenceType.VENDOR, name, variable, value, Confidence.HIGH);
}
}
}
@@ -239,27 +234,19 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
String contents) {
final Matcher matcher = AC_INIT_PATTERN.matcher(contents);
if (matcher.find()) {
final EvidenceCollection productEvidence = dependency
.getProductEvidence();
productEvidence.addEvidence(name, "Package", matcher.group(1),
Confidence.HIGHEST);
dependency.getVersionEvidence().addEvidence(name,
"Package Version", matcher.group(2), Confidence.HIGHEST);
final EvidenceCollection vendorEvidence = dependency
.getVendorEvidence();
dependency.addEvidence(EvidenceType.PRODUCT, name, "Package", matcher.group(1), Confidence.HIGHEST);
dependency.addEvidence(EvidenceType.VERSION, name, "Package Version", matcher.group(2), Confidence.HIGHEST);

if (null != matcher.group(3)) {
vendorEvidence.addEvidence(name, "Bug report address",
matcher.group(4), Confidence.HIGH);
dependency.addEvidence(EvidenceType.VENDOR, name, "Bug report address", matcher.group(4), Confidence.HIGH);
}
if (null != matcher.group(5)) {
productEvidence.addEvidence(name, "Tarname", matcher.group(6),
Confidence.HIGH);
dependency.addEvidence(EvidenceType.PRODUCT, name, "Tarname", matcher.group(6), Confidence.HIGH);
}
if (null != matcher.group(7)) {
final String url = matcher.group(8);
if (UrlStringUtils.isUrl(url)) {
vendorEvidence.addEvidence(name, "URL", url,
Confidence.HIGH);
dependency.addEvidence(EvidenceType.VENDOR, name, "URL", url, Confidence.HIGH);
}
}
}
@@ -268,11 +255,12 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
/**
* Initializes the file type analyzer.
*
* @param engine a reference to the dependency-check engine
* @throws InitializationException thrown if there is an exception during
* initialization
*/
@Override
protected void initializeFileTypeAnalyzer() throws InitializationException {
protected void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
// No initialization needed.
}
}

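The evidence-API migration visible in the surrounding hunks follows one pattern: the per-type EvidenceCollection accessors are replaced by a single addEvidence call that takes an EvidenceType. A hedged before/after sketch, with placeholder source, name, and value strings that are not taken from this commit:

// Old style (removed in this commit): type-specific evidence collections.
dependency.getVendorEvidence().addEvidence("source", "name", "value", Confidence.HIGH);
// New style (added): one call, with the evidence type as the first argument.
dependency.addEvidence(EvidenceType.VENDOR, "source", "name", "value", Confidence.HIGH);
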
@@ -38,6 +38,7 @@ import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.exception.InitializationException;

/**
@@ -125,11 +126,12 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
/**
* Initializes the analyzer.
*
* @param engine a reference to the dependency-check engine
* @throws InitializationException thrown if an exception occurs getting an
* instance of SHA1
*/
@Override
protected void initializeFileTypeAnalyzer() throws InitializationException {
protected void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
try {
getSha1MessageDigest();
} catch (IllegalStateException ex) {
@@ -171,8 +173,8 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
m.groupCount(), m.group(0)));
final String group = m.group(1);
LOGGER.debug("Group 1: {}", group);
dependency.getProductEvidence().addEvidence(name, "Project",
group, Confidence.HIGH);
dependency.addEvidence(EvidenceType.PRODUCT, name, "Project", group, Confidence.HIGH);
dependency.addEvidence(EvidenceType.VENDOR, name, "Project", group, Confidence.HIGH);
}
LOGGER.debug("Found {} matches.", count);
analyzeSetVersionCommand(dependency, engine, contents);
@@ -223,13 +225,12 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
}
final MessageDigest sha1 = getSha1MessageDigest();
currentDep.setSha1sum(Checksum.getHex(sha1.digest(path)));
engine.getDependencies().add(currentDep);
engine.addDependency(currentDep);
}
final String source = currentDep.getDisplayFileName();
currentDep.getProductEvidence().addEvidence(source, "Product",
product, Confidence.MEDIUM);
currentDep.getVersionEvidence().addEvidence(source, "Version",
version, Confidence.MEDIUM);
currentDep.addEvidence(EvidenceType.PRODUCT, source, "Product", product, Confidence.MEDIUM);
currentDep.addEvidence(EvidenceType.VENDOR, source, "Vendor", product, Confidence.MEDIUM);
currentDep.addEvidence(EvidenceType.VERSION, source, "Version", version, Confidence.MEDIUM);
}
LOGGER.debug("Found {} matches.", count);
}

@@ -26,6 +26,7 @@ import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.StringTokenizer;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.apache.commons.lang3.builder.CompareToBuilder;
|
||||
import org.apache.lucene.analysis.util.CharArraySet;
|
||||
import org.apache.lucene.document.Document;
|
||||
@@ -46,7 +47,7 @@ import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Evidence;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.dependency.Identifier;
|
||||
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
@@ -63,6 +64,7 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -92,6 +94,10 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
* data that will be written into the string.
|
||||
*/
|
||||
private static final int STRING_BUILDER_BUFFER = 20;
|
||||
/**
|
||||
* The URL to perform a search of the NVD CVE data at NIST.
|
||||
*/
|
||||
public static final String NVD_SEARCH_URL = "https://web.nvd.nist.gov/view/vuln/search-results?adv_search=true&cves=on&cpe_version=%s";
|
||||
/**
|
||||
* The CPE in memory index.
|
||||
*/
|
||||
@@ -101,11 +107,6 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
*/
|
||||
private CveDB cve;
|
||||
|
||||
/**
|
||||
* The URL to perform a search of the NVD CVE data at NIST.
|
||||
*/
|
||||
public static final String NVD_SEARCH_URL = "https://web.nvd.nist.gov/view/vuln/search-results?adv_search=true&cves=on&cpe_version=%s";
|
||||
|
||||
/**
|
||||
* Returns the name of this analyzer.
|
||||
*
|
||||
@@ -126,26 +127,17 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
return AnalysisPhase.IDENTIFIER_ANALYSIS;
|
||||
}
|
||||
|
||||
/**
|
||||
* The default is to support parallel processing.
|
||||
*
|
||||
* @return false
|
||||
*/
|
||||
@Override
|
||||
public boolean supportsParallelProcessing() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the CPE Lucene Index.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException is thrown if there is an issue opening
|
||||
* the index.
|
||||
*/
|
||||
@Override
|
||||
public void initializeAnalyzer() throws InitializationException {
|
||||
public void prepareAnalyzer(Engine engine) throws InitializationException {
|
||||
try {
|
||||
this.open();
|
||||
this.open(engine.getDatabase());
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Exception initializing the Lucene Index", ex);
|
||||
throw new InitializationException("An exception occurred initializing the Lucene Index", ex);
|
||||
@@ -158,24 +150,23 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
/**
|
||||
* Opens the data source.
|
||||
*
|
||||
* @param cve a reference to the NVD CVE database
|
||||
* @throws IOException when the Lucene directory to be queried does not
|
||||
* exist or is corrupt.
|
||||
* @throws DatabaseException when the database throws an exception. This
|
||||
* usually occurs when the database is in use by another process.
|
||||
*/
|
||||
public void open() throws IOException, DatabaseException {
|
||||
if (!isOpen()) {
|
||||
cve = CveDB.getInstance();
|
||||
cpe = CpeMemoryIndex.getInstance();
|
||||
try {
|
||||
final long creationStart = System.currentTimeMillis();
|
||||
cpe.open(cve);
|
||||
final long creationSeconds = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - creationStart);
|
||||
LOGGER.info("Created CPE Index ({} seconds)", creationSeconds);
|
||||
} catch (IndexException ex) {
|
||||
LOGGER.debug("IndexException", ex);
|
||||
throw new DatabaseException(ex);
|
||||
}
|
||||
public void open(CveDB cve) throws IOException, DatabaseException {
|
||||
this.cve = cve;
|
||||
this.cpe = CpeMemoryIndex.getInstance();
|
||||
try {
|
||||
final long creationStart = System.currentTimeMillis();
|
||||
cpe.open(cve);
|
||||
final long creationSeconds = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - creationStart);
|
||||
LOGGER.info("Created CPE Index ({} seconds)", creationSeconds);
|
||||
} catch (IndexException ex) {
|
||||
LOGGER.debug("IndexException", ex);
|
||||
throw new DatabaseException(ex);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -184,25 +175,12 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
*/
|
||||
@Override
|
||||
public void closeAnalyzer() {
|
||||
if (cve != null) {
|
||||
cve.close();
|
||||
cve = null;
|
||||
}
|
||||
if (cpe != null) {
|
||||
cpe.close();
|
||||
cpe = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether or not the analyzer is open.
|
||||
*
|
||||
* @return <code>true</code> if the analyzer is open
|
||||
*/
|
||||
public boolean isOpen() {
|
||||
return cpe != null && cpe.isOpen();
|
||||
}
|
||||
|
||||
/**
|
||||
* Searches the data store of CPE entries, trying to identify the CPE for
|
||||
* the given dependency based on the evidence contained within. The
|
||||
@@ -217,17 +195,17 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
String vendors = "";
|
||||
String products = "";
|
||||
for (Confidence confidence : Confidence.values()) {
|
||||
if (dependency.getVendorEvidence().contains(confidence)) {
|
||||
vendors = addEvidenceWithoutDuplicateTerms(vendors, dependency.getVendorEvidence(), confidence);
|
||||
if (dependency.contains(EvidenceType.VENDOR, confidence)) {
|
||||
vendors = addEvidenceWithoutDuplicateTerms(vendors, dependency.getIterator(EvidenceType.VENDOR, confidence));
|
||||
LOGGER.debug("vendor search: {}", vendors);
|
||||
}
|
||||
if (dependency.getProductEvidence().contains(confidence)) {
|
||||
products = addEvidenceWithoutDuplicateTerms(products, dependency.getProductEvidence(), confidence);
|
||||
if (dependency.contains(EvidenceType.PRODUCT, confidence)) {
|
||||
products = addEvidenceWithoutDuplicateTerms(products, dependency.getIterator(EvidenceType.PRODUCT, confidence));
|
||||
LOGGER.debug("product search: {}", products);
|
||||
}
|
||||
if (!vendors.isEmpty() && !products.isEmpty()) {
|
||||
final List<IndexEntry> entries = searchCPE(vendors, products, dependency.getVendorEvidence().getWeighting(),
|
||||
dependency.getProductEvidence().getWeighting());
|
||||
final List<IndexEntry> entries = searchCPE(vendors, products, dependency.getVendorWeightings(),
|
||||
dependency.getProductWeightings());
|
||||
if (entries == null) {
|
||||
continue;
|
||||
}
|
||||
@@ -254,26 +232,24 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
* attempts to prevent duplicate terms from being added.<br/<br/> Note, if
|
||||
* the evidence is longer then 200 characters it will be truncated.
|
||||
*
|
||||
* @param text the base text.
|
||||
* @param ec an EvidenceCollection
|
||||
* @param confidenceFilter a Confidence level to filter the evidence by.
|
||||
* @param text the base text
|
||||
* @param evidence an iterable set of evidence to concatenate
|
||||
* @return the new evidence text
|
||||
*/
|
||||
private String addEvidenceWithoutDuplicateTerms(final String text, final EvidenceCollection ec, Confidence confidenceFilter) {
|
||||
private String addEvidenceWithoutDuplicateTerms(final String text, final Iterable<Evidence> evidence) {
|
||||
final String txt = (text == null) ? "" : text;
|
||||
final StringBuilder sb = new StringBuilder(txt.length() + (20 * ec.size()));
|
||||
final StringBuilder sb = new StringBuilder();
|
||||
sb.append(' ').append(txt).append(' ');
|
||||
for (Evidence e : ec.iterator(confidenceFilter)) {
|
||||
String value = e.getValue();
|
||||
|
||||
//hack to get around the fact that lucene does a really good job of recognizing domains and not
|
||||
// splitting them. TODO - put together a better lucene analyzer specific to the domain.
|
||||
if (value.startsWith("http://")) {
|
||||
value = value.substring(7).replaceAll("\\.", " ");
|
||||
}
|
||||
if (value.startsWith("https://")) {
|
||||
value = value.substring(8).replaceAll("\\.", " ");
|
||||
}
|
||||
for (Evidence e : evidence) {
|
||||
final String value = e.getValue();
|
||||
//removed as the URLTokenizingFilter was created
|
||||
//hack to get around the fact that lucene does a really good job of recognizing domains and not splitting them.
|
||||
// if (value.startsWith("http://")) {
|
||||
// value = value.substring(7).replaceAll("\\.", " ");
|
||||
// }
|
||||
// if (value.startsWith("https://")) {
|
||||
// value = value.substring(8).replaceAll("\\.", " ");
|
||||
// }
|
||||
if (sb.indexOf(" " + value + " ") < 0) {
|
||||
sb.append(value).append(' ');
|
||||
}
|
||||
@@ -466,8 +442,8 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
//TODO - does this nullify some of the fuzzy matching that happens in the lucene search?
|
||||
// for instance CPE some-component and in the evidence we have SomeComponent.
|
||||
if (collectionContainsString(dependency.getProductEvidence(), entry.getProduct())
|
||||
&& collectionContainsString(dependency.getVendorEvidence(), entry.getVendor())) {
|
||||
if (collectionContainsString(dependency.getEvidence(EvidenceType.PRODUCT), entry.getProduct())
|
||||
&& collectionContainsString(dependency.getEvidence(EvidenceType.VENDOR), entry.getVendor())) {
|
||||
//&& collectionContainsVersion(dependency.getVersionEvidence(), entry.getVersion())
|
||||
isValid = true;
|
||||
}
|
||||
@@ -477,11 +453,11 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
/**
|
||||
* Used to determine if the EvidenceCollection contains a specific string.
|
||||
*
|
||||
* @param ec an EvidenceCollection
|
||||
* @param evidence an of evidence object to check
|
||||
* @param text the text to search for
|
||||
* @return whether or not the EvidenceCollection contains the string
|
||||
*/
|
||||
private boolean collectionContainsString(EvidenceCollection ec, String text) {
|
||||
private boolean collectionContainsString(Set<Evidence> evidence, String text) {
|
||||
//TODO - likely need to change the split... not sure if this will work for CPE with special chars
|
||||
if (text == null) {
|
||||
return false;
|
||||
@@ -489,7 +465,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
final String[] words = text.split("[\\s_-]");
|
||||
final List<String> list = new ArrayList<>();
|
||||
String tempWord = null;
|
||||
CharArraySet stopWords = SearchFieldAnalyzer.getStopWords();
|
||||
final CharArraySet stopWords = SearchFieldAnalyzer.getStopWords();
|
||||
for (String word : words) {
|
||||
if (stopWords.contains(word)) {
|
||||
continue;
|
||||
@@ -518,11 +494,24 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
if (list.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
boolean contains = true;
|
||||
boolean isValid = true;
|
||||
for (String word : list) {
|
||||
contains &= ec.containsUsedString(word);
|
||||
boolean found = false;
|
||||
for (Evidence e : evidence) {
|
||||
if (e.getValue().toLowerCase().contains(word.toLowerCase())) {
|
||||
if ("http".equals(word) && e.getValue().contains("http:")) {
|
||||
continue;
|
||||
}
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
isValid &= found;
|
||||
if (!isValid) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
return contains;
|
||||
return isValid;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -535,7 +524,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
* dependency.
|
||||
*/
|
||||
@Override
|
||||
protected synchronized void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
try {
|
||||
determineCPE(dependency);
|
||||
} catch (CorruptIndexException ex) {
|
||||
@@ -578,7 +567,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
// if there lower confidence evidence when the current (highest) version number
|
||||
// is newer then anything in the NVD.
|
||||
for (Confidence conf : Confidence.values()) {
|
||||
for (Evidence evidence : dependency.getVersionEvidence().iterator(conf)) {
|
||||
for (Evidence evidence : dependency.getIterator(EvidenceType.VERSION, conf)) {
|
||||
final DependencyVersion evVer = DependencyVersionUtil.parseVersion(evidence.getValue());
|
||||
if (evVer == null) {
|
||||
continue;
|
||||
|
||||
@@ -36,6 +36,8 @@ import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.List;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.DownloadFailedException;
|
||||
import org.owasp.dependencycheck.utils.Downloader;
|
||||
@@ -49,6 +51,7 @@ import org.owasp.dependencycheck.utils.Settings;
|
||||
*
|
||||
* @author colezlaw
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -84,7 +87,18 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Field indicating if the analyzer is enabled.
|
||||
*/
|
||||
private final boolean enabled = checkEnabled();
|
||||
private boolean enabled = true;
|
||||
|
||||
/**
|
||||
* Initializes the analyzer with the configured settings.
|
||||
*
|
||||
* @param settings the configured settings to use
|
||||
*/
|
||||
@Override
|
||||
public void initialize(Settings settings) {
|
||||
super.initialize(settings);
|
||||
enabled = checkEnabled();
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine whether to enable this analyzer or not.
|
||||
@@ -106,9 +120,9 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
boolean retVal = false;
|
||||
|
||||
try {
|
||||
if (Settings.getBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED)) {
|
||||
if (!Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED)
|
||||
|| NexusAnalyzer.DEFAULT_URL.equals(Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL))) {
|
||||
if (getSettings().getBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED)) {
|
||||
if (!getSettings().getBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED)
|
||||
|| NexusAnalyzer.DEFAULT_URL.equals(getSettings().getString(Settings.KEYS.ANALYZER_NEXUS_URL))) {
|
||||
LOGGER.debug("Enabling the Central analyzer");
|
||||
retVal = true;
|
||||
} else {
|
||||
@@ -126,20 +140,19 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Initializes the analyzer once before any analysis is performed.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException if there's an error during initialization
|
||||
*/
|
||||
@Override
|
||||
public void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
public void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
|
||||
LOGGER.debug("Initializing Central analyzer");
|
||||
LOGGER.debug("Central analyzer enabled: {}", isEnabled());
|
||||
if (isEnabled()) {
|
||||
final String searchUrl = Settings.getString(Settings.KEYS.ANALYZER_CENTRAL_URL);
|
||||
LOGGER.debug("Central Analyzer URL: {}", searchUrl);
|
||||
try {
|
||||
searcher = new CentralSearch(new URL(searchUrl));
|
||||
searcher = new CentralSearch(getSettings());
|
||||
} catch (MalformedURLException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("The configured URL to Maven Central is malformed: " + searchUrl, ex);
|
||||
throw new InitializationException("The configured URL to Maven Central is malformed", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -205,7 +218,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
LOGGER.debug("Central analyzer found artifact ({}) for dependency ({})", ma, dependency.getFileName());
|
||||
dependency.addAsEvidence("central", ma, confidence);
|
||||
boolean pomAnalyzed = false;
|
||||
for (Evidence e : dependency.getVendorEvidence()) {
|
||||
for (Evidence e : dependency.getEvidence(EvidenceType.VENDOR)) {
|
||||
if ("pom".equals(e.getSource())) {
|
||||
pomAnalyzed = true;
|
||||
break;
|
||||
@@ -214,7 +227,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
if (!pomAnalyzed && ma.getPomUrl() != null) {
|
||||
File pomFile = null;
|
||||
try {
|
||||
final File baseDir = Settings.getTempDirectory();
|
||||
final File baseDir = getSettings().getTempDirectory();
|
||||
pomFile = File.createTempFile("pom", ".xml", baseDir);
|
||||
if (!pomFile.delete()) {
|
||||
LOGGER.warn("Unable to fetch pom.xml for {} from Central; "
|
||||
@@ -222,7 +235,8 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
LOGGER.debug("Unable to delete temp file");
|
||||
}
|
||||
LOGGER.debug("Downloading {}", ma.getPomUrl());
|
||||
Downloader.fetchFile(new URL(ma.getPomUrl()), pomFile);
|
||||
final Downloader downloader = new Downloader(getSettings());
|
||||
downloader.fetchFile(new URL(ma.getPomUrl()), pomFile);
|
||||
PomUtils.analyzePOM(dependency, pomFile);
|
||||
|
||||
} catch (DownloadFailedException ex) {
|
||||
|
||||
@@ -23,13 +23,14 @@ import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
@@ -41,6 +42,7 @@ import org.owasp.dependencycheck.utils.Settings;
|
||||
* @author Bianca Jiang (https://twitter.com/biancajiang)
|
||||
*/
|
||||
@Experimental
|
||||
@ThreadSafe
|
||||
public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -83,7 +85,7 @@ public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() {
|
||||
protected void prepareFileTypeAnalyzer(Engine engine) {
|
||||
// NO-OP
|
||||
}
|
||||
|
||||
@@ -134,21 +136,34 @@ public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
contents = contents.substring(matcher.end());
|
||||
final String blockVariable = matcher.group(1);
|
||||
|
||||
final EvidenceCollection vendor = dependency.getVendorEvidence();
|
||||
final EvidenceCollection product = dependency.getProductEvidence();
|
||||
final EvidenceCollection version = dependency.getVersionEvidence();
|
||||
|
||||
final String name = addStringEvidence(product, contents, blockVariable, "name", "name", Confidence.HIGHEST);
|
||||
final String name = determineEvidence(contents, blockVariable, "name");
|
||||
if (!name.isEmpty()) {
|
||||
vendor.addEvidence(PODSPEC, "name_project", name, Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, PODSPEC, "name_project", name, Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, PODSPEC, "name_project", name, Confidence.HIGHEST);
|
||||
}
|
||||
final String summary = determineEvidence(contents, blockVariable, "summary");
|
||||
if (!summary.isEmpty()) {
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, PODSPEC, "summary", summary, Confidence.HIGHEST);
|
||||
}
|
||||
addStringEvidence(product, contents, blockVariable, "summary", "summary", Confidence.HIGHEST);
|
||||
|
||||
addStringEvidence(vendor, contents, blockVariable, "author", "authors?", Confidence.HIGHEST);
|
||||
addStringEvidence(vendor, contents, blockVariable, "homepage", "homepage", Confidence.HIGHEST);
|
||||
addStringEvidence(vendor, contents, blockVariable, "license", "licen[cs]es?", Confidence.HIGHEST);
|
||||
final String author = determineEvidence(contents, blockVariable, "authors?");
|
||||
if (!author.isEmpty()) {
|
||||
dependency.addEvidence(EvidenceType.VENDOR, PODSPEC, "author", author, Confidence.HIGHEST);
|
||||
}
|
||||
final String homepage = determineEvidence(contents, blockVariable, "homepage");
|
||||
if (!homepage.isEmpty()) {
|
||||
dependency.addEvidence(EvidenceType.VENDOR, PODSPEC, "homepage", homepage, Confidence.HIGHEST);
|
||||
}
|
||||
final String license = determineEvidence(contents, blockVariable, "licen[cs]es?");
|
||||
if (!license.isEmpty()) {
|
||||
dependency.addEvidence(EvidenceType.VENDOR, PODSPEC, "license", license, Confidence.HIGHEST);
|
||||
}
|
||||
|
||||
final String version = determineEvidence(contents, blockVariable, "version");
|
||||
if (!version.isEmpty()) {
|
||||
dependency.addEvidence(EvidenceType.VERSION, PODSPEC, "version", version, Confidence.HIGHEST);
|
||||
}
|
||||
|
||||
addStringEvidence(version, contents, blockVariable, "version", "version", Confidence.HIGHEST);
|
||||
}
|
||||
|
||||
setPackagePath(dependency);
|
||||
@@ -158,16 +173,12 @@ public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* Extracts evidence from the contents and adds it to the given evidence
|
||||
* collection.
|
||||
*
|
||||
* @param evidences the evidence collection to update
|
||||
* @param contents the text to extract evidence from
|
||||
* @param blockVariable the block variable within the content to search for
|
||||
* @param field the name of the field being searched for
|
||||
* @param fieldPattern the field pattern within the contents to search for
|
||||
* @param confidence the confidence level of the evidence if found
|
||||
* @return the string that was added as evidence
|
||||
* @return the evidence
|
||||
*/
|
||||
private String addStringEvidence(EvidenceCollection evidences, String contents,
|
||||
String blockVariable, String field, String fieldPattern, Confidence confidence) {
|
||||
private String determineEvidence(String contents, String blockVariable, String fieldPattern) {
|
||||
String value = "";
|
||||
|
||||
//capture array value between [ ]
|
||||
@@ -184,9 +195,6 @@ public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
value = matcher.group(2);
|
||||
}
|
||||
}
|
||||
if (value.length() > 0) {
|
||||
evidences.addEvidence(PODSPEC, field, value, confidence);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
|
||||
@@ -37,6 +37,7 @@ import java.io.IOException;
import java.nio.charset.Charset;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.owasp.dependencycheck.dependency.EvidenceType;

/**
* Used to analyze a composer.lock file for a composer PHP app.
@@ -79,11 +80,12 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
/**
* Initializes the analyzer.
*
* @param engine a reference to the dependency-check engine
* @throws InitializationException thrown if an exception occurs getting an
* instance of SHA1
*/
@Override
protected void initializeFileTypeAnalyzer() throws InitializationException {
protected void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
try {
getSha1MessageDigest();
} catch (IllegalStateException ex) {
@@ -112,11 +114,11 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
final MessageDigest sha1 = getSha1MessageDigest();
d.setFilePath(filePath);
d.setSha1sum(Checksum.getHex(sha1.digest(filePath.getBytes(Charset.defaultCharset()))));
d.getVendorEvidence().addEvidence(COMPOSER_LOCK, "vendor", dep.getGroup(), Confidence.HIGHEST);
d.getProductEvidence().addEvidence(COMPOSER_LOCK, "product", dep.getProject(), Confidence.HIGHEST);
d.getVersionEvidence().addEvidence(COMPOSER_LOCK, "version", dep.getVersion(), Confidence.HIGHEST);
d.addEvidence(EvidenceType.VENDOR, COMPOSER_LOCK, "vendor", dep.getGroup(), Confidence.HIGHEST);
d.addEvidence(EvidenceType.PRODUCT, COMPOSER_LOCK, "product", dep.getProject(), Confidence.HIGHEST);
d.addEvidence(EvidenceType.VERSION, COMPOSER_LOCK, "version", dep.getVersion(), Confidence.HIGHEST);
LOGGER.info("Adding dependency {}", d);
engine.getDependencies().add(d);
engine.addDependency(d);
}
} catch (IOException ex) {
LOGGER.warn("Error opening dependency {}", dependency.getActualFilePath());

@@ -17,6 +17,7 @@
*/
package org.owasp.dependencycheck.analyzer;

import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.utils.Settings;

/**
@@ -26,6 +27,7 @@ import org.owasp.dependencycheck.utils.Settings;
*
* @author Jeremy Long
*/
@ThreadSafe
public class CpeSuppressionAnalyzer extends AbstractSuppressionAnalyzer {

/**

@@ -18,14 +18,10 @@
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.ListIterator;
|
||||
import java.util.Set;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Identifier;
|
||||
import org.owasp.dependencycheck.utils.DependencyVersion;
|
||||
@@ -47,37 +43,19 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
|
||||
@ThreadSafe
|
||||
public class DependencyBundlingAnalyzer extends AbstractDependencyComparingAnalyzer {
|
||||
|
||||
/**
|
||||
* The Logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(DependencyBundlingAnalyzer.class);
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="Constants and Member Variables">
|
||||
/**
|
||||
* A pattern for obtaining the first part of a filename.
|
||||
*/
|
||||
private static final Pattern STARTING_TEXT_PATTERN = Pattern.compile("^[a-zA-Z0-9]*");
|
||||
|
||||
/**
|
||||
* a flag indicating if this analyzer has run. This analyzer only runs once.
|
||||
*/
|
||||
private boolean analyzed = false;
|
||||
|
||||
/**
|
||||
* Returns a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once. Note this is currently only used in the unit tests.
|
||||
*
|
||||
* @return a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once
|
||||
*/
|
||||
protected synchronized boolean getAnalyzed() {
|
||||
return analyzed;
|
||||
}
|
||||
|
||||
//</editor-fold>
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
@@ -106,19 +84,6 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* Does not support parallel processing as it only runs once and then
|
||||
* operates on <em>all</em> dependencies.
|
||||
*
|
||||
* @return whether or not parallel processing is enabled
|
||||
* @see #analyze(Dependency, Engine)
|
||||
*/
|
||||
@Override
|
||||
public boolean supportsParallelProcessing() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
@@ -132,65 +97,46 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes a set of dependencies. If they have been found to have the same
|
||||
* base path and the same set of identifiers they are likely related. The
|
||||
* related dependencies are bundled into a single reportable item.
|
||||
* Evaluates the dependencies
|
||||
*
|
||||
* @param ignore this analyzer ignores the dependency being analyzed
|
||||
* @param engine the engine that is scanning the dependencies
|
||||
* @throws AnalysisException is thrown if there is an error reading the JAR
|
||||
* file.
|
||||
* @param dependency a dependency to compare
|
||||
* @param nextDependency a dependency to compare
|
||||
* @param dependenciesToRemove a set of dependencies that will be removed
|
||||
* @return true if a dependency is removed; otherwise false
|
||||
*/
|
||||
@Override
|
||||
protected synchronized void analyzeDependency(Dependency ignore, Engine engine) throws AnalysisException {
|
||||
if (!analyzed) {
|
||||
analyzed = true;
|
||||
final Set<Dependency> dependenciesToRemove = new HashSet<>();
|
||||
final ListIterator<Dependency> mainIterator = engine.getDependencies().listIterator();
|
||||
//for (Dependency nextDependency : engine.getDependencies()) {
|
||||
while (mainIterator.hasNext()) {
|
||||
final Dependency dependency = mainIterator.next();
|
||||
if (mainIterator.hasNext() && !dependenciesToRemove.contains(dependency)) {
|
||||
final ListIterator<Dependency> subIterator = engine.getDependencies().listIterator(mainIterator.nextIndex());
|
||||
while (subIterator.hasNext()) {
|
||||
final Dependency nextDependency = subIterator.next();
|
||||
if (hashesMatch(dependency, nextDependency)) {
|
||||
if (!containedInWar(dependency.getFilePath())
|
||||
&& !containedInWar(nextDependency.getFilePath())) {
|
||||
if (firstPathIsShortest(dependency.getFilePath(), nextDependency.getFilePath())) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
} else {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
break; //since we merged into the next dependency - skip forward to the next in mainIterator
|
||||
}
|
||||
}
|
||||
} else if (isShadedJar(dependency, nextDependency)) {
|
||||
if (dependency.getFileName().toLowerCase().endsWith("pom.xml")) {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
nextDependency.getRelatedDependencies().remove(dependency);
|
||||
break;
|
||||
} else {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
dependency.getRelatedDependencies().remove(nextDependency);
|
||||
}
|
||||
} else if (cpeIdentifiersMatch(dependency, nextDependency)
|
||||
&& hasSameBasePath(dependency, nextDependency)
|
||||
&& vulnCountMatches(dependency, nextDependency)
|
||||
&& fileNameMatch(dependency, nextDependency)) {
|
||||
if (isCore(dependency, nextDependency)) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
} else {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
break; //since we merged into the next dependency - skip forward to the next in mainIterator
|
||||
}
|
||||
}
|
||||
}
|
||||
protected boolean evaluateDependencies(final Dependency dependency, final Dependency nextDependency, final Set<Dependency> dependenciesToRemove) {
|
||||
if (hashesMatch(dependency, nextDependency)) {
|
||||
if (!containedInWar(dependency.getFilePath())
|
||||
&& !containedInWar(nextDependency.getFilePath())) {
|
||||
if (firstPathIsShortest(dependency.getFilePath(), nextDependency.getFilePath())) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
} else {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
return true; //since we merged into the next dependency - skip forward to the next in mainIterator
|
||||
}
|
||||
}
|
||||
//removing dependencies here as ensuring correctness and avoiding ConcurrentUpdateExceptions
|
||||
// was difficult because of the inner iterator.
|
||||
engine.getDependencies().removeAll(dependenciesToRemove);
|
||||
} else if (isShadedJar(dependency, nextDependency)) {
|
||||
if (dependency.getFileName().toLowerCase().endsWith("pom.xml")) {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
nextDependency.removeRelatedDependencies(dependency);
|
||||
return true;
|
||||
} else {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
dependency.removeRelatedDependencies(nextDependency);
|
||||
}
|
||||
} else if (cpeIdentifiersMatch(dependency, nextDependency)
|
||||
&& hasSameBasePath(dependency, nextDependency)
|
||||
&& vulnCountMatches(dependency, nextDependency)
|
||||
&& fileNameMatch(dependency, nextDependency)) {
|
||||
if (isCore(dependency, nextDependency)) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
} else {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
return true; //since we merged into the next dependency - skip forward to the next in mainIterator
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -205,10 +151,9 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
|
||||
*/
|
||||
private void mergeDependencies(final Dependency dependency, final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) {
|
||||
dependency.addRelatedDependency(relatedDependency);
|
||||
final Iterator<Dependency> i = relatedDependency.getRelatedDependencies().iterator();
|
||||
while (i.hasNext()) {
|
||||
dependency.addRelatedDependency(i.next());
|
||||
i.remove();
|
||||
for (Dependency d : relatedDependency.getRelatedDependencies()) {
|
||||
dependency.addRelatedDependency(d);
|
||||
relatedDependency.removeRelatedDependencies(d);
|
||||
}
|
||||
if (dependency.getSha1sum().equals(relatedDependency.getSha1sum())) {
|
||||
dependency.addAllProjectReferences(relatedDependency.getProjectReferences());
|
||||
@@ -239,7 +184,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
|
||||
}
|
||||
//below is always true
|
||||
//if (tmp > 0) {
|
||||
pos = tmp + 1;
|
||||
pos = tmp + 1;
|
||||
//}
|
||||
tmp = path.indexOf(File.separator, pos);
|
||||
if (tmp > 0) {
|
||||
|
||||
@@ -18,13 +18,10 @@
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.ListIterator;
|
||||
import java.util.Set;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Evidence;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -36,31 +33,12 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class DependencyMergingAnalyzer extends AbstractAnalyzer {
|
||||
public class DependencyMergingAnalyzer extends AbstractDependencyComparingAnalyzer {
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="Constants and Member Variables">
|
||||
/**
|
||||
* The Logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(DependencyMergingAnalyzer.class);
|
||||
/**
|
||||
* a flag indicating if this analyzer has run. This analyzer only runs once.
|
||||
*/
|
||||
private boolean analyzed = false;
|
||||
|
||||
/**
|
||||
* Returns a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once. Note this is currently only used in the unit tests.
|
||||
*
|
||||
* @return a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once
|
||||
*/
|
||||
protected synchronized boolean getAnalyzed() {
|
||||
return analyzed;
|
||||
}
|
||||
|
||||
//</editor-fold>
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
@@ -90,18 +68,6 @@ public class DependencyMergingAnalyzer extends AbstractAnalyzer {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
|
||||
/**
* Does not support parallel processing as it only runs once and then
* operates on <em>all</em> dependencies.
*
* @return whether or not parallel processing is enabled
* @see #analyze(Dependency, Engine)
*/
@Override
public boolean supportsParallelProcessing() {
return false;
}
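The javadoc above is the central thread-safety signal in this file: an analyzer that compares all dependencies at once opts out of the engine's parallel execution and guards its single run itself. A condensed sketch of that pattern, using the same flag and synchronization the class already shows (the class name here is illustrative):

public abstract class RunOnceAnalyzer extends AbstractAnalyzer {
    private boolean analyzed = false;

    @Override
    public boolean supportsParallelProcessing() {
        return false; // the single pass touches every dependency, so no concurrent calls
    }

    @Override
    protected synchronized void analyzeDependency(Dependency ignore, Engine engine)
            throws AnalysisException {
        if (!analyzed) {
            analyzed = true;
            // ... pairwise comparison over engine.getDependencies() ...
        }
    }
}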
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
@@ -112,55 +78,34 @@ public class DependencyMergingAnalyzer extends AbstractAnalyzer {
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_DEPENDENCY_MERGING_ENABLED;
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
* Analyzes a set of dependencies. If they have been found to be the same
* dependency created by more multiple FileTypeAnalyzers (i.e. a gemspec
* dependency and a dependency from the Bundle Audit Analyzer. The
* dependencies are then merged into a single reportable item.
* Evaluates the dependencies
*
* @param ignore this analyzer ignores the dependency being analyzed
* @param engine the engine that is scanning the dependencies
* @throws AnalysisException is thrown if there is an error reading the JAR
* file.
* @param dependency a dependency to compare
* @param nextDependency a dependency to compare
* @param dependenciesToRemove a set of dependencies that will be removed
* @return true if a dependency is removed; otherwise false
*/
@Override
protected synchronized void analyzeDependency(Dependency ignore, Engine engine) throws AnalysisException {
if (!analyzed) {
analyzed = true;
final Set<Dependency> dependenciesToRemove = new HashSet<>();
final ListIterator<Dependency> mainIterator = engine.getDependencies().listIterator();
//for (Dependency nextDependency : engine.getDependencies()) {
while (mainIterator.hasNext()) {
final Dependency dependency = mainIterator.next();
if (mainIterator.hasNext() && !dependenciesToRemove.contains(dependency)) {
final ListIterator<Dependency> subIterator = engine.getDependencies().listIterator(mainIterator.nextIndex());
while (subIterator.hasNext()) {
final Dependency nextDependency = subIterator.next();
Dependency main;
if ((main = getMainGemspecDependency(dependency, nextDependency)) != null) {
if (main == dependency) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
break; //since we merged into the next dependency - skip forward to the next in mainIterator
}
} else if ((main = getMainSwiftDependency(dependency, nextDependency)) != null) {
if (main == dependency) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
break; //since we merged into the next dependency - skip forward to the next in mainIterator
}
}
}
}
}
protected boolean evaluateDependencies(final Dependency dependency, final Dependency nextDependency, final Set<Dependency> dependenciesToRemove) {
Dependency main;
if ((main = getMainGemspecDependency(dependency, nextDependency)) != null) {
if (main == dependency) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
return true; //since we merged into the next dependency - skip forward to the next in mainIterator
}
} else if ((main = getMainSwiftDependency(dependency, nextDependency)) != null) {
if (main == dependency) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
return true; //since we merged into the next dependency - skip forward to the next in mainIterator
}
//removing dependencies here as ensuring correctness and avoiding ConcurrentUpdateExceptions
// was difficult because of the inner iterator.
engine.getDependencies().removeAll(dependenciesToRemove);
}
return false;
}
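The removed analyzeDependency above shows why the refactor was needed: removal had to be deferred to avoid concurrent-modification problems with the nested iterators, and every merging analyzer repeated the same loop. In the new design that loop lives once in a shared base class and subclasses only supply the pair comparison. A simplified sketch of that base class (the real one is introduced elsewhere in this pull request, so treat the details as approximate):

public abstract class AbstractDependencyComparingAnalyzer extends AbstractAnalyzer {

    protected abstract boolean evaluateDependencies(Dependency dependency,
            Dependency nextDependency, Set<Dependency> dependenciesToRemove);

    @Override
    protected synchronized void analyzeDependency(Dependency ignore, Engine engine)
            throws AnalysisException {
        final Set<Dependency> toRemove = new HashSet<>();
        final Dependency[] deps = engine.getDependencies();
        for (int i = 0; i < deps.length; i++) {
            if (!toRemove.contains(deps[i])) {
                for (int j = i + 1; j < deps.length; j++) {
                    if (evaluateDependencies(deps[i], deps[j], toRemove)) {
                        break; // deps[i] was merged into deps[j]
                    }
                }
            }
        }
        for (Dependency d : toRemove) { // removal happens only after iteration completes
            engine.removeDependency(d);
        }
    }
}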
/**
|
||||
@@ -176,14 +121,19 @@ public class DependencyMergingAnalyzer extends AbstractAnalyzer {
|
||||
private void mergeDependencies(final Dependency dependency, final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) {
|
||||
LOGGER.debug("Merging '{}' into '{}'", relatedDependency.getFilePath(), dependency.getFilePath());
|
||||
dependency.addRelatedDependency(relatedDependency);
|
||||
dependency.getVendorEvidence().getEvidence().addAll(relatedDependency.getVendorEvidence().getEvidence());
|
||||
dependency.getProductEvidence().getEvidence().addAll(relatedDependency.getProductEvidence().getEvidence());
|
||||
dependency.getVersionEvidence().getEvidence().addAll(relatedDependency.getVersionEvidence().getEvidence());
|
||||
for (Evidence e : relatedDependency.getEvidence(EvidenceType.VENDOR)) {
|
||||
dependency.addEvidence(EvidenceType.VENDOR, e);
|
||||
}
|
||||
for (Evidence e : relatedDependency.getEvidence(EvidenceType.PRODUCT)) {
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, e);
|
||||
}
|
||||
for (Evidence e : relatedDependency.getEvidence(EvidenceType.VERSION)) {
|
||||
dependency.addEvidence(EvidenceType.VERSION, e);
|
||||
}
|
||||
|
||||
final Iterator<Dependency> i = relatedDependency.getRelatedDependencies().iterator();
|
||||
while (i.hasNext()) {
|
||||
dependency.addRelatedDependency(i.next());
|
||||
i.remove();
|
||||
for (Dependency d : relatedDependency.getRelatedDependencies()) {
|
||||
dependency.addRelatedDependency(d);
|
||||
relatedDependency.removeRelatedDependencies(d);
|
||||
}
|
||||
if (dependency.getSha1sum().equals(relatedDependency.getSha1sum())) {
|
||||
dependency.addAllProjectReferences(relatedDependency.getProjectReferences());
|
||||
|
||||
@@ -22,15 +22,18 @@ import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.HashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Evidence;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
@@ -44,6 +47,7 @@ import org.slf4j.LoggerFactory;
*
* @author Jeremy Long
*/
@ThreadSafe
public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -155,19 +159,19 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
}
}
}
if (mustContain
!= null) {
final Iterator<Identifier> itr = dependency.getIdentifiers().iterator();
while (itr.hasNext()) {
final Identifier i = itr.next();
if (mustContain != null) {
final Set<Identifier> removalSet = new HashSet<>();
for (Identifier i : dependency.getIdentifiers()) {
if ("cpe".contains(i.getType())
&& i.getValue() != null
&& i.getValue().startsWith("cpe:/a:springsource:")
&& !i.getValue().toLowerCase().contains(mustContain)) {
itr.remove();
//dependency.getIdentifiers().remove(i);
removalSet.add(i);
}
}
for (Identifier i : removalSet) {
dependency.removeIdentifier(i);
}
}
}

@@ -218,15 +222,15 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
//how did we get here?
LOGGER.debug("currentVersion and nextVersion are both null?");
} else if (currentVersion == null && nextVersion != null) {
dependency.getIdentifiers().remove(currentId);
dependency.removeIdentifier(currentId);
} else if (nextVersion == null && currentVersion != null) {
dependency.getIdentifiers().remove(nextId);
dependency.removeIdentifier(nextId);
} else if (currentVersion.length() < nextVersion.length()) {
if (nextVersion.startsWith(currentVersion) || "-".equals(currentVersion)) {
dependency.getIdentifiers().remove(currentId);
dependency.removeIdentifier(currentId);
}
} else if (currentVersion.startsWith(nextVersion) || "-".equals(nextVersion)) {
dependency.getIdentifiers().remove(nextId);
dependency.removeIdentifier(nextId);
}
}
}
@@ -241,21 +245,22 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
* @param dependency the dependency to remove JRE CPEs from
*/
private void removeJreEntries(Dependency dependency) {
final Set<Identifier> identifiers = dependency.getIdentifiers();
final Iterator<Identifier> itr = identifiers.iterator();
while (itr.hasNext()) {
final Identifier i = itr.next();
final Set<Identifier> removalSet = new HashSet<>();
for (Identifier i : dependency.getIdentifiers()) {
final Matcher coreCPE = CORE_JAVA.matcher(i.getValue());
final Matcher coreFiles = CORE_FILES.matcher(dependency.getFileName());
if (coreCPE.matches() && !coreFiles.matches()) {
itr.remove();
removalSet.add(i);
}
final Matcher coreJsfCPE = CORE_JAVA_JSF.matcher(i.getValue());
final Matcher coreJsfFiles = CORE_JSF_FILES.matcher(dependency.getFileName());
if (coreJsfCPE.matches() && !coreJsfFiles.matches()) {
itr.remove();
removalSet.add(i);
}
}
for (Identifier i : removalSet) {
dependency.removeIdentifier(i);
}
}
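Both hunks above replace iterator-based removal with a removal set, so the identifier collection is never mutated while it is being walked. The idiom in isolation (shouldDrop is an assumed stand-in for the CPE and filename checks, not a real method in the project):

final Set<Identifier> removalSet = new HashSet<>();
for (Identifier id : dependency.getIdentifiers()) {
    if (shouldDrop(id)) {            // assumed predicate
        removalSet.add(id);
    }
}
for (Identifier id : removalSet) {
    dependency.removeIdentifier(id); // mutate only after the iteration is done
}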
/**
|
||||
@@ -286,9 +291,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
*
|
||||
* @param dependency the dependency to analyze
|
||||
*/
|
||||
private void removeBadMatches(Dependency dependency) {
|
||||
final Set<Identifier> identifiers = dependency.getIdentifiers();
|
||||
final Iterator<Identifier> itr = identifiers.iterator();
|
||||
protected void removeBadMatches(Dependency dependency) {
|
||||
|
||||
/* TODO - can we utilize the pom's groupid and artifactId to filter??? most of
|
||||
* these are due to low quality data. Other idea would be to say any CPE
|
||||
@@ -297,8 +300,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
*/
|
||||
//Set<Evidence> groupId = dependency.getVendorEvidence().getEvidence("pom", "groupid");
|
||||
//Set<Evidence> artifactId = dependency.getVendorEvidence().getEvidence("pom", "artifactid");
|
||||
while (itr.hasNext()) {
|
||||
final Identifier i = itr.next();
|
||||
for (Identifier i : dependency.getIdentifiers()) {
|
||||
//TODO move this startsWith expression to the base suppression file
|
||||
if ("cpe".equals(i.getType())) {
|
||||
if ((i.getValue().matches(".*c\\+\\+.*")
|
||||
@@ -322,7 +324,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".tgz")
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".ear")
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".war"))) {
|
||||
itr.remove();
|
||||
//itr.remove();
|
||||
dependency.removeIdentifier(i);
|
||||
} else if ((i.getValue().startsWith("cpe:/a:jquery:jquery")
|
||||
|| i.getValue().startsWith("cpe:/a:prototypejs:prototype")
|
||||
|| i.getValue().startsWith("cpe:/a:yahoo:yui"))
|
||||
@@ -330,7 +333,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
|| dependency.getFileName().toLowerCase().endsWith("pom.xml")
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".dll")
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".exe"))) {
|
||||
itr.remove();
|
||||
//itr.remove();
|
||||
dependency.removeIdentifier(i);
|
||||
} else if ((i.getValue().startsWith("cpe:/a:microsoft:excel")
|
||||
|| i.getValue().startsWith("cpe:/a:microsoft:word")
|
||||
|| i.getValue().startsWith("cpe:/a:microsoft:visio")
|
||||
@@ -341,16 +345,36 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".ear")
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".war")
|
||||
|| dependency.getFileName().toLowerCase().endsWith("pom.xml"))) {
|
||||
itr.remove();
|
||||
//itr.remove();
|
||||
dependency.removeIdentifier(i);
|
||||
} else if (i.getValue().startsWith("cpe:/a:apache:maven")
|
||||
&& !dependency.getFileName().toLowerCase().matches("maven-core-[\\d\\.]+\\.jar")) {
|
||||
itr.remove();
|
||||
} else if (i.getValue().startsWith("cpe:/a:m-core:m-core")
|
||||
&& !dependency.getEvidenceUsed().containsUsedString("m-core")) {
|
||||
itr.remove();
|
||||
//itr.remove();
|
||||
dependency.removeIdentifier(i);
|
||||
} else if (i.getValue().startsWith("cpe:/a:m-core:m-core")) {
|
||||
boolean found = false;
|
||||
for (Evidence e : dependency.getEvidence(EvidenceType.PRODUCT)) {
|
||||
if ("m-core".equalsIgnoreCase(e.getValue())) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
for (Evidence e : dependency.getEvidence(EvidenceType.VENDOR)) {
|
||||
if ("m-core".equalsIgnoreCase(e.getValue())) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
//itr.remove();
|
||||
dependency.removeIdentifier(i);
|
||||
}
|
||||
} else if (i.getValue().startsWith("cpe:/a:jboss:jboss")
|
||||
&& !dependency.getFileName().toLowerCase().matches("jboss-?[\\d\\.-]+(GA)?\\.jar")) {
|
||||
itr.remove();
|
||||
//itr.remove();
|
||||
dependency.removeIdentifier(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -363,31 +387,30 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
* @param dependency the dependency to analyze
|
||||
*/
|
||||
private void removeWrongVersionMatches(Dependency dependency) {
|
||||
final Set<Identifier> identifiers = dependency.getIdentifiers();
|
||||
final Iterator<Identifier> itr = identifiers.iterator();
|
||||
|
||||
final Set<Identifier> identifiersToRemove = new HashSet<>();
|
||||
final String fileName = dependency.getFileName();
|
||||
if (fileName != null && fileName.contains("axis2")) {
|
||||
while (itr.hasNext()) {
|
||||
final Identifier i = itr.next();
|
||||
for (Identifier i : dependency.getIdentifiers()) {
|
||||
if ("cpe".equals(i.getType())) {
|
||||
final String cpe = i.getValue();
|
||||
if (cpe != null && (cpe.startsWith("cpe:/a:apache:axis:") || "cpe:/a:apache:axis".equals(cpe))) {
|
||||
itr.remove();
|
||||
identifiersToRemove.add(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (fileName != null && fileName.contains("axis")) {
|
||||
while (itr.hasNext()) {
|
||||
final Identifier i = itr.next();
|
||||
for (Identifier i : dependency.getIdentifiers()) {
|
||||
if ("cpe".equals(i.getType())) {
|
||||
final String cpe = i.getValue();
|
||||
if (cpe != null && (cpe.startsWith("cpe:/a:apache:axis2:") || "cpe:/a:apache:axis2".equals(cpe))) {
|
||||
itr.remove();
|
||||
identifiersToRemove.add(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for (Identifier i : identifiersToRemove) {
|
||||
dependency.removeIdentifier(i);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -411,17 +434,13 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
final String newCpe3 = String.format("cpe:/a:sun:opensso:%s", identifier.getValue().substring(22));
|
||||
final String newCpe4 = String.format("cpe:/a:oracle:opensso:%s", identifier.getValue().substring(22));
|
||||
try {
|
||||
dependency.addIdentifier("cpe",
|
||||
newCpe,
|
||||
dependency.addIdentifier("cpe", newCpe,
|
||||
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe, "UTF-8")));
|
||||
dependency.addIdentifier("cpe",
|
||||
newCpe2,
|
||||
dependency.addIdentifier("cpe", newCpe2,
|
||||
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe2, "UTF-8")));
|
||||
dependency.addIdentifier("cpe",
|
||||
newCpe3,
|
||||
dependency.addIdentifier("cpe", newCpe3,
|
||||
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe3, "UTF-8")));
|
||||
dependency.addIdentifier("cpe",
|
||||
newCpe4,
|
||||
dependency.addIdentifier("cpe", newCpe4,
|
||||
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe4, "UTF-8")));
|
||||
} catch (UnsupportedEncodingException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
@@ -444,7 +463,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
String parentPath = dependency.getFilePath().toLowerCase();
if (parentPath.contains(".jar")) {
parentPath = parentPath.substring(0, parentPath.indexOf(".jar") + 4);
final List<Dependency> dependencies = engine.getDependencies();
final Dependency[] dependencies = engine.getDependencies();
final Dependency parent = findDependency(parentPath, dependencies);
if (parent != null) {
boolean remove = false;
@@ -462,7 +481,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
}
}
if (remove) {
dependencies.remove(dependency);
engine.removeDependency(dependency);
}
}
}
@@ -474,10 +493,10 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
* dependencies.
*
* @param dependencyPath the path of the dependency to return
* @param dependencies the collection of dependencies to search
* @param dependencies the array of dependencies to search
* @return the dependency object for the given path, otherwise null
*/
private Dependency findDependency(String dependencyPath, List<Dependency> dependencies) {
private Dependency findDependency(String dependencyPath, Dependency[] dependencies) {
for (Dependency d : dependencies) {
if (d.getFilePath().equalsIgnoreCase(dependencyPath)) {
return d;

@@ -18,6 +18,7 @@
package org.owasp.dependencycheck.analyzer;

import java.io.File;
import javax.annotation.concurrent.ThreadSafe;

import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.filefilter.NameFileFilter;
@@ -25,16 +26,17 @@ import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
import org.owasp.dependencycheck.utils.Settings;

/**
*
* Takes a dependency and analyzes the filename and determines the hashes.
*
* @author Jeremy Long
*/
@ThreadSafe
public class FileNameAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -76,6 +78,7 @@ public class FileNameAnalyzer extends AbstractAnalyzer {
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
@@ -111,21 +114,16 @@ public class FileNameAnalyzer extends AbstractAnalyzer {
// a shade. This should hopefully correct for cases like log4j.jar or
// struts2-core.jar
if (version.getVersionParts() == null || version.getVersionParts().size() < 2) {
dependency.getVersionEvidence().addEvidence("file", "version",
version.toString(), Confidence.MEDIUM);
dependency.addEvidence(EvidenceType.VERSION, "file", "version", version.toString(), Confidence.MEDIUM);
} else {
dependency.getVersionEvidence().addEvidence("file", "version",
version.toString(), Confidence.HIGHEST);
dependency.addEvidence(EvidenceType.VERSION, "file", "version", version.toString(), Confidence.HIGHEST);
}
dependency.getVersionEvidence().addEvidence("file", "name",
packageName, Confidence.MEDIUM);
dependency.addEvidence(EvidenceType.VERSION, "file", "name", packageName, Confidence.MEDIUM);
}

if (!IGNORED_FILES.accept(f)) {
dependency.getProductEvidence().addEvidence("file", "name",
packageName, Confidence.HIGH);
dependency.getVendorEvidence().addEvidence("file", "name",
packageName, Confidence.HIGH);
dependency.addEvidence(EvidenceType.PRODUCT, "file", "name", packageName, Confidence.HIGH);
dependency.addEvidence(EvidenceType.VENDOR, "file", "name", packageName, Confidence.HIGH);
}
}
}
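This hunk is representative of the API migration that recurs through the rest of the diff: evidence is no longer added through the per-type EvidenceCollection getters but through a single typed call on Dependency. The before/after shape, with illustrative values:

// old style (removed): dependency.getVersionEvidence().addEvidence("file", "version", "1.2.3", Confidence.HIGHEST);
dependency.addEvidence(EvidenceType.VERSION, "file", "version", "1.2.3", Confidence.HIGHEST);
dependency.addEvidence(EvidenceType.PRODUCT, "file", "name", "example-lib", Confidence.HIGH);
dependency.addEvidence(EvidenceType.VENDOR, "file", "name", "example-lib", Confidence.HIGH);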
@@ -26,10 +26,12 @@ import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.regex.Pattern;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Evidence;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.xml.suppression.PropertyType;
|
||||
import org.owasp.dependencycheck.utils.DownloadFailedException;
|
||||
@@ -40,7 +42,6 @@ import org.owasp.dependencycheck.xml.hints.VendorDuplicatingHintRule;
|
||||
import org.owasp.dependencycheck.xml.hints.HintParseException;
|
||||
import org.owasp.dependencycheck.xml.hints.HintParser;
|
||||
import org.owasp.dependencycheck.xml.hints.HintRule;
|
||||
import org.owasp.dependencycheck.xml.hints.Hints;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.xml.sax.SAXException;
|
||||
@@ -51,6 +52,7 @@ import org.xml.sax.SAXException;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class HintAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -62,11 +64,13 @@ public class HintAnalyzer extends AbstractAnalyzer {
|
||||
*/
|
||||
private static final String HINT_RULE_FILE_NAME = "dependencycheck-base-hint.xml";
|
||||
/**
|
||||
* The collection of hints.
|
||||
* The array of hint rules.
|
||||
*/
|
||||
private Hints hints;
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
private HintRule[] hints = null;
|
||||
/**
|
||||
* The array of vendor duplicating hint rules.
|
||||
*/
|
||||
private VendorDuplicatingHintRule[] vendorHints;
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
@@ -108,12 +112,13 @@ public class HintAnalyzer extends AbstractAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* The initialize method does nothing for this Analyzer.
|
||||
* The prepare method does nothing for this Analyzer.
|
||||
*
|
||||
* @param engine a reference the dependency-check engine
|
||||
* @throws InitializationException thrown if there is an exception
|
||||
*/
|
||||
@Override
|
||||
public void initializeAnalyzer() throws InitializationException {
|
||||
public void prepareAnalyzer(Engine engine) throws InitializationException {
|
||||
try {
|
||||
loadHintRules();
|
||||
} catch (HintParseException ex) {
|
||||
@@ -121,7 +126,6 @@ public class HintAnalyzer extends AbstractAnalyzer {
|
||||
throw new InitializationException("Unable to parse the hint file", ex);
|
||||
}
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* The HintAnalyzer uses knowledge about a dependency to add additional
|
||||
@@ -134,17 +138,17 @@ public class HintAnalyzer extends AbstractAnalyzer {
|
||||
*/
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
for (HintRule hint : hints.getHintRules()) {
|
||||
for (HintRule hint : hints) {
|
||||
boolean matchFound = false;
|
||||
for (Evidence given : hint.getGivenVendor()) {
|
||||
if (dependency.getVendorEvidence().getEvidence().contains(given)) {
|
||||
if (dependency.contains(EvidenceType.VENDOR, given)) {
|
||||
matchFound = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!matchFound) {
|
||||
for (Evidence given : hint.getGivenProduct()) {
|
||||
if (dependency.getProductEvidence().getEvidence().contains(given)) {
|
||||
if (dependency.contains(EvidenceType.PRODUCT, given)) {
|
||||
matchFound = true;
|
||||
break;
|
||||
}
|
||||
@@ -152,7 +156,7 @@ public class HintAnalyzer extends AbstractAnalyzer {
|
||||
}
|
||||
if (!matchFound) {
|
||||
for (Evidence given : hint.getGivenVersion()) {
|
||||
if (dependency.getVersionEvidence().getEvidence().contains(given)) {
|
||||
if (dependency.contains(EvidenceType.VERSION, given)) {
|
||||
matchFound = true;
|
||||
break;
|
||||
}
|
||||
@@ -168,45 +172,45 @@ public class HintAnalyzer extends AbstractAnalyzer {
|
||||
}
|
||||
if (matchFound) {
|
||||
for (Evidence e : hint.getAddVendor()) {
|
||||
dependency.getVendorEvidence().addEvidence(e);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, e);
|
||||
}
|
||||
for (Evidence e : hint.getAddProduct()) {
|
||||
dependency.getProductEvidence().addEvidence(e);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, e);
|
||||
}
|
||||
for (Evidence e : hint.getAddVersion()) {
|
||||
dependency.getVersionEvidence().addEvidence(e);
|
||||
dependency.addEvidence(EvidenceType.VERSION, e);
|
||||
}
|
||||
for (Evidence e : hint.getRemoveVendor()) {
|
||||
if (dependency.getVendorEvidence().getEvidence().contains(e)) {
|
||||
dependency.getVendorEvidence().getEvidence().remove(e);
|
||||
if (dependency.contains(EvidenceType.VENDOR, e)) {
|
||||
dependency.removeEvidence(EvidenceType.VENDOR, e);
|
||||
}
|
||||
}
|
||||
for (Evidence e : hint.getRemoveProduct()) {
|
||||
if (dependency.getProductEvidence().getEvidence().contains(e)) {
|
||||
dependency.getProductEvidence().getEvidence().remove(e);
|
||||
if (dependency.contains(EvidenceType.PRODUCT, e)) {
|
||||
dependency.removeEvidence(EvidenceType.PRODUCT, e);
|
||||
}
|
||||
}
|
||||
for (Evidence e : hint.getRemoveVersion()) {
|
||||
if (dependency.getVersionEvidence().getEvidence().contains(e)) {
|
||||
dependency.getVersionEvidence().getEvidence().remove(e);
|
||||
if (dependency.contains(EvidenceType.VERSION, e)) {
|
||||
dependency.removeEvidence(EvidenceType.VERSION, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
final Iterator<Evidence> itr = dependency.getVendorEvidence().iterator();
|
||||
final Iterator<Evidence> itr = dependency.getEvidence(EvidenceType.VENDOR).iterator();
|
||||
final List<Evidence> newEntries = new ArrayList<>();
|
||||
while (itr.hasNext()) {
|
||||
final Evidence e = itr.next();
|
||||
for (VendorDuplicatingHintRule dhr : hints.getVendorDuplicatingHintRules()) {
|
||||
if (dhr.getValue().equalsIgnoreCase(e.getValue(false))) {
|
||||
for (VendorDuplicatingHintRule dhr : vendorHints) {
|
||||
if (dhr.getValue().equalsIgnoreCase(e.getValue())) {
|
||||
newEntries.add(new Evidence(e.getSource() + " (hint)",
|
||||
e.getName(), dhr.getDuplicate(), e.getConfidence()));
|
||||
}
|
||||
}
|
||||
}
|
||||
for (Evidence e : newEntries) {
|
||||
dependency.getVendorEvidence().addEvidence(e);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, e);
|
||||
}
|
||||
}
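The loop above matches a hint rule's given evidence against the dependency and then applies the rule's additions and removals through the new typed evidence API. Condensed to the vendor branch only (product and version follow the same pattern):

boolean matchFound = false;
for (Evidence given : hint.getGivenVendor()) {
    if (dependency.contains(EvidenceType.VENDOR, given)) {
        matchFound = true;
        break;
    }
}
if (matchFound) {
    for (Evidence e : hint.getAddVendor()) {
        dependency.addEvidence(EvidenceType.VENDOR, e);
    }
    for (Evidence e : hint.getRemoveVendor()) {
        if (dependency.contains(EvidenceType.VENDOR, e)) {
            dependency.removeEvidence(EvidenceType.VENDOR, e);
        }
    }
}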
@@ -216,71 +220,81 @@ public class HintAnalyzer extends AbstractAnalyzer {
|
||||
* @throws HintParseException thrown if the XML cannot be parsed.
|
||||
*/
|
||||
private void loadHintRules() throws HintParseException {
|
||||
List<HintRule> localHints;
|
||||
List<VendorDuplicatingHintRule> localVendorHints;
|
||||
final HintParser parser = new HintParser();
|
||||
File file = null;
|
||||
try {
|
||||
hints = parser.parseHints(FileUtils.getResourceAsStream(HINT_RULE_FILE_NAME));
|
||||
} catch (HintParseException | SAXException ex) {
|
||||
LOGGER.error("Unable to parse the base hint data file");
|
||||
LOGGER.debug("Unable to parse the base hint data file", ex);
|
||||
parser.parseHints(FileUtils.getResourceAsStream(HINT_RULE_FILE_NAME));
|
||||
} catch (SAXException ex) {
|
||||
throw new HintParseException("Error parsing hinits: " + ex.getMessage(), ex);
|
||||
}
|
||||
final String filePath = Settings.getString(Settings.KEYS.HINTS_FILE);
|
||||
if (filePath == null) {
|
||||
return;
|
||||
}
|
||||
boolean deleteTempFile = false;
|
||||
try {
|
||||
final Pattern uriRx = Pattern.compile("^(https?|file)\\:.*", Pattern.CASE_INSENSITIVE);
|
||||
if (uriRx.matcher(filePath).matches()) {
|
||||
deleteTempFile = true;
|
||||
file = FileUtils.getTempFile("hint", "xml");
|
||||
final URL url = new URL(filePath);
|
||||
try {
|
||||
Downloader.fetchFile(url, file, false);
|
||||
} catch (DownloadFailedException ex) {
|
||||
Downloader.fetchFile(url, file, true);
|
||||
}
|
||||
} else {
|
||||
file = new File(filePath);
|
||||
if (!file.exists()) {
|
||||
try (InputStream fromClasspath = FileUtils.getResourceAsStream(filePath)) {
|
||||
if (fromClasspath != null) {
|
||||
deleteTempFile = true;
|
||||
file = FileUtils.getTempFile("hint", "xml");
|
||||
try {
|
||||
org.apache.commons.io.FileUtils.copyInputStreamToFile(fromClasspath, file);
|
||||
} catch (IOException ex) {
|
||||
throw new HintParseException("Unable to locate hints file in classpath", ex);
|
||||
localHints = parser.getHintRules();
|
||||
localVendorHints = parser.getVendorDuplicatingHintRules();
|
||||
|
||||
final String filePath = getSettings().getString(Settings.KEYS.HINTS_FILE);
|
||||
if (filePath != null) {
|
||||
boolean deleteTempFile = false;
|
||||
try {
|
||||
final Pattern uriRx = Pattern.compile("^(https?|file)\\:.*", Pattern.CASE_INSENSITIVE);
|
||||
if (uriRx.matcher(filePath).matches()) {
|
||||
deleteTempFile = true;
|
||||
file = getSettings().getTempFile("hint", "xml");
|
||||
final URL url = new URL(filePath);
|
||||
final Downloader downloader = new Downloader(getSettings());
|
||||
try {
|
||||
downloader.fetchFile(url, file, false);
|
||||
} catch (DownloadFailedException ex) {
|
||||
downloader.fetchFile(url, file, true);
|
||||
}
|
||||
} else {
|
||||
file = new File(filePath);
|
||||
if (!file.exists()) {
|
||||
try (InputStream fromClasspath = FileUtils.getResourceAsStream(filePath)) {
|
||||
if (fromClasspath != null) {
|
||||
deleteTempFile = true;
|
||||
file = getSettings().getTempFile("hint", "xml");
|
||||
try {
|
||||
org.apache.commons.io.FileUtils.copyInputStreamToFile(fromClasspath, file);
|
||||
} catch (IOException ex) {
|
||||
throw new HintParseException("Unable to locate hints file in classpath", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (file != null) {
|
||||
try {
|
||||
final Hints newHints = parser.parseHints(file);
|
||||
hints.getHintRules().addAll(newHints.getHintRules());
|
||||
hints.getVendorDuplicatingHintRules().addAll(newHints.getVendorDuplicatingHintRules());
|
||||
LOGGER.debug("{} hint rules were loaded.", hints.getHintRules().size());
|
||||
LOGGER.debug("{} duplicating hint rules were loaded.", hints.getVendorDuplicatingHintRules().size());
|
||||
} catch (HintParseException ex) {
|
||||
LOGGER.warn("Unable to parse hint rule xml file '{}'", file.getPath());
|
||||
LOGGER.warn(ex.getMessage());
|
||||
LOGGER.debug("", ex);
|
||||
throw ex;
|
||||
if (file != null) {
|
||||
try {
|
||||
parser.parseHints(file);
|
||||
if (parser.getHintRules() != null && !parser.getHintRules().isEmpty()) {
|
||||
localHints.addAll(parser.getHintRules());
|
||||
}
|
||||
if (parser.getVendorDuplicatingHintRules() != null && !parser.getVendorDuplicatingHintRules().isEmpty()) {
|
||||
localVendorHints.addAll(parser.getVendorDuplicatingHintRules());
|
||||
}
|
||||
} catch (HintParseException ex) {
|
||||
LOGGER.warn("Unable to parse hint rule xml file '{}'", file.getPath());
|
||||
LOGGER.warn(ex.getMessage());
|
||||
LOGGER.debug("", ex);
|
||||
throw ex;
|
||||
}
|
||||
}
|
||||
} catch (DownloadFailedException ex) {
|
||||
throw new HintParseException("Unable to fetch the configured hint file", ex);
|
||||
} catch (MalformedURLException ex) {
|
||||
throw new HintParseException("Configured hint file has an invalid URL", ex);
|
||||
} catch (IOException ex) {
|
||||
throw new HintParseException("Unable to create temp file for hints", ex);
|
||||
} finally {
|
||||
if (deleteTempFile && file != null) {
|
||||
FileUtils.delete(file);
|
||||
}
|
||||
}
|
||||
} catch (DownloadFailedException ex) {
|
||||
throw new HintParseException("Unable to fetch the configured hint file", ex);
|
||||
} catch (MalformedURLException ex) {
|
||||
throw new HintParseException("Configured hint file has an invalid URL", ex);
|
||||
} catch (IOException ex) {
|
||||
throw new HintParseException("Unable to create temp file for hints", ex);
|
||||
} finally {
|
||||
if (deleteTempFile && file != null) {
|
||||
FileUtils.delete(file);
|
||||
}
|
||||
}
|
||||
hints = (HintRule[]) localHints.toArray(new HintRule[localHints.size()]);
|
||||
vendorHints = (VendorDuplicatingHintRule[]) localVendorHints.toArray(new VendorDuplicatingHintRule[localVendorHints.size()]);
|
||||
LOGGER.debug("{} hint rules were loaded.", hints.length);
|
||||
LOGGER.debug("{} duplicating hint rules were loaded.", vendorHints.length);
|
||||
}
|
||||
}
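loadHintRules() above ends by snapshotting the parsed rules into plain arrays, which is what analyzeDependency() iterates; after initialization the rule set is not mutated again. A compact equivalent of that conversion (same parser API, generic toArray form rather than the explicit cast used above):

final List<HintRule> localHints = parser.getHintRules();
final List<VendorDuplicatingHintRule> localVendorHints = parser.getVendorDuplicatingHintRules();
hints = localHints.toArray(new HintRule[localHints.size()]);
vendorHints = localVendorHints.toArray(new VendorDuplicatingHintRule[localVendorHints.size()]);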
@@ -53,7 +53,7 @@ import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.FileUtils;
@@ -251,7 +251,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|| fileName.endsWith("-doc.jar")
|| isMacOSMetaDataFile(dependency, engine))
|| !isZipFile(dependency)) {
engine.getDependencies().remove(dependency);
engine.removeDependency(dependency);
return;
}
final boolean hasManifest = parseManifest(dependency, classNames);
|
||||
@@ -264,13 +264,15 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the given dependency appears to be a macOS metadata file, returning true if its filename starts with a
|
||||
* ._ prefix and if there is another dependency with the same filename minus the ._ prefix, otherwise it returns
|
||||
* false.
|
||||
* Checks if the given dependency appears to be a macOS metadata file,
|
||||
* returning true if its filename starts with a ._ prefix and if there is
|
||||
* another dependency with the same filename minus the ._ prefix, otherwise
|
||||
* it returns false.
|
||||
*
|
||||
* @param dependency the dependency to check if it's a macOS metadata file
|
||||
* @param engine the engine that is scanning the dependencies
|
||||
* @return whether or not the given dependency appears to be a macOS metadata file
|
||||
* @param engine the engine that is scanning the dependencies
|
||||
* @return whether or not the given dependency appears to be a macOS
|
||||
* metadata file
|
||||
*/
|
||||
private boolean isMacOSMetaDataFile(final Dependency dependency, final Engine engine) {
|
||||
final String fileName = Paths.get(dependency.getActualFilePath()).getFileName().toString();
|
||||
@@ -278,17 +280,19 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterates through the given list of dependencies and returns true when it finds a dependency with a filename
|
||||
* matching the given filename, otherwise returns false.
|
||||
* Iterates through the given list of dependencies and returns true when it
|
||||
* finds a dependency with a filename matching the given filename, otherwise
|
||||
* returns false.
|
||||
*
|
||||
* @param dependencies the dependencies to search within
|
||||
* @param fileName the filename to search for
|
||||
* @return whether or not the given dependencies contain a dependency with the given filename
|
||||
* @param fileName the filename to search for
|
||||
* @return whether or not the given dependencies contain a dependency with
|
||||
* the given filename
|
||||
*/
|
||||
private boolean hasDependencyWithFilename(final List<Dependency> dependencies, final String fileName) {
|
||||
private boolean hasDependencyWithFilename(final Dependency[] dependencies, final String fileName) {
|
||||
for (final Dependency dependency : dependencies) {
|
||||
if (Paths.get(dependency.getActualFilePath()).getFileName().toString().toLowerCase()
|
||||
.equals(fileName.toLowerCase())) {
|
||||
.equals(fileName.toLowerCase())) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -296,23 +300,25 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
* Attempts to read the first bytes of the given dependency (using its actual file path) and returns true if they
* match the expected first bytes of a zip file, which may be empty or spanned. If they don't match, or if the file
* could not be read, then it returns false.
* Attempts to read the first bytes of the given dependency (using its
* actual file path) and returns true if they match the expected first bytes
* of a zip file, which may be empty or spanned. If they don't match, or if
* the file could not be read, then it returns false.
*
* @param dependency the dependency to check if it's a zip file
* @return whether or not the given dependency appears to be a zip file from its first bytes
* @return whether or not the given dependency appears to be a zip file from
* its first bytes
*/
private boolean isZipFile(final Dependency dependency) {
final byte[] buffer = new byte[4];
try (final FileInputStream fileInputStream = new FileInputStream(dependency.getActualFilePath())) {
fileInputStream.read(buffer);
if (Arrays.equals(buffer, ZIP_FIRST_BYTES) || Arrays.equals(buffer, ZIP_EMPTY_FIRST_BYTES) ||
Arrays.equals(buffer, ZIP_SPANNED_FIRST_BYTES)) {
if (fileInputStream.read(buffer) > 0
&& (Arrays.equals(buffer, ZIP_FIRST_BYTES)
|| Arrays.equals(buffer, ZIP_EMPTY_FIRST_BYTES)
|| Arrays.equals(buffer, ZIP_SPANNED_FIRST_BYTES))) {
return true;
}
}
catch (Exception e) {
} catch (Exception e) {
LOGGER.warn("Unable to check if '{}' is a zip file", dependency.getActualFilePath());
LOGGER.trace("", e);
}
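The new isZipFile checks the return value of read() as well as the buffer contents, and unreadable files are simply treated as not-a-zip. A standalone sketch of the same check; the byte values are the standard ZIP signatures, which the analyzer keeps in its ZIP_*_FIRST_BYTES constants (assumes java.io and java.util.Arrays imports):

private static final byte[] ZIP_FIRST_BYTES = {0x50, 0x4B, 0x03, 0x04};         // "PK\3\4" local file header
private static final byte[] ZIP_EMPTY_FIRST_BYTES = {0x50, 0x4B, 0x05, 0x06};   // empty archive
private static final byte[] ZIP_SPANNED_FIRST_BYTES = {0x50, 0x4B, 0x07, 0x08}; // spanned archive

static boolean looksLikeZip(File f) {
    final byte[] buffer = new byte[4];
    try (FileInputStream in = new FileInputStream(f)) {
        return in.read(buffer) > 0
                && (Arrays.equals(buffer, ZIP_FIRST_BYTES)
                || Arrays.equals(buffer, ZIP_EMPTY_FIRST_BYTES)
                || Arrays.equals(buffer, ZIP_SPANNED_FIRST_BYTES));
    } catch (IOException e) {
        return false; // unreadable files are treated as not-a-zip, as above
    }
}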
@@ -381,7 +387,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
newDependency.setFileName(displayName);
|
||||
newDependency.setFilePath(displayPath);
|
||||
setPomEvidence(newDependency, pom, null);
|
||||
engine.getDependencies().add(newDependency);
|
||||
engine.addDependency(newDependency);
|
||||
} catch (AnalysisException ex) {
|
||||
LOGGER.warn("An error occurred while analyzing '{}'.", dependency.getActualFilePath());
|
||||
LOGGER.trace("", ex);
|
||||
@@ -521,15 +527,15 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
if (groupid != null && !groupid.isEmpty()) {
|
||||
foundSomething = true;
|
||||
dependency.getVendorEvidence().addEvidence("pom", "groupid", groupid, Confidence.HIGHEST);
|
||||
dependency.getProductEvidence().addEvidence("pom", "groupid", groupid, Confidence.LOW);
|
||||
addMatchingValues(classes, groupid, dependency.getVendorEvidence());
|
||||
addMatchingValues(classes, groupid, dependency.getProductEvidence());
|
||||
dependency.addEvidence(EvidenceType.VENDOR, "pom", "groupid", groupid, Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, "pom", "groupid", groupid, Confidence.LOW);
|
||||
addMatchingVendorValues(classes, groupid, dependency);
|
||||
addMatchingProductValues(classes, groupid, dependency);
|
||||
if (parentGroupId != null && !parentGroupId.isEmpty() && !parentGroupId.equals(groupid)) {
|
||||
dependency.getVendorEvidence().addEvidence("pom", "parent-groupid", parentGroupId, Confidence.MEDIUM);
|
||||
dependency.getProductEvidence().addEvidence("pom", "parent-groupid", parentGroupId, Confidence.LOW);
|
||||
addMatchingValues(classes, parentGroupId, dependency.getVendorEvidence());
|
||||
addMatchingValues(classes, parentGroupId, dependency.getProductEvidence());
|
||||
dependency.addEvidence(EvidenceType.VENDOR, "pom", "parent-groupid", parentGroupId, Confidence.MEDIUM);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, "pom", "parent-groupid", parentGroupId, Confidence.LOW);
|
||||
addMatchingVendorValues(classes, parentGroupId, dependency);
|
||||
addMatchingProductValues(classes, parentGroupId, dependency);
|
||||
}
|
||||
} else {
|
||||
addAsIdentifier = false;
|
||||
@@ -537,15 +543,15 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
if (artifactid != null && !artifactid.isEmpty()) {
|
||||
foundSomething = true;
|
||||
dependency.getProductEvidence().addEvidence("pom", "artifactid", artifactid, Confidence.HIGHEST);
|
||||
dependency.getVendorEvidence().addEvidence("pom", "artifactid", artifactid, Confidence.LOW);
|
||||
addMatchingValues(classes, artifactid, dependency.getVendorEvidence());
|
||||
addMatchingValues(classes, artifactid, dependency.getProductEvidence());
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, "pom", "artifactid", artifactid, Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, "pom", "artifactid", artifactid, Confidence.LOW);
|
||||
addMatchingVendorValues(classes, artifactid, dependency);
|
||||
addMatchingProductValues(classes, artifactid, dependency);
|
||||
if (parentArtifactId != null && !parentArtifactId.isEmpty() && !parentArtifactId.equals(artifactid)) {
|
||||
dependency.getProductEvidence().addEvidence("pom", "parent-artifactid", parentArtifactId, Confidence.MEDIUM);
|
||||
dependency.getVendorEvidence().addEvidence("pom", "parent-artifactid", parentArtifactId, Confidence.LOW);
|
||||
addMatchingValues(classes, parentArtifactId, dependency.getVendorEvidence());
|
||||
addMatchingValues(classes, parentArtifactId, dependency.getProductEvidence());
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, "pom", "parent-artifactid", parentArtifactId, Confidence.MEDIUM);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, "pom", "parent-artifactid", parentArtifactId, Confidence.LOW);
|
||||
addMatchingProductValues(classes, parentArtifactId, dependency);
|
||||
addMatchingProductValues(classes, parentArtifactId, dependency);
|
||||
}
|
||||
} else {
|
||||
addAsIdentifier = false;
|
||||
@@ -553,9 +559,9 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
if (version != null && !version.isEmpty()) {
|
||||
foundSomething = true;
|
||||
dependency.getVersionEvidence().addEvidence("pom", "version", version, Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.VERSION, "pom", "version", version, Confidence.HIGHEST);
|
||||
if (parentVersion != null && !parentVersion.isEmpty() && !parentVersion.equals(version)) {
|
||||
dependency.getVersionEvidence().addEvidence("pom", "parent-version", version, Confidence.LOW);
|
||||
dependency.addEvidence(EvidenceType.VERSION, "pom", "parent-version", version, Confidence.LOW);
|
||||
}
|
||||
} else {
|
||||
addAsIdentifier = false;
|
||||
@@ -568,26 +574,26 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
// org name
|
||||
final String org = pom.getOrganization();
|
||||
if (org != null && !org.isEmpty()) {
|
||||
dependency.getVendorEvidence().addEvidence("pom", "organization name", org, Confidence.HIGH);
|
||||
dependency.getProductEvidence().addEvidence("pom", "organization name", org, Confidence.LOW);
|
||||
addMatchingValues(classes, org, dependency.getVendorEvidence());
|
||||
addMatchingValues(classes, org, dependency.getProductEvidence());
|
||||
dependency.addEvidence(EvidenceType.VENDOR, "pom", "organization name", org, Confidence.HIGH);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, "pom", "organization name", org, Confidence.LOW);
|
||||
addMatchingVendorValues(classes, org, dependency);
|
||||
addMatchingProductValues(classes, org, dependency);
|
||||
}
|
||||
// org name
|
||||
final String orgUrl = pom.getOrganizationUrl();
|
||||
if (orgUrl != null && !orgUrl.isEmpty()) {
|
||||
dependency.getVendorEvidence().addEvidence("pom", "organization url", orgUrl, Confidence.MEDIUM);
|
||||
dependency.getProductEvidence().addEvidence("pom", "organization url", orgUrl, Confidence.LOW);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, "pom", "organization url", orgUrl, Confidence.MEDIUM);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, "pom", "organization url", orgUrl, Confidence.LOW);
|
||||
}
|
||||
//pom name
|
||||
final String pomName = pom.getName();
|
||||
if (pomName
|
||||
!= null && !pomName.isEmpty()) {
|
||||
foundSomething = true;
|
||||
dependency.getProductEvidence().addEvidence("pom", "name", pomName, Confidence.HIGH);
|
||||
dependency.getVendorEvidence().addEvidence("pom", "name", pomName, Confidence.HIGH);
|
||||
addMatchingValues(classes, pomName, dependency.getVendorEvidence());
|
||||
addMatchingValues(classes, pomName, dependency.getProductEvidence());
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, "pom", "name", pomName, Confidence.HIGH);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, "pom", "name", pomName, Confidence.HIGH);
|
||||
addMatchingVendorValues(classes, pomName, dependency);
|
||||
addMatchingProductValues(classes, pomName, dependency);
|
||||
}
|
||||
|
||||
//Description
|
||||
@@ -595,13 +601,13 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
if (description != null && !description.isEmpty() && !description.startsWith("POM was created by")) {
|
||||
foundSomething = true;
|
||||
final String trimmedDescription = addDescription(dependency, description, "pom", "description");
|
||||
addMatchingValues(classes, trimmedDescription, dependency.getVendorEvidence());
|
||||
addMatchingValues(classes, trimmedDescription, dependency.getProductEvidence());
|
||||
addMatchingVendorValues(classes, trimmedDescription, dependency);
|
||||
addMatchingProductValues(classes, trimmedDescription, dependency);
|
||||
}
|
||||
|
||||
final String projectURL = pom.getProjectURL();
|
||||
if (projectURL != null && !projectURL.trim().isEmpty()) {
|
||||
dependency.getVendorEvidence().addEvidence("pom", "url", projectURL, Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, "pom", "url", projectURL, Confidence.HIGHEST);
|
||||
}
|
||||
|
||||
extractLicense(pom, dependency);
|
||||
@@ -626,25 +632,24 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
analyzeFullyQualifiedClassNames(classNames, vendorIdentifiers, productIdentifiers);

final int classCount = classNames.size();
final EvidenceCollection vendor = dependency.getVendorEvidence();
final EvidenceCollection product = dependency.getProductEvidence();

for (Map.Entry<String, Integer> entry : vendorIdentifiers.entrySet()) {
final float ratio = entry.getValue() / (float) classCount;
if (ratio > 0.5) {
//TODO remove weighting
vendor.addWeighting(entry.getKey());
//TODO remove weighting?
dependency.addVendorWeighting(entry.getKey());
if (addPackagesAsEvidence && entry.getKey().length() > 1) {
vendor.addEvidence("jar", "package name", entry.getKey(), Confidence.LOW);
dependency.addEvidence(EvidenceType.VENDOR, "jar", "package name", entry.getKey(), Confidence.LOW);
}
}
}
for (Map.Entry<String, Integer> entry : productIdentifiers.entrySet()) {
final float ratio = entry.getValue() / (float) classCount;
if (ratio > 0.5) {
product.addWeighting(entry.getKey());
//todo remove weighting
dependency.addProductWeighting(entry.getKey());
if (addPackagesAsEvidence && entry.getKey().length() > 1) {
product.addEvidence("jar", "package name", entry.getKey(), Confidence.LOW);
dependency.addEvidence(EvidenceType.PRODUCT, "jar", "package name", entry.getKey(), Confidence.LOW);
}
}
}
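The heuristic above only promotes a package token to LOW-confidence evidence when it accounts for more than half of the classes in the archive. In isolation, and simplified (the counts and names are made up, and the analyzer additionally gates the evidence behind addPackagesAsEvidence):

int classCount = 40;                                  // assumed number of classes in the jar
Map<String, Integer> vendorIdentifiers = new HashMap<>();
vendorIdentifiers.put("springframework", 31);         // seen in 31 of 40 class names
vendorIdentifiers.put("internal", 4);

for (Map.Entry<String, Integer> entry : vendorIdentifiers.entrySet()) {
    final float ratio = entry.getValue() / (float) classCount;
    if (ratio > 0.5 && entry.getKey().length() > 1) { // only "springframework" qualifies
        dependency.addVendorWeighting(entry.getKey());
        dependency.addEvidence(EvidenceType.VENDOR, "jar", "package name",
                entry.getKey(), Confidence.LOW);
    }
}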
@@ -681,9 +686,6 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
return false;
|
||||
}
|
||||
final EvidenceCollection vendorEvidence = dependency.getVendorEvidence();
|
||||
final EvidenceCollection productEvidence = dependency.getProductEvidence();
|
||||
final EvidenceCollection versionEvidence = dependency.getVersionEvidence();
|
||||
String source = "Manifest";
|
||||
String specificationVersion = null;
|
||||
boolean hasImplementationVersion = false;
|
||||
@@ -695,39 +697,41 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
value = Jsoup.parse(value).text();
|
||||
}
|
||||
if (IGNORE_VALUES.contains(value)) {
|
||||
//noinspection UnnecessaryContinue
|
||||
continue;
|
||||
} else if (key.equalsIgnoreCase(Attributes.Name.IMPLEMENTATION_TITLE.toString())) {
|
||||
foundSomething = true;
|
||||
productEvidence.addEvidence(source, key, value, Confidence.HIGH);
|
||||
addMatchingValues(classInformation, value, productEvidence);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, source, key, value, Confidence.HIGH);
|
||||
addMatchingProductValues(classInformation, value, dependency);
|
||||
} else if (key.equalsIgnoreCase(Attributes.Name.IMPLEMENTATION_VERSION.toString())) {
|
||||
hasImplementationVersion = true;
|
||||
foundSomething = true;
|
||||
versionEvidence.addEvidence(source, key, value, Confidence.HIGH);
|
||||
dependency.addEvidence(EvidenceType.VERSION, source, key, value, Confidence.HIGH);
|
||||
} else if ("specification-version".equalsIgnoreCase(key)) {
|
||||
specificationVersion = value;
|
||||
} else if (key.equalsIgnoreCase(Attributes.Name.IMPLEMENTATION_VENDOR.toString())) {
|
||||
foundSomething = true;
|
||||
vendorEvidence.addEvidence(source, key, value, Confidence.HIGH);
|
||||
addMatchingValues(classInformation, value, vendorEvidence);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, source, key, value, Confidence.HIGH);
|
||||
addMatchingVendorValues(classInformation, value, dependency);
|
||||
} else if (key.equalsIgnoreCase(IMPLEMENTATION_VENDOR_ID)) {
|
||||
foundSomething = true;
|
||||
vendorEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
|
||||
addMatchingValues(classInformation, value, vendorEvidence);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, source, key, value, Confidence.MEDIUM);
|
||||
addMatchingVendorValues(classInformation, value, dependency);
|
||||
} else if (key.equalsIgnoreCase(BUNDLE_DESCRIPTION)) {
|
||||
foundSomething = true;
|
||||
addDescription(dependency, value, "manifest", key);
|
||||
addMatchingValues(classInformation, value, productEvidence);
|
||||
addMatchingProductValues(classInformation, value, dependency);
|
||||
} else if (key.equalsIgnoreCase(BUNDLE_NAME)) {
|
||||
foundSomething = true;
|
||||
productEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
|
||||
addMatchingValues(classInformation, value, productEvidence);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, source, key, value, Confidence.MEDIUM);
|
||||
addMatchingProductValues(classInformation, value, dependency);
|
||||
// //the following caused false positives.
|
||||
// } else if (key.equalsIgnoreCase(BUNDLE_VENDOR)) {
|
||||
} else if (key.equalsIgnoreCase(BUNDLE_VERSION)) {
|
||||
foundSomething = true;
|
||||
versionEvidence.addEvidence(source, key, value, Confidence.HIGH);
|
||||
dependency.addEvidence(EvidenceType.VERSION, source, key, value, Confidence.HIGH);
|
||||
} else if (key.equalsIgnoreCase(Attributes.Name.MAIN_CLASS.toString())) {
|
||||
//noinspection UnnecessaryContinue
|
||||
continue;
|
||||
//skipping main class as if this has important information to add it will be added during class name analysis...
|
||||
} else {
|
||||
@@ -746,7 +750,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
foundSomething = true;
|
||||
if (key.contains("version")) {
|
||||
if (!key.contains("specification")) {
|
||||
versionEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
|
||||
dependency.addEvidence(EvidenceType.VERSION, source, key, value, Confidence.MEDIUM);
|
||||
}
|
||||
} else if ("build-id".equals(key)) {
|
||||
int pos = value.indexOf('(');
|
||||
@@ -757,37 +761,37 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
if (pos > 0) {
|
||||
value = value.substring(0, pos - 1);
|
||||
}
|
||||
versionEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
|
||||
dependency.addEvidence(EvidenceType.VERSION, source, key, value, Confidence.MEDIUM);
|
||||
} else if (key.contains("title")) {
|
||||
productEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
|
||||
addMatchingValues(classInformation, value, productEvidence);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, source, key, value, Confidence.MEDIUM);
|
||||
addMatchingProductValues(classInformation, value, dependency);
|
||||
} else if (key.contains("vendor")) {
|
||||
if (key.contains("specification")) {
|
||||
vendorEvidence.addEvidence(source, key, value, Confidence.LOW);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, source, key, value, Confidence.LOW);
|
||||
} else {
|
||||
vendorEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
|
||||
addMatchingValues(classInformation, value, vendorEvidence);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, source, key, value, Confidence.MEDIUM);
|
||||
addMatchingVendorValues(classInformation, value, dependency);
|
||||
}
|
||||
} else if (key.contains("name")) {
|
||||
productEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
|
||||
vendorEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
|
||||
addMatchingValues(classInformation, value, vendorEvidence);
|
||||
addMatchingValues(classInformation, value, productEvidence);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, source, key, value, Confidence.MEDIUM);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, source, key, value, Confidence.MEDIUM);
|
||||
addMatchingVendorValues(classInformation, value, dependency);
|
||||
addMatchingProductValues(classInformation, value, dependency);
|
||||
} else if (key.contains("license")) {
|
||||
addLicense(dependency, value);
|
||||
} else if (key.contains("description")) {
|
||||
addDescription(dependency, value, "manifest", key);
|
||||
} else {
|
||||
productEvidence.addEvidence(source, key, value, Confidence.LOW);
|
||||
vendorEvidence.addEvidence(source, key, value, Confidence.LOW);
|
||||
addMatchingValues(classInformation, value, vendorEvidence);
|
||||
addMatchingValues(classInformation, value, productEvidence);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, source, key, value, Confidence.LOW);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, source, key, value, Confidence.LOW);
|
||||
addMatchingVendorValues(classInformation, value, dependency);
|
||||
addMatchingProductValues(classInformation, value, dependency);
|
||||
if (value.matches(".*\\d.*")) {
|
||||
final StringTokenizer tokenizer = new StringTokenizer(value, " ");
|
||||
while (tokenizer.hasMoreElements()) {
|
||||
final String s = tokenizer.nextToken();
|
||||
if (s.matches("^[0-9.]+$")) {
|
||||
versionEvidence.addEvidence(source, key, s, Confidence.LOW);
|
||||
dependency.addEvidence(EvidenceType.VERSION, source, key, s, Confidence.LOW);
|
||||
}
|
||||
}
|
||||
}
|
||||
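For reference, a minimal standalone sketch (JDK classes only, hypothetical class and method names) of the numeric-token scan performed in the fallback branch above, which keeps purely numeric whitespace-separated tokens as low-confidence version candidates:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

// Minimal sketch of the fallback scan above: when a manifest value contains a digit,
// split it on spaces and keep purely numeric tokens as low-confidence version candidates.
public final class VersionTokenScan {

    public static List<String> numericTokens(String value) {
        final List<String> candidates = new ArrayList<>();
        if (value != null && value.matches(".*\\d.*")) {
            final StringTokenizer tokenizer = new StringTokenizer(value, " ");
            while (tokenizer.hasMoreTokens()) {
                final String s = tokenizer.nextToken();
                if (s.matches("^[0-9.]+$")) {   // e.g. "2.9.1" but not "(release)"
                    candidates.add(s);
                }
            }
        }
        return candidates;
    }

    public static void main(String[] args) {
        // prints [2.9.1]
        System.out.println(numericTokens("built by hudson 2.9.1 (release)"));
    }
}
```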
@@ -804,25 +808,25 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
final String value = atts.getValue(key);
|
||||
if (key.equalsIgnoreCase(Attributes.Name.IMPLEMENTATION_TITLE.toString())) {
|
||||
foundSomething = true;
|
||||
productEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
|
||||
addMatchingValues(classInformation, value, productEvidence);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, source, key, value, Confidence.MEDIUM);
|
||||
addMatchingProductValues(classInformation, value, dependency);
|
||||
} else if (key.equalsIgnoreCase(Attributes.Name.IMPLEMENTATION_VERSION.toString())) {
|
||||
foundSomething = true;
|
||||
versionEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
|
||||
dependency.addEvidence(EvidenceType.VERSION, source, key, value, Confidence.MEDIUM);
|
||||
} else if (key.equalsIgnoreCase(Attributes.Name.IMPLEMENTATION_VENDOR.toString())) {
|
||||
foundSomething = true;
|
||||
vendorEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
|
||||
addMatchingValues(classInformation, value, vendorEvidence);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, source, key, value, Confidence.MEDIUM);
|
||||
addMatchingVendorValues(classInformation, value, dependency);
|
||||
} else if (key.equalsIgnoreCase(Attributes.Name.SPECIFICATION_TITLE.toString())) {
|
||||
foundSomething = true;
|
||||
productEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
|
||||
addMatchingValues(classInformation, value, productEvidence);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, source, key, value, Confidence.MEDIUM);
|
||||
addMatchingProductValues(classInformation, value, dependency);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (specificationVersion != null && !hasImplementationVersion) {
|
||||
foundSomething = true;
|
||||
versionEvidence.addEvidence(source, "specification-version", specificationVersion, Confidence.HIGH);
|
||||
dependency.addEvidence(EvidenceType.VERSION, source, "specification-version", specificationVersion, Confidence.HIGH);
|
||||
}
|
||||
}
|
||||
return foundSomething;
|
||||
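The hunk above maps the standard manifest main attributes onto product, version, and vendor evidence; here is a small JDK-only sketch of reading those same attributes (the manifest path is a hypothetical example):

```java
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.jar.Attributes;
import java.util.jar.Manifest;

// Sketch (JDK only): pull the same main-attribute values the hunk above turns into
// PRODUCT, VERSION, and VENDOR evidence.
public final class ManifestPeek {

    public static void main(String[] args) throws IOException {
        try (InputStream in = Files.newInputStream(Paths.get("META-INF/MANIFEST.MF"))) {
            final Attributes atts = new Manifest(in).getMainAttributes();
            System.out.println("product: " + atts.getValue(Attributes.Name.IMPLEMENTATION_TITLE));
            System.out.println("version: " + atts.getValue(Attributes.Name.IMPLEMENTATION_VERSION));
            System.out.println("vendor : " + atts.getValue(Attributes.Name.IMPLEMENTATION_VENDOR));
            System.out.println("spec   : " + atts.getValue(Attributes.Name.SPECIFICATION_TITLE));
        }
    }
}
```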
@@ -880,11 +884,11 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
if (pos > 0) {
|
||||
desc = desc.substring(0, pos) + "...";
|
||||
}
|
||||
dependency.getProductEvidence().addEvidence(source, key, desc, Confidence.LOW);
|
||||
dependency.getVendorEvidence().addEvidence(source, key, desc, Confidence.LOW);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, source, key, desc, Confidence.LOW);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, source, key, desc, Confidence.LOW);
|
||||
} else {
|
||||
dependency.getProductEvidence().addEvidence(source, key, desc, Confidence.MEDIUM);
|
||||
dependency.getVendorEvidence().addEvidence(source, key, desc, Confidence.MEDIUM);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, source, key, desc, Confidence.MEDIUM);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, source, key, desc, Confidence.MEDIUM);
|
||||
}
|
||||
return desc;
|
||||
}
|
||||
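A short sketch of the truncation rule used above: text that is cut gets a trailing "..." and is recorded as weaker (LOW) evidence, while short descriptions are kept whole at MEDIUM. The cutoff value below is an assumed placeholder, not the position the analyzer computes:

```java
// Sketch of the truncation above; the cutoff is illustrative only.
public final class DescriptionTrim {

    static String truncate(String desc, int cutoff) {
        if (desc != null && desc.length() > cutoff) {
            return desc.substring(0, cutoff) + "...";
        }
        return desc;
    }

    public static void main(String[] args) {
        // prints the first 20 characters followed by "..."
        System.out.println(truncate("A very long library description that keeps going", 20));
    }
}
```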
@@ -911,13 +915,14 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Initializes the JarAnalyzer.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException is thrown if there is an exception
|
||||
* creating a temporary directory
|
||||
*/
|
||||
@Override
|
||||
public void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
public void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
|
||||
try {
|
||||
final File baseDir = Settings.getTempDirectory();
|
||||
final File baseDir = getSettings().getTempDirectory();
|
||||
tempFileLocation = File.createTempFile("check", "tmp", baseDir);
|
||||
if (!tempFileLocation.delete()) {
|
||||
final String msg = String.format("Unable to delete temporary file '%s'.", tempFileLocation.getAbsolutePath());
|
||||
@@ -1053,9 +1058,9 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*
|
||||
* @param classes a collection of class name information
|
||||
* @param value the value to check to see if it contains a package name
|
||||
* @param evidence the evidence collection to add new entries to
|
||||
* @param dep the dependency to add new entries to
|
||||
*/
|
||||
private static void addMatchingValues(List<ClassNameInformation> classes, String value, EvidenceCollection evidence) {
|
||||
private static void addMatchingVendorValues(List<ClassNameInformation> classes, String value, Dependency dep) {
|
||||
if (value == null || value.isEmpty() || classes == null || classes.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
@@ -1065,7 +1070,32 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
final Pattern p = Pattern.compile("\\b" + key + "\\b");
|
||||
if (p.matcher(text).find()) {
|
||||
//if (text.contains(key)) { //note, package structure elements are already lowercase.
|
||||
evidence.addEvidence("jar", "package name", key, Confidence.HIGHEST);
|
||||
dep.addEvidence(EvidenceType.VENDOR, "jar", "package name", key, Confidence.HIGHEST);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Cycles through the collection of class name information to see if parts
|
||||
* of the package names are contained in the provided value. If found, it
|
||||
* will be added as the HIGHEST confidence evidence because we have more
|
||||
* than one source corroborating the value.
|
||||
*
|
||||
* @param classes a collection of class name information
|
||||
* @param value the value to check to see if it contains a package name
|
||||
* @param dep the dependency to add new entries to
|
||||
*/
|
||||
private static void addMatchingProductValues(List<ClassNameInformation> classes, String value, Dependency dep) {
|
||||
if (value == null || value.isEmpty() || classes == null || classes.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
final String text = value.toLowerCase();
|
||||
for (ClassNameInformation cni : classes) {
|
||||
for (String key : cni.getPackageStructure()) {
|
||||
final Pattern p = Pattern.compile("\\b" + key + "\\b");
|
||||
if (p.matcher(text).find()) {
|
||||
//if (text.contains(key)) { //note, package structure elements are already lowercase.
|
||||
dep.addEvidence(EvidenceType.PRODUCT, "jar", "package name", key, Confidence.HIGHEST);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
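A standalone sketch of the package-structure match above. Pattern.quote is an extra safeguard added here for keys containing regex metacharacters; it is not something the original code does:

```java
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;

// Sketch of the package-name match above: the lowercased value is searched for each
// package-structure element on word boundaries.
public final class PackageNameMatch {

    public static boolean containsKey(String value, List<String> packageStructure) {
        final String text = value.toLowerCase();
        for (String key : packageStructure) {
            final Pattern p = Pattern.compile("\\b" + Pattern.quote(key) + "\\b");
            if (p.matcher(text).find()) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        // true: "jackson" appears as a whole word in the lowercased title
        System.out.println(containsKey("Jackson Databind",
                Arrays.asList("com", "fasterxml", "jackson")));
    }
}
```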
@@ -35,6 +35,8 @@ import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.DownloadFailedException;
|
||||
import org.owasp.dependencycheck.utils.Downloader;
|
||||
@@ -59,6 +61,7 @@ import org.owasp.dependencycheck.utils.Settings;
|
||||
*
|
||||
* @author colezlaw
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -95,7 +98,18 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Field indicating if the analyzer is enabled.
|
||||
*/
|
||||
private final boolean enabled = checkEnabled();
|
||||
private boolean enabled = true;
|
||||
|
||||
/**
|
||||
* Initializes the analyzer with the configured settings.
|
||||
*
|
||||
* @param settings the configured settings to use
|
||||
*/
|
||||
@Override
|
||||
public void initialize(Settings settings) {
|
||||
super.initialize(settings);
|
||||
enabled = checkEnabled();
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if this analyzer is enabled
|
||||
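A hedged sketch, with stand-in types, of the settings-injection pattern shown above: the enabled flag can no longer be computed at field initialization, so it gets a cheap default and is recomputed once initialize(Settings) has run:

```java
// Stand-in types only; this is not the project's class hierarchy.
class Settings {
    boolean getBoolean(String key) { return true; }
}

abstract class ConfigurableAnalyzer {
    private Settings settings;
    void initialize(Settings settings) { this.settings = settings; }
    Settings getSettings() { return settings; }
}

class ExampleAnalyzer extends ConfigurableAnalyzer {

    private boolean enabled = true;    // cheap default until settings arrive

    @Override
    void initialize(Settings settings) {
        super.initialize(settings);
        // recompute once the injected configuration is available
        enabled = getSettings().getBoolean("analyzer.example.enabled");
    }

    boolean isEnabled() { return enabled; }
}
```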
@@ -110,8 +124,8 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*/
|
||||
boolean retval = false;
|
||||
try {
|
||||
if (!DEFAULT_URL.equals(Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL))
|
||||
&& Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED)) {
|
||||
if (!DEFAULT_URL.equals(getSettings().getString(Settings.KEYS.ANALYZER_NEXUS_URL))
|
||||
&& getSettings().getBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED)) {
|
||||
LOGGER.info("Enabling Nexus analyzer");
|
||||
retval = true;
|
||||
} else {
|
||||
@@ -137,25 +151,25 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Initializes the analyzer once before any analysis is performed.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException if there's an error during initialization
|
||||
*/
|
||||
@Override
|
||||
public void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
public void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
|
||||
LOGGER.debug("Initializing Nexus Analyzer");
|
||||
LOGGER.debug("Nexus Analyzer enabled: {}", isEnabled());
|
||||
if (isEnabled()) {
|
||||
final boolean useProxy = useProxy();
|
||||
final String searchUrl = Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL);
|
||||
LOGGER.debug("Nexus Analyzer URL: {}", searchUrl);
|
||||
LOGGER.debug("Using proxy: {}", useProxy);
|
||||
try {
|
||||
searcher = new NexusSearch(new URL(searchUrl), useProxy);
|
||||
searcher = new NexusSearch(getSettings(), useProxy);
|
||||
if (!searcher.preflightRequest()) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("There was an issue getting Nexus status. Disabling analyzer.");
|
||||
}
|
||||
} catch (MalformedURLException mue) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Malformed URL to Nexus: " + searchUrl, mue);
|
||||
throw new InitializationException("Malformed URL to Nexus", mue);
|
||||
}
|
||||
}
|
||||
}
|
||||
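A rough sketch of the preflight idea above: issue one cheap request against the configured service before enabling a network-backed analyzer, and disable it on failure. The status URL below is hypothetical and not necessarily what NexusSearch requests:

```java
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

// Sketch of a preflight reachability check used to decide whether to keep a
// network-backed analyzer enabled.
public final class PreflightCheck {

    public static boolean preflight(String serviceUrl) {
        try {
            final HttpURLConnection conn =
                    (HttpURLConnection) new URL(serviceUrl).openConnection();
            conn.setConnectTimeout(5_000);
            conn.setReadTimeout(5_000);
            conn.setRequestMethod("GET");
            return conn.getResponseCode() == HttpURLConnection.HTTP_OK;
        } catch (IOException e) {
            return false;   // unreachable or malformed URL -> treat as unusable
        }
    }

    public static void main(String[] args) {
        System.out.println(preflight("https://repository.example.org/nexus/service/local/status"));
    }
}
```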
@@ -223,7 +237,7 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
dependency.addAsEvidence("nexus", ma, Confidence.HIGH);
|
||||
boolean pomAnalyzed = false;
|
||||
LOGGER.debug("POM URL {}", ma.getPomUrl());
|
||||
for (Evidence e : dependency.getVendorEvidence()) {
|
||||
for (Evidence e : dependency.getEvidence(EvidenceType.VENDOR)) {
|
||||
if ("pom".equals(e.getSource())) {
|
||||
pomAnalyzed = true;
|
||||
break;
|
||||
@@ -232,7 +246,7 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
if (!pomAnalyzed && ma.getPomUrl() != null) {
|
||||
File pomFile = null;
|
||||
try {
|
||||
final File baseDir = Settings.getTempDirectory();
|
||||
final File baseDir = getSettings().getTempDirectory();
|
||||
pomFile = File.createTempFile("pom", ".xml", baseDir);
|
||||
if (!pomFile.delete()) {
|
||||
LOGGER.warn("Unable to fetch pom.xml for {} from Nexus repository; "
|
||||
@@ -240,7 +254,8 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
LOGGER.debug("Unable to delete temp file");
|
||||
}
|
||||
LOGGER.debug("Downloading {}", ma.getPomUrl());
|
||||
Downloader.fetchFile(new URL(ma.getPomUrl()), pomFile);
|
||||
final Downloader downloader = new Downloader(getSettings());
|
||||
downloader.fetchFile(new URL(ma.getPomUrl()), pomFile);
|
||||
PomUtils.analyzePOM(dependency, pomFile);
|
||||
} catch (DownloadFailedException ex) {
|
||||
LOGGER.warn("Unable to download pom.xml for {} from Nexus repository; "
|
||||
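A JDK-only sketch of the pom download step above; the real code goes through the project's Settings and Downloader so proxy and timeout configuration is honored:

```java
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

// Sketch: create a unique temp file under the configured base directory and copy the
// remote pom into it before handing it to the pom analyzer.
public final class PomFetch {

    public static Path fetchPom(String pomUrl, Path baseDir) throws IOException {
        final Path pomFile = Files.createTempFile(baseDir, "pom", ".xml");
        try (InputStream in = new URL(pomUrl).openStream()) {
            Files.copy(in, pomFile, StandardCopyOption.REPLACE_EXISTING);
        }
        return pomFile;
    }
}
```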
@@ -266,14 +281,14 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if a proxy should be used.
|
||||
* Determine if a proxy should be used for the Nexus Analyzer.
|
||||
*
|
||||
* @return {@code true} if a proxy should be used
|
||||
*/
|
||||
public static boolean useProxy() {
|
||||
public boolean useProxy() {
|
||||
try {
|
||||
return Settings.getString(Settings.KEYS.PROXY_SERVER) != null
|
||||
&& Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY);
|
||||
return getSettings().getString(Settings.KEYS.PROXY_SERVER) != null
|
||||
&& getSettings().getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY);
|
||||
} catch (InvalidSettingException ise) {
|
||||
LOGGER.warn("Failed to parse proxy settings.", ise);
|
||||
return false;
|
||||
|
||||
@@ -22,7 +22,6 @@ import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
@@ -32,6 +31,7 @@ import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import javax.json.Json;
|
||||
import javax.json.JsonException;
|
||||
import javax.json.JsonObject;
|
||||
@@ -39,6 +39,7 @@ import javax.json.JsonReader;
|
||||
import javax.json.JsonString;
|
||||
import javax.json.JsonValue;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
|
||||
/**
|
||||
* Used to analyze Node Package Manager (npm) package.json files, and collect
|
||||
@@ -47,6 +48,7 @@ import org.owasp.dependencycheck.exception.InitializationException;
|
||||
* @author Dale Visser
|
||||
*/
|
||||
@Experimental
|
||||
@ThreadSafe
|
||||
public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -85,7 +87,7 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
protected void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
|
||||
// NO-OP
|
||||
}
|
||||
|
||||
@@ -123,26 +125,25 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
final File file = dependency.getActualFile();
|
||||
if (!file.isFile() || file.length()==0) {
|
||||
if (!file.isFile() || file.length() == 0) {
|
||||
return;
|
||||
}
|
||||
try (JsonReader jsonReader = Json.createReader(FileUtils.openInputStream(file))) {
|
||||
final JsonObject json = jsonReader.readObject();
|
||||
final EvidenceCollection productEvidence = dependency.getProductEvidence();
|
||||
final EvidenceCollection vendorEvidence = dependency.getVendorEvidence();
|
||||
if (json.containsKey("name")) {
|
||||
final Object value = json.get("name");
|
||||
if (value instanceof JsonString) {
|
||||
final String valueString = ((JsonString) value).getString();
|
||||
productEvidence.addEvidence(PACKAGE_JSON, "name", valueString, Confidence.HIGHEST);
|
||||
vendorEvidence.addEvidence(PACKAGE_JSON, "name_project", String.format("%s_project", valueString), Confidence.LOW);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, PACKAGE_JSON, "name", valueString, Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, PACKAGE_JSON, "name_project",
|
||||
String.format("%s_project", valueString), Confidence.LOW);
|
||||
} else {
|
||||
LOGGER.warn("JSON value not string as expected: {}", value);
|
||||
}
|
||||
}
|
||||
addToEvidence(json, productEvidence, "description");
|
||||
addToEvidence(json, vendorEvidence, "author");
|
||||
addToEvidence(json, dependency.getVersionEvidence(), "version");
|
||||
addToEvidence(dependency, EvidenceType.PRODUCT, json, "description");
|
||||
addToEvidence(dependency, EvidenceType.VENDOR, json, "author");
|
||||
addToEvidence(dependency, EvidenceType.VERSION, json, "version");
|
||||
dependency.setDisplayFileName(String.format("%s/%s", file.getParentFile().getName(), file.getName()));
|
||||
} catch (JsonException e) {
|
||||
LOGGER.warn("Failed to parse package.json file.", e);
|
||||
@@ -155,22 +156,24 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* Adds information to an evidence collection from the node json
|
||||
* configuration.
|
||||
*
|
||||
* @param dep the dependency to add the evidence
|
||||
* @param t the type of evidence to add
|
||||
* @param json information from node.js
|
||||
* @param collection a set of evidence about a dependency
|
||||
* @param key the key to obtain the data from the json information
|
||||
*/
|
||||
private void addToEvidence(JsonObject json, EvidenceCollection collection, String key) {
|
||||
private void addToEvidence(Dependency dep, EvidenceType t, JsonObject json, String key) {
|
||||
if (json.containsKey(key)) {
|
||||
final JsonValue value = json.get(key);
|
||||
if (value instanceof JsonString) {
|
||||
collection.addEvidence(PACKAGE_JSON, key, ((JsonString) value).getString(), Confidence.HIGHEST);
|
||||
dep.addEvidence(t, PACKAGE_JSON, key, ((JsonString) value).getString(), Confidence.HIGHEST);
|
||||
|
||||
} else if (value instanceof JsonObject) {
|
||||
final JsonObject jsonObject = (JsonObject) value;
|
||||
for (final Map.Entry<String, JsonValue> entry : jsonObject.entrySet()) {
|
||||
final String property = entry.getKey();
|
||||
final JsonValue subValue = entry.getValue();
|
||||
if (subValue instanceof JsonString) {
|
||||
collection.addEvidence(PACKAGE_JSON,
|
||||
dep.addEvidence(t, PACKAGE_JSON,
|
||||
String.format("%s.%s", key, property),
|
||||
((JsonString) subValue).getString(),
|
||||
Confidence.HIGHEST);
|
||||
|
||||
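A sketch of the addToEvidence flattening above: a string field becomes a single entry, while an object field such as author becomes dotted keys (author.name, author.email). Class and method names here are illustrative:

```java
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.LinkedHashMap;
import java.util.Map;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonReader;
import javax.json.JsonString;
import javax.json.JsonValue;

// Sketch of flattening selected package.json fields into key/value pairs the way the
// hunk above turns them into evidence.
public final class PackageJsonFields {

    public static Map<String, String> flatten(Path packageJson, String... keys) throws IOException {
        final Map<String, String> out = new LinkedHashMap<>();
        try (InputStream in = Files.newInputStream(packageJson);
                JsonReader reader = Json.createReader(in)) {
            final JsonObject json = reader.readObject();
            for (String key : keys) {
                final JsonValue value = json.get(key);
                if (value instanceof JsonString) {
                    out.put(key, ((JsonString) value).getString());
                } else if (value instanceof JsonObject) {
                    // e.g. "author": {"name": "...", "email": "..."} -> author.name, author.email
                    for (Map.Entry<String, JsonValue> e : ((JsonObject) value).entrySet()) {
                        if (e.getValue() instanceof JsonString) {
                            out.put(key + "." + e.getKey(), ((JsonString) e.getValue()).getString());
                        }
                    }
                }
            }
        }
        return out;
    }
}
```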
@@ -25,7 +25,6 @@ import org.owasp.dependencycheck.data.nsp.NspSearch;
|
||||
import org.owasp.dependencycheck.data.nsp.SanitizePackage;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.dependency.Identifier;
|
||||
import org.owasp.dependencycheck.dependency.Vulnerability;
|
||||
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
|
||||
@@ -37,11 +36,11 @@ import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import javax.json.Json;
|
||||
import javax.json.JsonArray;
|
||||
import javax.json.JsonException;
|
||||
@@ -50,6 +49,7 @@ import javax.json.JsonObjectBuilder;
|
||||
import javax.json.JsonReader;
|
||||
import javax.json.JsonString;
|
||||
import javax.json.JsonValue;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.URLConnectionFailureException;
|
||||
|
||||
@@ -59,6 +59,7 @@ import org.owasp.dependencycheck.utils.URLConnectionFailureException;
|
||||
*
|
||||
* @author Steve Springett
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class NspAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -100,17 +101,17 @@ public class NspAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Initializes the analyzer once before any analysis is performed.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException if there's an error during initialization
|
||||
*/
|
||||
@Override
|
||||
public void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
public void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
|
||||
LOGGER.debug("Initializing {}", getName());
|
||||
final String searchUrl = Settings.getString(Settings.KEYS.ANALYZER_NSP_URL, DEFAULT_URL);
|
||||
try {
|
||||
searcher = new NspSearch(new URL(searchUrl));
|
||||
searcher = new NspSearch(getSettings());
|
||||
} catch (MalformedURLException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("The configured URL to Node Security Platform is malformed: " + searchUrl, ex);
|
||||
throw new InitializationException("The configured URL to Node Security Platform is malformed", ex);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -148,7 +149,7 @@ public class NspAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
final File file = dependency.getActualFile();
|
||||
if (!file.isFile() || file.length()==0) {
|
||||
if (!file.isFile() || file.length() == 0) {
|
||||
return;
|
||||
}
|
||||
try (JsonReader jsonReader = Json.createReader(FileUtils.openInputStream(file))) {
|
||||
@@ -197,20 +198,19 @@ public class NspAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
vuln.setVulnerableSoftware(new HashSet<>(Arrays.asList(vs)));
|
||||
|
||||
// Add the vulnerability to package.json
|
||||
dependency.getVulnerabilities().add(vuln);
|
||||
dependency.addVulnerability(vuln);
|
||||
}
|
||||
|
||||
/*
|
||||
* Adds evidence about the node package itself, not any of the modules.
|
||||
*/
|
||||
final EvidenceCollection productEvidence = dependency.getProductEvidence();
|
||||
final EvidenceCollection vendorEvidence = dependency.getVendorEvidence();
|
||||
if (packageJson.containsKey("name")) {
|
||||
final Object value = packageJson.get("name");
|
||||
if (value instanceof JsonString) {
|
||||
final String valueString = ((JsonString) value).getString();
|
||||
productEvidence.addEvidence(PACKAGE_JSON, "name", valueString, Confidence.HIGHEST);
|
||||
vendorEvidence.addEvidence(PACKAGE_JSON, "name_project", String.format("%s_project", valueString), Confidence.LOW);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, PACKAGE_JSON, "name", valueString, Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, PACKAGE_JSON, "name_project",
|
||||
String.format("%s_project", valueString), Confidence.LOW);
|
||||
} else {
|
||||
LOGGER.warn("JSON value not string as expected: {}", value);
|
||||
}
|
||||
@@ -259,9 +259,9 @@ public class NspAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/*
|
||||
* Adds general evidence about the package.
|
||||
*/
|
||||
addToEvidence(packageJson, productEvidence, "description");
|
||||
addToEvidence(packageJson, vendorEvidence, "author");
|
||||
addToEvidence(packageJson, dependency.getVersionEvidence(), "version");
|
||||
addToEvidence(dependency, EvidenceType.PRODUCT, packageJson, "description");
|
||||
addToEvidence(dependency, EvidenceType.VENDOR, packageJson, "author");
|
||||
addToEvidence(dependency, EvidenceType.VERSION, packageJson, "version");
|
||||
dependency.setDisplayFileName(String.format("%s/%s", file.getParentFile().getName(), file.getName()));
|
||||
} catch (URLConnectionFailureException e) {
|
||||
this.setEnabled(false);
|
||||
@@ -276,19 +276,19 @@ public class NspAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes a part of package.json (as defined by JsonArray) and updates
|
||||
* the specified dependency with relevant info.
|
||||
* Processes a part of package.json (as defined by JsonArray) and updates the
|
||||
* specified dependency with relevant info.
|
||||
*
|
||||
* @param dependency the Dependency to update
|
||||
* @param jsonArray the jsonArray to parse
|
||||
* @param depType the dependency type
|
||||
*/
|
||||
private void processPackage(Dependency dependency, JsonArray jsonArray, String depType) {
|
||||
JsonObjectBuilder builder = Json.createObjectBuilder();
|
||||
final JsonObjectBuilder builder = Json.createObjectBuilder();
|
||||
for (JsonString str : jsonArray.getValuesAs(JsonString.class)) {
|
||||
builder.add(str.toString(), "");
|
||||
}
|
||||
JsonObject jsonObject = builder.build();
|
||||
final JsonObject jsonObject = builder.build();
|
||||
processPackage(dependency, jsonObject, depType);
|
||||
}
|
||||
|
||||
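A sketch of the array-to-object conversion above. Note that JsonString.toString() returns the JSON-escaped form including the surrounding quotes, while getString() returns the raw value; the hunk above uses toString(), so the generated keys carry literal quote characters, which may or may not be intended:

```java
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.json.JsonString;

// Sketch: turn a JSON array of dependency names into an object with empty values,
// using getString() so the keys stay unquoted.
public final class ArrayToObject {

    public static JsonObject toObject(JsonArray names) {
        final JsonObjectBuilder builder = Json.createObjectBuilder();
        for (JsonString name : names.getValuesAs(JsonString.class)) {
            builder.add(name.getString(), "");
        }
        return builder.build();
    }

    public static void main(String[] args) {
        final JsonArray names = Json.createArrayBuilder().add("express").add("lodash").build();
        // prints {"express":"","lodash":""}
        System.out.println(toObject(names));
    }
}
```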
@@ -324,9 +324,12 @@ public class NspAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* dependency will not actually exist but needs to be unique (due to the use of Set in Dependency).
|
||||
* The use of related dependencies is a way to specify the actual software BOM in package.json.
|
||||
*/
|
||||
//TODO is this actually correct? or should these be transitive dependencies?
|
||||
final Dependency nodeModule = new Dependency(new File(dependency.getActualFile() + "#" + entry.getKey()), true);
|
||||
nodeModule.setDisplayFileName(entry.getKey());
|
||||
nodeModule.setIdentifiers(new HashSet<>(Arrays.asList(moduleName, moduleVersion, moduleDepType)));
|
||||
nodeModule.addIdentifier(moduleName);
|
||||
nodeModule.addIdentifier(moduleVersion);
|
||||
nodeModule.addIdentifier(moduleDepType);
|
||||
dependency.addRelatedDependency(nodeModule);
|
||||
}
|
||||
}
|
||||
@@ -336,22 +339,23 @@ public class NspAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* Adds information to an evidence collection from the node json
|
||||
* configuration.
|
||||
*
|
||||
* @param dep the dependency to which the evidence will be added
|
||||
* @param type the type of evidence to be added
|
||||
* @param json information from node.js
|
||||
* @param collection a set of evidence about a dependency
|
||||
* @param key the key to obtain the data from the json information
|
||||
*/
|
||||
private void addToEvidence(JsonObject json, EvidenceCollection collection, String key) {
|
||||
private void addToEvidence(Dependency dep, EvidenceType type, JsonObject json, String key) {
|
||||
if (json.containsKey(key)) {
|
||||
final JsonValue value = json.get(key);
|
||||
if (value instanceof JsonString) {
|
||||
collection.addEvidence(PACKAGE_JSON, key, ((JsonString) value).getString(), Confidence.HIGHEST);
|
||||
dep.addEvidence(type, PACKAGE_JSON, key, ((JsonString) value).getString(), Confidence.HIGHEST);
|
||||
} else if (value instanceof JsonObject) {
|
||||
final JsonObject jsonObject = (JsonObject) value;
|
||||
for (final Map.Entry<String, JsonValue> entry : jsonObject.entrySet()) {
|
||||
final String property = entry.getKey();
|
||||
final JsonValue subValue = entry.getValue();
|
||||
if (subValue instanceof JsonString) {
|
||||
collection.addEvidence(PACKAGE_JSON,
|
||||
dep.addEvidence(type, PACKAGE_JSON,
|
||||
String.format("%s.%s", key, property),
|
||||
((JsonString) subValue).getString(),
|
||||
Confidence.HIGHEST);
|
||||
|
||||
@@ -33,6 +33,8 @@ import org.slf4j.LoggerFactory;
|
||||
import java.io.FileFilter;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
|
||||
/**
|
||||
@@ -40,6 +42,7 @@ import org.owasp.dependencycheck.exception.InitializationException;
|
||||
*
|
||||
* @author colezlaw
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -69,10 +72,11 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Initializes the analyzer once before any analysis is performed.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException if there's an error during initialization
|
||||
*/
|
||||
@Override
|
||||
public void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
public void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
|
||||
//nothing to initialize
|
||||
}
|
||||
|
||||
@@ -137,13 +141,13 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
if (np.getOwners() != null) {
|
||||
dependency.getVendorEvidence().addEvidence("nuspec", "owners", np.getOwners(), Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, "nuspec", "owners", np.getOwners(), Confidence.HIGHEST);
|
||||
}
|
||||
dependency.getVendorEvidence().addEvidence("nuspec", "authors", np.getAuthors(), Confidence.HIGH);
|
||||
dependency.getVersionEvidence().addEvidence("nuspec", "version", np.getVersion(), Confidence.HIGHEST);
|
||||
dependency.getProductEvidence().addEvidence("nuspec", "id", np.getId(), Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, "nuspec", "authors", np.getAuthors(), Confidence.HIGH);
|
||||
dependency.addEvidence(EvidenceType.VERSION, "nuspec", "version", np.getVersion(), Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, "nuspec", "id", np.getId(), Confidence.HIGHEST);
|
||||
if (np.getTitle() != null) {
|
||||
dependency.getProductEvidence().addEvidence("nuspec", "title", np.getTitle(), Confidence.MEDIUM);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, "nuspec", "title", np.getTitle(), Confidence.MEDIUM);
|
||||
}
|
||||
} catch (Throwable e) {
|
||||
throw new AnalysisException(e);
|
||||
|
||||
@@ -17,9 +17,8 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
@@ -27,9 +26,7 @@ import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Identifier;
|
||||
import org.owasp.dependencycheck.dependency.Vulnerability;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* NvdCveAnalyzer is a utility class that takes a project dependency and
|
||||
@@ -38,49 +35,13 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class NvdCveAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
/**
|
||||
* The Logger for use throughout the class
|
||||
*/
|
||||
private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(NvdCveAnalyzer.class);
|
||||
|
||||
/**
|
||||
* The CVE Index.
|
||||
*/
|
||||
private CveDB cveDB;
|
||||
|
||||
/**
|
||||
* Opens the data source.
|
||||
*
|
||||
* @throws SQLException thrown when there is a SQL Exception
|
||||
* @throws IOException thrown when there is an IO Exception
|
||||
* @throws DatabaseException thrown when there is a database exceptions
|
||||
* @throws ClassNotFoundException thrown if the h2 database driver cannot be
|
||||
* loaded
|
||||
*/
|
||||
public void open() throws SQLException, IOException, DatabaseException, ClassNotFoundException {
|
||||
cveDB = CveDB.getInstance();
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the data source.
|
||||
*/
|
||||
@Override
|
||||
public void closeAnalyzer() {
|
||||
cveDB.close();
|
||||
cveDB = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the status of the data source - is the database open.
|
||||
*
|
||||
* @return true or false.
|
||||
*/
|
||||
public boolean isOpen() {
|
||||
return cveDB != null;
|
||||
}
|
||||
|
||||
//private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(NvdCveAnalyzer.class);
|
||||
/**
|
||||
* Analyzes a dependency and attempts to determine if there are any CPE
|
||||
* identifiers for this dependency.
|
||||
@@ -92,12 +53,13 @@ public class NvdCveAnalyzer extends AbstractAnalyzer {
|
||||
*/
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
final CveDB cveDB = engine.getDatabase();
|
||||
for (Identifier id : dependency.getIdentifiers()) {
|
||||
if ("cpe".equals(id.getType())) {
|
||||
try {
|
||||
final String value = id.getValue();
|
||||
final List<Vulnerability> vulns = cveDB.getVulnerabilities(value);
|
||||
dependency.getVulnerabilities().addAll(vulns);
|
||||
dependency.addVulnerabilities(vulns);
|
||||
} catch (DatabaseException ex) {
|
||||
throw new AnalysisException(ex);
|
||||
}
|
||||
@@ -108,7 +70,7 @@ public class NvdCveAnalyzer extends AbstractAnalyzer {
|
||||
try {
|
||||
final String value = id.getValue();
|
||||
final List<Vulnerability> vulns = cveDB.getVulnerabilities(value);
|
||||
dependency.getSuppressedVulnerabilities().addAll(vulns);
|
||||
dependency.addSuppressedVulnerabilities(vulns);
|
||||
} catch (DatabaseException ex) {
|
||||
throw new AnalysisException(ex);
|
||||
}
|
||||
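A condensed sketch of the flow above: the analyzer borrows the engine's shared CveDB rather than opening its own, and attaches any CVEs found for each CPE identifier. The class and method names of the sketch itself are illustrative:

```java
import java.util.List;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.Vulnerability;

// Condensed sketch of the lookup the hunks above perform.
final class CpeVulnerabilityLookup {

    void attachVulnerabilities(Dependency dependency, Engine engine) throws AnalysisException {
        final CveDB cveDB = engine.getDatabase();
        for (Identifier id : dependency.getIdentifiers()) {
            if ("cpe".equals(id.getType())) {
                try {
                    final List<Vulnerability> vulns = cveDB.getVulnerabilities(id.getValue());
                    dependency.addVulnerabilities(vulns);
                } catch (DatabaseException ex) {
                    throw new AnalysisException(ex);
                }
            }
        }
    }
}
```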
@@ -146,29 +108,4 @@ public class NvdCveAnalyzer extends AbstractAnalyzer {
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_NVD_CVE_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens the database used to gather NVD CVE data.
|
||||
*
|
||||
* @throws InitializationException is thrown if there is an issue opening
|
||||
* the index.
|
||||
*/
|
||||
@Override
|
||||
public void initializeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
this.open();
|
||||
} catch (SQLException ex) {
|
||||
LOGGER.debug("SQL Exception initializing NvdCveAnalyzer", ex);
|
||||
throw new InitializationException(ex);
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("IO Exception initializing NvdCveAnalyzer", ex);
|
||||
throw new InitializationException(ex);
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.debug("Database Exception initializing NvdCveAnalyzer", ex);
|
||||
throw new InitializationException(ex);
|
||||
} catch (ClassNotFoundException ex) {
|
||||
LOGGER.debug("Exception initializing NvdCveAnalyzer", ex);
|
||||
throw new InitializationException(ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -31,6 +31,8 @@ import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
|
||||
/**
|
||||
@@ -38,6 +40,7 @@ import org.owasp.dependencycheck.exception.InitializationException;
|
||||
*
|
||||
* @author Dale Visser
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class OpenSSLAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -143,13 +146,24 @@ public class OpenSSLAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
return OPENSSLV_FILTER;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the setting for the analyzer enabled setting key.
|
||||
*
|
||||
* @return the setting for the analyzer enabled setting key
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_OPENSSL_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* No-op initializer implementation.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException never thrown
|
||||
*/
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
protected void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
|
||||
// Nothing to do here.
|
||||
}
|
||||
|
||||
@@ -171,17 +185,17 @@ public class OpenSSLAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
if (!contents.isEmpty()) {
|
||||
final Matcher matcher = VERSION_PATTERN.matcher(contents);
|
||||
if (matcher.find()) {
|
||||
dependency.getVersionEvidence().addEvidence(OPENSSLV_H, "Version Constant",
|
||||
dependency.addEvidence(EvidenceType.VERSION, OPENSSLV_H, "Version Constant",
|
||||
getOpenSSLVersion(Long.parseLong(matcher.group(1), HEXADECIMAL)), Confidence.HIGH);
|
||||
found = true;
|
||||
}
|
||||
}
|
||||
if (found) {
|
||||
dependency.setDisplayFileName(parentName + File.separatorChar + OPENSSLV_H);
|
||||
dependency.getVendorEvidence().addEvidence(OPENSSLV_H, "Vendor", "OpenSSL", Confidence.HIGHEST);
|
||||
dependency.getProductEvidence().addEvidence(OPENSSLV_H, "Product", "OpenSSL", Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, OPENSSLV_H, "Vendor", "OpenSSL", Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, OPENSSLV_H, "Product", "OpenSSL", Confidence.HIGHEST);
|
||||
} else {
|
||||
engine.getDependencies().remove(dependency);
|
||||
engine.removeDependency(dependency);
|
||||
}
|
||||
}
|
||||
|
||||
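A hedged sketch of what getOpenSSLVersion(...) above presumably does, assuming the classic pre-3.0 OPENSSL_VERSION_NUMBER layout 0xMNNFFPPS (major, minor, fix, patch letter, status nibble):

```java
// Decode the hex version constant found in opensslv.h, under the layout assumption above.
public final class OpenSslVersionDecode {

    public static String decode(long versionNumber) {
        final long major = (versionNumber >> 28) & 0xF;
        final long minor = (versionNumber >> 20) & 0xFF;
        final long fix = (versionNumber >> 12) & 0xFF;
        final long patch = (versionNumber >> 4) & 0xFF;
        final String patchLetter = patch == 0 ? "" : String.valueOf((char) ('a' + patch - 1));
        return major + "." + minor + "." + fix + patchLetter;
    }

    public static void main(String[] args) {
        // prints 1.0.2h if the layout assumption holds
        System.out.println(decode(0x1000208fL));
    }
}
```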
@@ -201,14 +215,4 @@ public class OpenSSLAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
"Problem occurred while reading dependency file.", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the setting for the analyzer enabled setting key.
|
||||
*
|
||||
* @return the setting for the analyzer enabled setting key
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_OPENSSL_ENABLED;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,7 +32,6 @@ import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@@ -46,6 +45,8 @@ import org.owasp.dependencycheck.utils.FileUtils;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.utils.UrlStringUtils;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
|
||||
/**
|
||||
* Used to analyze a Wheel or egg distribution files, or their contents in
|
||||
@@ -55,30 +56,26 @@ import java.util.concurrent.atomic.AtomicInteger;
|
||||
* @author Dale Visser
|
||||
*/
|
||||
@Experimental
|
||||
@ThreadSafe
|
||||
public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* Name of egg metadata files to analyze.
|
||||
*/
|
||||
private static final String PKG_INFO = "PKG-INFO";
|
||||
|
||||
/**
|
||||
* Name of wheel metadata files to analyze.
|
||||
*/
|
||||
private static final String METADATA = "METADATA";
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory
|
||||
.getLogger(PythonDistributionAnalyzer.class);
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(PythonDistributionAnalyzer.class);
|
||||
/**
|
||||
* The count of directories created during analysis. This is used for
|
||||
* creating temporary directories.
|
||||
*/
|
||||
private static final AtomicInteger DIR_COUNT = new AtomicInteger(0);
|
||||
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
@@ -87,52 +84,39 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* The phase that this analyzer is intended to run in.
|
||||
*/
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION;
|
||||
|
||||
/**
|
||||
* The set of file extensions supported by this analyzer.
|
||||
*/
|
||||
private static final String[] EXTENSIONS = {"whl", "egg", "zip"};
|
||||
|
||||
/**
|
||||
* Used to match on egg archive candidate extensions.
|
||||
*/
|
||||
private static final FileFilter EGG_OR_ZIP = FileFilterBuilder.newInstance().addExtensions("egg", "zip").build();
|
||||
|
||||
/**
|
||||
* Used to detect files with a .whl extension.
|
||||
*/
|
||||
private static final FileFilter WHL_FILTER = FileFilterBuilder.newInstance().addExtensions("whl").build();
|
||||
|
||||
/**
|
||||
* The parent directory for the individual directories per archive.
|
||||
*/
|
||||
private File tempFileLocation;
|
||||
|
||||
/**
|
||||
* Filter that detects *.dist-info files (but doesn't verify they are
|
||||
* directories).
|
||||
*/
|
||||
private static final FilenameFilter DIST_INFO_FILTER = new SuffixFileFilter(
|
||||
".dist-info");
|
||||
|
||||
private static final FilenameFilter DIST_INFO_FILTER = new SuffixFileFilter(".dist-info");
|
||||
/**
|
||||
* Filter that detects files named "EGG-INFO".
|
||||
*/
|
||||
private static final FilenameFilter EGG_INFO_FILTER = new NameFileFilter(
|
||||
"EGG-INFO");
|
||||
|
||||
private static final FilenameFilter EGG_INFO_FILTER = new NameFileFilter("EGG-INFO");
|
||||
/**
|
||||
* Filter that detects files named "METADATA".
|
||||
*/
|
||||
private static final NameFileFilter METADATA_FILTER = new NameFileFilter(
|
||||
METADATA);
|
||||
|
||||
private static final NameFileFilter METADATA_FILTER = new NameFileFilter(METADATA);
|
||||
/**
|
||||
* Filter that detects files named "PKG-INFO".
|
||||
*/
|
||||
private static final NameFileFilter PKG_INFO_FILTER = new NameFileFilter(
|
||||
PKG_INFO);
|
||||
|
||||
private static final NameFileFilter PKG_INFO_FILTER = new NameFileFilter(PKG_INFO);
|
||||
/**
|
||||
* The file filter used to determine which files this analyzer supports.
|
||||
*/
|
||||
@@ -241,13 +225,14 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Makes sure a usable temporary directory is available.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException an AnalyzeException is thrown when the
|
||||
* temp directory cannot be created
|
||||
*/
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
protected void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
|
||||
try {
|
||||
final File baseDir = Settings.getTempDirectory();
|
||||
final File baseDir = getSettings().getTempDirectory();
|
||||
tempFileLocation = File.createTempFile("check", "tmp", baseDir);
|
||||
if (!tempFileLocation.delete()) {
|
||||
setEnabled(false);
|
||||
@@ -294,41 +279,36 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*/
|
||||
private static void collectWheelMetadata(Dependency dependency, File file) {
|
||||
final InternetHeaders headers = getManifestProperties(file);
|
||||
addPropertyToEvidence(headers, dependency.getVersionEvidence(),
|
||||
"Version", Confidence.HIGHEST);
|
||||
addPropertyToEvidence(headers, dependency.getProductEvidence(), "Name",
|
||||
Confidence.HIGHEST);
|
||||
addPropertyToEvidence(dependency, EvidenceType.VERSION, Confidence.HIGHEST, headers, "Version");
|
||||
addPropertyToEvidence(dependency, EvidenceType.PRODUCT, Confidence.HIGHEST, headers, "Name");
|
||||
final String url = headers.getHeader("Home-page", null);
|
||||
final EvidenceCollection vendorEvidence = dependency
|
||||
.getVendorEvidence();
|
||||
if (StringUtils.isNotBlank(url)) {
|
||||
if (UrlStringUtils.isUrl(url)) {
|
||||
vendorEvidence.addEvidence(METADATA, "vendor", url,
|
||||
Confidence.MEDIUM);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, METADATA, "vendor", url, Confidence.MEDIUM);
|
||||
}
|
||||
}
|
||||
addPropertyToEvidence(headers, vendorEvidence, "Author", Confidence.LOW);
|
||||
addPropertyToEvidence(dependency, EvidenceType.VENDOR, Confidence.LOW, headers, "Author");
|
||||
final String summary = headers.getHeader("Summary", null);
|
||||
if (StringUtils.isNotBlank(summary)) {
|
||||
JarAnalyzer
|
||||
.addDescription(dependency, summary, METADATA, "summary");
|
||||
JarAnalyzer.addDescription(dependency, summary, METADATA, "summary");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a value to the evidence collection.
|
||||
*
|
||||
* @param dependency the dependency being analyzed
|
||||
* @param type the type of evidence to add
|
||||
* @param confidence the confidence in the evidence being added
|
||||
* @param headers the properties collection
|
||||
* @param evidence the evidence collection to add the value
|
||||
* @param property the property name
|
||||
* @param confidence the confidence of the evidence
|
||||
*/
|
||||
private static void addPropertyToEvidence(InternetHeaders headers,
|
||||
EvidenceCollection evidence, String property, Confidence confidence) {
|
||||
private static void addPropertyToEvidence(Dependency dependency, EvidenceType type, Confidence confidence,
|
||||
InternetHeaders headers, String property) {
|
||||
final String value = headers.getHeader(property, null);
|
||||
LOGGER.debug("Property: {}, Value: {}", property, value);
|
||||
if (StringUtils.isNotBlank(value)) {
|
||||
evidence.addEvidence(METADATA, property, value, confidence);
|
||||
dependency.addEvidence(type, METADATA, property, value, confidence);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
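A small sketch of the wheel metadata extraction above: METADATA is a block of RFC 822 style headers, so it can be read with InternetHeaders and the fields the hunk maps to evidence pulled out directly. The file path below is hypothetical:

```java
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import javax.mail.internet.InternetHeaders;

// Sketch: read a wheel's METADATA headers and print the fields the analyzer above
// records as product, version, vendor, and home-page evidence.
public final class WheelMetadataPeek {

    public static void main(String[] args) throws Exception {
        try (InputStream in = Files.newInputStream(Paths.get("example-1.0.dist-info/METADATA"))) {
            final InternetHeaders headers = new InternetHeaders(in);
            System.out.println("product: " + headers.getHeader("Name", null));      // HIGHEST
            System.out.println("version: " + headers.getHeader("Version", null));   // HIGHEST
            System.out.println("vendor : " + headers.getHeader("Author", null));    // LOW
            System.out.println("home   : " + headers.getHeader("Home-page", null)); // MEDIUM if a URL
        }
    }
}
```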
@@ -24,7 +24,6 @@ import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.utils.UrlStringUtils;
|
||||
@@ -35,6 +34,8 @@ import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
|
||||
/**
|
||||
@@ -44,13 +45,13 @@ import org.owasp.dependencycheck.exception.InitializationException;
|
||||
* @author Dale Visser
|
||||
*/
|
||||
@Experimental
|
||||
@ThreadSafe
|
||||
public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* Used when compiling file scanning regex patterns.
|
||||
*/
|
||||
private static final int REGEX_OPTIONS = Pattern.DOTALL
|
||||
| Pattern.CASE_INSENSITIVE;
|
||||
private static final int REGEX_OPTIONS = Pattern.DOTALL | Pattern.CASE_INSENSITIVE;
|
||||
|
||||
/**
|
||||
* Filename extensions for files to be analyzed.
|
||||
@@ -58,16 +59,14 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
private static final String EXTENSIONS = "py";
|
||||
|
||||
/**
|
||||
* Pattern for matching the module docstring in a source file.
|
||||
* Pattern for matching the module doc string in a source file.
|
||||
*/
|
||||
private static final Pattern MODULE_DOCSTRING = Pattern.compile(
|
||||
"^(['\\\"]{3})(.*?)\\1", REGEX_OPTIONS);
|
||||
private static final Pattern MODULE_DOCSTRING = Pattern.compile("^(['\\\"]{3})(.*?)\\1", REGEX_OPTIONS);
|
||||
|
||||
/**
|
||||
* Matches assignments to version variables in Python source code.
|
||||
*/
|
||||
private static final Pattern VERSION_PATTERN = Pattern.compile(
|
||||
"\\b(__)?version(__)? *= *(['\"]+)(\\d+\\.\\d+.*?)\\3",
|
||||
private static final Pattern VERSION_PATTERN = Pattern.compile("\\b(__)?version(__)? *= *(['\"]+)(\\d+\\.\\d+.*?)\\3",
|
||||
REGEX_OPTIONS);
|
||||
|
||||
/**
|
||||
@@ -130,6 +129,16 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
return AnalysisPhase.INFORMATION_COLLECTION;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the key name for the analyzers enabled setting.
|
||||
*
|
||||
* @return the key name for the analyzers enabled setting
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the FileFilter
|
||||
*
|
||||
@@ -143,10 +152,11 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* No-op initializer implementation.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException never thrown
|
||||
*/
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
protected void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
|
||||
// Nothing to do here.
|
||||
}
|
||||
|
||||
@@ -181,8 +191,7 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
//"The __init__.py files are required to make Python treat the directories as containing packages"
|
||||
//see section "6.4 Packages" from https://docs.python.org/2/tutorial/modules.html;
|
||||
dependency.setDisplayFileName(parentName + "/__init__.py");
|
||||
dependency.getProductEvidence().addEvidence(file.getName(),
|
||||
"PackageName", parentName, Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, file.getName(), "PackageName", parentName, Confidence.HIGHEST);
|
||||
|
||||
final File[] fileList = parent.listFiles(PY_FILTER);
|
||||
if (fileList != null) {
|
||||
@@ -191,7 +200,7 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
engine.getDependencies().remove(dependency);
|
||||
engine.removeDependency(dependency);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -211,32 +220,28 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
try {
|
||||
contents = FileUtils.readFileToString(file, Charset.defaultCharset()).trim();
|
||||
} catch (IOException e) {
|
||||
throw new AnalysisException(
|
||||
"Problem occurred while reading dependency file.", e);
|
||||
throw new AnalysisException("Problem occurred while reading dependency file.", e);
|
||||
}
|
||||
boolean found = false;
|
||||
if (!contents.isEmpty()) {
|
||||
final String source = file.getName();
|
||||
found = gatherEvidence(VERSION_PATTERN, contents, source,
|
||||
dependency.getVersionEvidence(), "SourceVersion",
|
||||
Confidence.MEDIUM);
|
||||
found = gatherEvidence(dependency, EvidenceType.VERSION, VERSION_PATTERN, contents,
|
||||
source, "SourceVersion", Confidence.MEDIUM);
|
||||
found |= addSummaryInfo(dependency, SUMMARY_PATTERN, 4, contents,
|
||||
source, "summary");
|
||||
if (INIT_PY_FILTER.accept(file)) {
|
||||
found |= addSummaryInfo(dependency, MODULE_DOCSTRING, 2,
|
||||
contents, source, "docstring");
|
||||
}
|
||||
found |= gatherEvidence(TITLE_PATTERN, contents, source,
|
||||
dependency.getProductEvidence(), "SourceTitle",
|
||||
Confidence.LOW);
|
||||
final EvidenceCollection vendorEvidence = dependency
|
||||
.getVendorEvidence();
|
||||
found |= gatherEvidence(AUTHOR_PATTERN, contents, source,
|
||||
vendorEvidence, "SourceAuthor", Confidence.MEDIUM);
|
||||
found |= gatherHomePageEvidence(URI_PATTERN, vendorEvidence,
|
||||
found |= gatherEvidence(dependency, EvidenceType.PRODUCT, TITLE_PATTERN, contents,
|
||||
source, "SourceTitle", Confidence.LOW);
|
||||
|
||||
found |= gatherEvidence(dependency, EvidenceType.VENDOR, AUTHOR_PATTERN, contents,
|
||||
source, "SourceAuthor", Confidence.MEDIUM);
|
||||
found |= gatherHomePageEvidence(dependency, EvidenceType.VENDOR, URI_PATTERN,
|
||||
source, "URL", contents);
|
||||
found |= gatherHomePageEvidence(HOMEPAGE_PATTERN,
|
||||
vendorEvidence, source, "HomePage", contents);
|
||||
found |= gatherHomePageEvidence(dependency, EvidenceType.VENDOR, HOMEPAGE_PATTERN,
|
||||
source, "HomePage", contents);
|
||||
}
|
||||
return found;
|
||||
}
|
||||
@@ -266,23 +271,23 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Collects evidence from the home page URL.
|
||||
*
|
||||
* @param dependency the dependency that is being analyzed
|
||||
* @param type the type of evidence
|
||||
* @param pattern the pattern to match
|
||||
* @param evidence the evidence collection to add the evidence to
|
||||
* @param source the source of the evidence
|
||||
* @param name the name of the evidence
|
||||
* @param contents the home page URL
|
||||
* @return true if evidence was collected; otherwise false
|
||||
*/
|
||||
private boolean gatherHomePageEvidence(Pattern pattern,
|
||||
EvidenceCollection evidence, String source, String name,
|
||||
String contents) {
|
||||
private boolean gatherHomePageEvidence(Dependency dependency, EvidenceType type, Pattern pattern,
|
||||
String source, String name, String contents) {
|
||||
final Matcher matcher = pattern.matcher(contents);
|
||||
boolean found = false;
|
||||
if (matcher.find()) {
|
||||
final String url = matcher.group(4);
|
||||
if (UrlStringUtils.isUrl(url)) {
|
||||
found = true;
|
||||
evidence.addEvidence(source, name, url, Confidence.MEDIUM);
|
||||
dependency.addEvidence(type, source, name, url, Confidence.MEDIUM);
|
||||
}
|
||||
}
|
||||
return found;
|
||||
@@ -292,27 +297,22 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* Gather evidence from a Python source file using the given string
|
||||
* assignment regex pattern.
|
||||
*
|
||||
* @param dependency the dependency that is being analyzed
|
||||
* @param type the type of evidence
|
||||
* @param pattern to scan contents with
|
||||
* @param contents of Python source file
|
||||
* @param source for storing evidence
|
||||
* @param evidence to store evidence in
|
||||
* @param name of evidence
|
||||
* @param confidence in evidence
|
||||
* @return whether evidence was found
|
||||
*/
|
||||
private boolean gatherEvidence(Pattern pattern, String contents,
|
||||
String source, EvidenceCollection evidence, String name,
|
||||
Confidence confidence) {
|
||||
private boolean gatherEvidence(Dependency dependency, EvidenceType type, Pattern pattern, String contents,
|
||||
String source, String name, Confidence confidence) {
|
||||
final Matcher matcher = pattern.matcher(contents);
|
||||
final boolean found = matcher.find();
|
||||
if (found) {
|
||||
evidence.addEvidence(source, name, matcher.group(4), confidence);
|
||||
dependency.addEvidence(type, source, name, matcher.group(4), confidence);
|
||||
}
|
||||
return found;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED;
|
||||
}
|
||||
}
|
||||
|
||||
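A standalone sketch of the version-assignment scan above, reusing the pattern shown in the hunk; group 4 is the captured version string and group 3 the quote style it must close with:

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Sketch: find assignments such as __version__ = '2.7.1' in Python source.
public final class PySourceVersionScan {

    private static final int REGEX_OPTIONS = Pattern.DOTALL | Pattern.CASE_INSENSITIVE;
    private static final Pattern VERSION_PATTERN =
            Pattern.compile("\\b(__)?version(__)? *= *(['\"]+)(\\d+\\.\\d+.*?)\\3", REGEX_OPTIONS);

    public static String findVersion(String pythonSource) {
        final Matcher matcher = VERSION_PATTERN.matcher(pythonSource);
        return matcher.find() ? matcher.group(4) : null;
    }

    public static void main(String[] args) {
        // prints 2.7.1
        System.out.println(findVersion("__version__ = '2.7.1'\n"));
    }
}
```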
@@ -28,6 +28,7 @@ import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
@@ -36,6 +37,7 @@ import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.dependency.Reference;
|
||||
import org.owasp.dependencycheck.dependency.Vulnerability;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
@@ -50,6 +52,7 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Dale Visser
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -90,7 +93,7 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* The DAL.
|
||||
*/
|
||||
private CveDB cvedb;
|
||||
private CveDB cvedb = null;
|
||||
|
||||
/**
|
||||
* @return a filter that accepts files named Gemfile.lock
|
||||
@@ -113,7 +116,7 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
throw new AnalysisException(String.format("%s should have been a directory.", folder.getAbsolutePath()));
|
||||
}
|
||||
final List<String> args = new ArrayList<>();
|
||||
final String bundleAuditPath = Settings.getString(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH);
|
||||
final String bundleAuditPath = getSettings().getString(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH);
|
||||
File bundleAudit = null;
|
||||
if (bundleAuditPath != null) {
|
||||
bundleAudit = new File(bundleAuditPath);
|
||||
@@ -140,22 +143,18 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* Initialize the analyzer. In this case, verify that the bundle-audit
|
||||
* command can be launched.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException if anything goes wrong
|
||||
*/
|
||||
@Override
|
||||
public void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
cvedb = CveDB.getInstance();
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.warn("Exception opening the database");
|
||||
LOGGER.debug("error", ex);
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Error connecting to the database", ex);
|
||||
}
|
||||
public void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
|
||||
// Now, need to see if bundle-audit actually runs from this location.
|
||||
if (engine != null) {
|
||||
this.cvedb = engine.getDatabase();
|
||||
}
|
||||
Process process = null;
|
||||
try {
|
||||
process = launchBundleAudit(Settings.getTempDirectory());
|
||||
process = launchBundleAudit(getSettings().getTempDirectory());
|
||||
} catch (AnalysisException ae) {
|
||||
|
||||
setEnabled(false);
|
||||
@@ -208,17 +207,6 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the data source.
|
||||
*/
|
||||
@Override
|
||||
public void closeAnalyzer() {
|
||||
if (cvedb != null) {
|
||||
cvedb.close();
|
||||
cvedb = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer.
|
||||
*
|
||||
@@ -377,7 +365,7 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
vulnerability.setName(advisory);
|
||||
}
|
||||
if (null != dependency) {
|
||||
dependency.getVulnerabilities().add(vulnerability); // needed to wait for vulnerability name to avoid NPE
|
||||
dependency.addVulnerability(vulnerability);
|
||||
}
|
||||
LOGGER.debug("bundle-audit ({}): {}", parentName, nextLine);
|
||||
}
|
||||
@@ -413,13 +401,21 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
final String criticality = nextLine.substring(CRITICALITY.length()).trim();
|
||||
float score = -1.0f;
|
||||
Vulnerability v = null;
|
||||
try {
|
||||
v = cvedb.getVulnerability(vulnerability.getName());
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.debug("Unable to look up vulnerability {}", vulnerability.getName());
|
||||
if (cvedb != null) {
|
||||
try {
|
||||
v = cvedb.getVulnerability(vulnerability.getName());
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.debug("Unable to look up vulnerability {}", vulnerability.getName());
|
||||
}
|
||||
}
|
||||
if (v != null) {
|
||||
score = v.getCvssScore();
|
||||
vulnerability.setCvssAccessComplexity(v.getCvssAccessComplexity());
|
||||
vulnerability.setCvssAccessVector(v.getCvssAccessVector());
|
||||
vulnerability.setCvssAuthentication(v.getCvssAuthentication());
|
||||
vulnerability.setCvssAvailabilityImpact(v.getCvssAvailabilityImpact());
|
||||
vulnerability.setCvssConfidentialityImpact(v.getCvssConfidentialityImpact());
|
||||
vulnerability.setCvssIntegrityImpact(v.getCvssIntegrityImpact());
|
||||
} else if ("High".equalsIgnoreCase(criticality)) {
|
||||
score = 8.5f;
|
||||
} else if ("Medium".equalsIgnoreCase(criticality)) {
|
||||
@@ -445,7 +441,7 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
Vulnerability vulnerability = null;
|
||||
if (null != dependency) {
|
||||
final String version = nextLine.substring(VERSION.length());
|
||||
dependency.getVersionEvidence().addEvidence(
|
||||
dependency.addEvidence(EvidenceType.VERSION,
|
||||
"bundler-audit",
|
||||
"Version",
|
||||
version,
|
||||
@@ -477,7 +473,7 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @throws IOException thrown if a temporary gem file could not be written
|
||||
*/
|
||||
private Dependency createDependencyForGem(Engine engine, String parentName, String fileName, String filePath, String gem) throws IOException {
|
||||
final File gemFile = new File(Settings.getTempDirectory(), gem + "_Gemfile.lock");
|
||||
final File gemFile = new File(getSettings().getTempDirectory(), gem + "_Gemfile.lock");
|
||||
if (!gemFile.createNewFile()) {
|
||||
throw new IOException("Unable to create temporary gem file");
|
||||
}
|
||||
@@ -485,11 +481,11 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
FileUtils.write(gemFile, displayFileName, Charset.defaultCharset()); // unique contents to avoid dependency bundling
|
||||
final Dependency dependency = new Dependency(gemFile);
|
||||
dependency.getProductEvidence().addEvidence("bundler-audit", "Name", gem, Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, "bundler-audit", "Name", gem, Confidence.HIGHEST);
|
||||
dependency.setDisplayFileName(displayFileName);
|
||||
dependency.setFileName(fileName);
|
||||
dependency.setFilePath(filePath);
|
||||
engine.getDependencies().add(dependency);
|
||||
engine.addDependency(dependency);
|
||||
return dependency;
|
||||
}
|
||||
}
@@ -19,6 +19,7 @@ package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FilenameFilter;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
@@ -47,6 +48,7 @@ import org.owasp.dependencycheck.dependency.Dependency;
|
||||
* @author Bianca Jiang (https://twitter.com/biancajiang)
|
||||
*/
|
||||
@Experimental
|
||||
@ThreadSafe
|
||||
public class RubyBundlerAnalyzer extends RubyGemspecAnalyzer {
|
||||
|
||||
/**
|
||||
|
||||
@@ -25,13 +25,14 @@ import java.nio.charset.Charset;
|
||||
import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
@@ -46,6 +47,7 @@ import org.slf4j.LoggerFactory;
|
||||
* @author Dale Visser
|
||||
*/
|
||||
@Experimental
|
||||
@ThreadSafe
|
||||
public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -89,7 +91,7 @@ public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
protected void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
|
||||
// NO-OP
|
||||
}
|
||||
|
||||
@@ -130,8 +132,7 @@ public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
private static final Pattern GEMSPEC_BLOCK_INIT = Pattern.compile("Gem::Specification\\.new\\s+?do\\s+?\\|(.+?)\\|");
|
||||
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
String contents;
|
||||
try {
|
||||
contents = FileUtils.readFileToString(dependency.getActualFile(), Charset.defaultCharset());
|
||||
@@ -144,23 +145,21 @@ public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
contents = contents.substring(matcher.end());
|
||||
final String blockVariable = matcher.group(1);
|
||||
|
||||
final EvidenceCollection vendor = dependency.getVendorEvidence();
|
||||
final EvidenceCollection product = dependency.getProductEvidence();
|
||||
final String name = addStringEvidence(product, contents, blockVariable, "name", "name", Confidence.HIGHEST);
|
||||
final String name = addStringEvidence(dependency, EvidenceType.PRODUCT, contents, blockVariable, "name", "name", Confidence.HIGHEST);
|
||||
if (!name.isEmpty()) {
|
||||
vendor.addEvidence(GEMSPEC, "name_project", name + "_project", Confidence.LOW);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, GEMSPEC, "name_project", name + "_project", Confidence.LOW);
|
||||
}
|
||||
addStringEvidence(product, contents, blockVariable, "summary", "summary", Confidence.LOW);
|
||||
addStringEvidence(dependency, EvidenceType.PRODUCT, contents, blockVariable, "summary", "summary", Confidence.LOW);
|
||||
|
||||
addStringEvidence(vendor, contents, blockVariable, "author", "authors?", Confidence.HIGHEST);
|
||||
addStringEvidence(vendor, contents, blockVariable, "email", "emails?", Confidence.MEDIUM);
|
||||
addStringEvidence(vendor, contents, blockVariable, "homepage", "homepage", Confidence.HIGHEST);
|
||||
addStringEvidence(vendor, contents, blockVariable, "license", "licen[cs]es?", Confidence.HIGHEST);
|
||||
addStringEvidence(dependency, EvidenceType.VENDOR, contents, blockVariable, "author", "authors?", Confidence.HIGHEST);
|
||||
addStringEvidence(dependency, EvidenceType.VENDOR, contents, blockVariable, "email", "emails?", Confidence.MEDIUM);
|
||||
addStringEvidence(dependency, EvidenceType.VENDOR, contents, blockVariable, "homepage", "homepage", Confidence.HIGHEST);
|
||||
addStringEvidence(dependency, EvidenceType.VENDOR, contents, blockVariable, "license", "licen[cs]es?", Confidence.HIGHEST);
|
||||
|
||||
final String value = addStringEvidence(dependency.getVersionEvidence(), contents,
|
||||
final String value = addStringEvidence(dependency, EvidenceType.VERSION, contents,
|
||||
blockVariable, "version", "version", Confidence.HIGHEST);
|
||||
if (value.length() < 1) {
|
||||
addEvidenceFromVersionFile(dependency.getActualFile(), dependency.getVersionEvidence());
|
||||
addEvidenceFromVersionFile(dependency, EvidenceType.VERSION, dependency.getActualFile());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -170,7 +169,8 @@ public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Adds the specified evidence to the given evidence collection.
|
||||
*
|
||||
* @param evidences the collection to add the evidence to
|
||||
* @param dependency the dependency being analyzed
|
||||
* @param type the type of evidence to add
|
||||
* @param contents the evidence contents
|
||||
* @param blockVariable the variable
|
||||
* @param field the field
|
||||
@@ -178,7 +178,7 @@ public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @param confidence the confidence of the evidence
|
||||
* @return the evidence string value added
|
||||
*/
|
||||
private String addStringEvidence(EvidenceCollection evidences, String contents,
|
||||
private String addStringEvidence(Dependency dependency, EvidenceType type, String contents,
|
||||
String blockVariable, String field, String fieldPattern, Confidence confidence) {
|
||||
String value = "";
|
||||
|
||||
@@ -196,7 +196,7 @@ public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
}
|
||||
if (value.length() > 0) {
|
||||
evidences.addEvidence(GEMSPEC, field, value, confidence);
|
||||
dependency.addEvidence(type, GEMSPEC, field, value, confidence);
|
||||
}
|
||||
|
||||
return value;
|
||||
@@ -205,10 +205,11 @@ public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Adds evidence from the version file.
|
||||
*
|
||||
* @param dependency the dependency being analyzed
|
||||
* @param type the type of evidence to add
|
||||
* @param dependencyFile the dependency being analyzed
|
||||
* @param versionEvidences the version evidence
|
||||
*/
|
||||
private void addEvidenceFromVersionFile(File dependencyFile, EvidenceCollection versionEvidences) {
|
||||
private void addEvidenceFromVersionFile(Dependency dependency, EvidenceType type, File dependencyFile) {
|
||||
final File parentDir = dependencyFile.getParentFile();
|
||||
if (parentDir != null) {
|
||||
final File[] matchingFiles = parentDir.listFiles(new FilenameFilter() {
|
||||
@@ -225,7 +226,7 @@ public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
final List<String> lines = FileUtils.readLines(f, Charset.defaultCharset());
|
||||
if (lines.size() == 1) { //TODO other checking?
|
||||
final String value = lines.get(0).trim();
|
||||
versionEvidences.addEvidence(GEMSPEC, "version", value, Confidence.HIGH);
|
||||
dependency.addEvidence(type, GEMSPEC, "version", value, Confidence.HIGH);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
LOGGER.debug("Error reading gemspec", e);
|
||||
|
||||
@@ -23,13 +23,14 @@ import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
@@ -41,6 +42,7 @@ import org.owasp.dependencycheck.utils.Settings;
|
||||
* @author Bianca Jiang (https://twitter.com/biancajiang)
|
||||
*/
|
||||
@Experimental
|
||||
@ThreadSafe
|
||||
public class SwiftPackageManagerAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -80,7 +82,7 @@ public class SwiftPackageManagerAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() {
|
||||
protected void prepareFileTypeAnalyzer(Engine engine) {
|
||||
// NO-OP
|
||||
}
|
||||
|
||||
@@ -133,14 +135,11 @@ public class SwiftPackageManagerAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
return;
|
||||
}
|
||||
|
||||
final EvidenceCollection product = dependency.getProductEvidence();
|
||||
final EvidenceCollection vendor = dependency.getVendorEvidence();
|
||||
|
||||
//SPM is currently under development for SWIFT 3. Its current metadata includes package name and dependencies.
|
||||
//Future interesting metadata: version, license, homepage, author, summary, etc.
|
||||
final String name = addStringEvidence(product, packageDescription, "name", "name", Confidence.HIGHEST);
|
||||
final String name = addStringEvidence(dependency, EvidenceType.PRODUCT, packageDescription, "name", "name", Confidence.HIGHEST);
|
||||
if (name != null && !name.isEmpty()) {
|
||||
vendor.addEvidence(SPM_FILE_NAME, "name_project", name, Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, SPM_FILE_NAME, "name_project", name, Confidence.HIGHEST);
|
||||
}
|
||||
}
|
||||
setPackagePath(dependency);
|
||||
@@ -150,14 +149,15 @@ public class SwiftPackageManagerAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* Extracts evidence from the package description and adds it to the given
|
||||
* evidence collection.
|
||||
*
|
||||
* @param evidences the evidence collection to update
|
||||
* @param dependency the dependency being analyzed
|
||||
* @param type the type of evidence to add
|
||||
* @param packageDescription the text to extract evidence from
|
||||
* @param field the name of the field being searched for
|
||||
* @param fieldPattern the field pattern within the contents to search for
|
||||
* @param confidence the confidence level of the evidence if found
|
||||
* @return the string that was added as evidence
|
||||
*/
|
||||
private String addStringEvidence(EvidenceCollection evidences,
|
||||
private String addStringEvidence(Dependency dependency, EvidenceType type,
|
||||
String packageDescription, String field, String fieldPattern, Confidence confidence) {
|
||||
String value = "";
|
||||
|
||||
@@ -170,7 +170,7 @@ public class SwiftPackageManagerAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
if (value != null) {
|
||||
value = value.trim();
|
||||
if (value.length() > 0) {
|
||||
evidences.addEvidence(SPM_FILE_NAME, field, value, confidence);
|
||||
dependency.addEvidence(type, SPM_FILE_NAME, field, value, confidence);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -17,13 +17,15 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.HashSet;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Evidence;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.utils.DependencyVersion;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
@@ -37,8 +39,14 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class VersionFilterAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
/**
|
||||
* The Logger for use throughout the class
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(VersionFilterAnalyzer.class);
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="Constants">
|
||||
/**
|
||||
* Evidence source.
|
||||
@@ -111,11 +119,6 @@ public class VersionFilterAnalyzer extends AbstractAnalyzer {
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* The Logger for use throughout the class
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(VersionFilterAnalyzer.class);
|
||||
|
||||
/**
|
||||
* The HintAnalyzer uses knowledge about a dependency to add additional
|
||||
* information to help in identification of identifiers or vulnerabilities.
|
||||
@@ -126,18 +129,18 @@ public class VersionFilterAnalyzer extends AbstractAnalyzer {
|
||||
* the dependency.
|
||||
*/
|
||||
@Override
|
||||
protected synchronized void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
String fileVersion = null;
|
||||
String pomVersion = null;
|
||||
String manifestVersion = null;
|
||||
for (Evidence e : dependency.getVersionEvidence()) {
|
||||
for (Evidence e : dependency.getEvidence(EvidenceType.VERSION)) {
|
||||
if (FILE.equals(e.getSource()) && VERSION.equals(e.getName())) {
|
||||
fileVersion = e.getValue(Boolean.FALSE);
|
||||
fileVersion = e.getValue();
|
||||
} else if ((NEXUS.equals(e.getSource()) || CENTRAL.equals(e.getSource())
|
||||
|| POM.equals(e.getSource())) && VERSION.equals(e.getName())) {
|
||||
pomVersion = e.getValue(Boolean.FALSE);
|
||||
pomVersion = e.getValue();
|
||||
} else if (MANIFEST.equals(e.getSource()) && IMPLEMENTATION_VERSION.equals(e.getName())) {
|
||||
manifestVersion = e.getValue(Boolean.FALSE);
|
||||
manifestVersion = e.getValue();
|
||||
}
|
||||
}
|
||||
//ensure we have at least two not null
|
||||
@@ -150,17 +153,18 @@ public class VersionFilterAnalyzer extends AbstractAnalyzer {
|
||||
final boolean pomMatch = Objects.equals(dvPom, dvFile) || Objects.equals(dvPom, dvManifest);
|
||||
if (fileMatch || manifestMatch || pomMatch) {
|
||||
LOGGER.debug("filtering evidence from {}", dependency.getFileName());
|
||||
final EvidenceCollection versionEvidence = dependency.getVersionEvidence();
|
||||
final Iterator<Evidence> itr = versionEvidence.iterator();
|
||||
while (itr.hasNext()) {
|
||||
final Evidence e = itr.next();
|
||||
final Set<Evidence> remove = new HashSet<>();
|
||||
for (Evidence e : dependency.getEvidence(EvidenceType.VERSION)) {
|
||||
if (!(pomMatch && VERSION.equals(e.getName())
|
||||
&& (NEXUS.equals(e.getSource()) || CENTRAL.equals(e.getSource()) || POM.equals(e.getSource())))
|
||||
&& !(fileMatch && VERSION.equals(e.getName()) && FILE.equals(e.getSource()))
|
||||
&& !(manifestMatch && MANIFEST.equals(e.getSource()) && IMPLEMENTATION_VERSION.equals(e.getName()))) {
|
||||
itr.remove();
|
||||
remove.add(e);
|
||||
}
|
||||
}
|
||||
for (Evidence e : remove) {
|
||||
dependency.removeEvidence(EvidenceType.VERSION, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
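
The VersionFilterAnalyzer hunk above stops removing evidence through the live Iterator and instead collects the entries to drop into a HashSet, then removes them through the Dependency API once iteration is finished, so the collection is never mutated while it is being walked. A self-contained sketch of that collect-then-remove idiom, simplified to plain strings with illustrative names:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    // Illustrative collect-then-remove: decide what to drop while iterating,
    // mutate the collection only after the loop has finished.
    public final class CollectThenRemove {

        static void dropSingleDigitVersions(List<String> versions) {
            final Set<String> remove = new HashSet<>();
            for (String v : versions) {
                if (v.length() == 1) {
                    remove.add(v);
                }
            }
            versions.removeAll(remove);
        }

        public static void main(String[] args) {
            final List<String> versions = new ArrayList<>(Arrays.asList("3", "3.0", "3.0.0"));
            dropSingleDigitVersions(versions);
            System.out.println(versions); // [3.0, 3.0.0]
        }
    }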
@@ -17,6 +17,7 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
/**
|
||||
@@ -26,6 +27,7 @@ import org.owasp.dependencycheck.utils.Settings;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class VulnerabilitySuppressionAnalyzer extends AbstractSuppressionAnalyzer {
|
||||
|
||||
/**
|
||||
|
||||
@@ -17,11 +17,14 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer.exception;
|
||||
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* An exception thrown when the analysis of a dependency fails.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class AnalysisException extends Exception {
|
||||
|
||||
/**
|
||||
|
||||
@@ -17,11 +17,14 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer.exception;
|
||||
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* An exception thrown when files in an archive cannot be extracted.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class ArchiveExtractionException extends Exception {
|
||||
|
||||
/**
|
||||
|
||||
@@ -20,9 +20,12 @@ package org.owasp.dependencycheck.data.central;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URISyntaxException;
|
||||
import java.net.URL;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import javax.xml.xpath.XPath;
|
||||
@@ -44,12 +47,13 @@ import org.xml.sax.SAXException;
|
||||
*
|
||||
* @author colezlaw
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class CentralSearch {
|
||||
|
||||
/**
|
||||
* The URL for the Central service
|
||||
*/
|
||||
private final URL rootURL;
|
||||
private final String rootURL;
|
||||
|
||||
/**
|
||||
* Whether to use the Proxy when making requests
|
||||
@@ -60,16 +64,28 @@ public class CentralSearch {
|
||||
* Used for logging.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(CentralSearch.class);
|
||||
/**
|
||||
* The configured settings.
|
||||
*/
|
||||
private final Settings settings;
|
||||
|
||||
/**
|
||||
* Creates a NexusSearch for the given repository URL.
|
||||
*
|
||||
* @param rootURL the URL of the repository on which searches should
|
||||
* execute. Only parameters are added to this (so it should end in /select)
|
||||
* @param settings the configured settings
|
||||
* @throws MalformedURLException thrown if the configured URL is
|
||||
* invalid
|
||||
*/
|
||||
public CentralSearch(URL rootURL) {
|
||||
this.rootURL = rootURL;
|
||||
if (null != Settings.getString(Settings.KEYS.PROXY_SERVER)) {
|
||||
public CentralSearch(Settings settings) throws MalformedURLException {
|
||||
this.settings = settings;
|
||||
|
||||
final String searchUrl = settings.getString(Settings.KEYS.ANALYZER_CENTRAL_URL);
|
||||
LOGGER.debug("Central Search URL: {}", searchUrl);
|
||||
if (isInvalidURL(searchUrl)) {
|
||||
throw new MalformedURLException(String.format("The configured central analyzer URL is invalid: %s", searchUrl));
|
||||
}
|
||||
this.rootURL = searchUrl;
|
||||
if (null != settings.getString(Settings.KEYS.PROXY_SERVER)) {
|
||||
useProxy = true;
|
||||
LOGGER.debug("Using proxy");
|
||||
} else {
|
||||
@@ -93,7 +109,7 @@ public class CentralSearch {
|
||||
throw new IllegalArgumentException("Invalid SHA1 format");
|
||||
}
|
||||
List<MavenArtifact> result = null;
|
||||
final URL url = new URL(rootURL + String.format("?q=1:\"%s\"&wt=xml", sha1));
|
||||
final URL url = new URL(String.format("%s?q=1:\"%s\"&wt=xml", rootURL, sha1));
|
||||
|
||||
LOGGER.debug("Searching Central url {}", url);
|
||||
|
||||
@@ -101,7 +117,8 @@ public class CentralSearch {
|
||||
// 1) If the proxy is set, AND the setting is set to true, use the proxy
|
||||
// 2) Otherwise, don't use the proxy (either the proxy isn't configured,
|
||||
// or proxy is specifically set to false)
|
||||
final HttpURLConnection conn = URLConnectionFactory.createHttpURLConnection(url, useProxy);
|
||||
final URLConnectionFactory factory = new URLConnectionFactory(settings);
|
||||
final HttpURLConnection conn = factory.createHttpURLConnection(url, useProxy);
|
||||
|
||||
conn.setDoOutput(true);
|
||||
|
||||
@@ -167,4 +184,21 @@ public class CentralSearch {
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tests to determine if the given URL is <b>invalid</b>.
|
||||
*
|
||||
* @param url the url to evaluate
|
||||
* @return true if the url is malformed; otherwise false
|
||||
*/
|
||||
private boolean isInvalidURL(String url) {
|
||||
try {
|
||||
final URL u = new URL(url);
|
||||
u.toURI();
|
||||
} catch (MalformedURLException | URISyntaxException e) {
|
||||
LOGGER.trace("URL is invalid: {}", url);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
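
With this change CentralSearch is built from the injected Settings and validates the configured ANALYZER_CENTRAL_URL up front, rather than being handed a pre-built URL with a static proxy lookup. A rough wiring sketch under that contract; the helper class is hypothetical, only the constructor shown in the hunk is assumed:

    import java.net.MalformedURLException;
    import org.owasp.dependencycheck.data.central.CentralSearch;
    import org.owasp.dependencycheck.utils.Settings;

    // Hypothetical wiring helper: a bad central URL now fails fast at construction time.
    public final class CentralSearchWiring {

        static CentralSearch build(Settings settings) {
            try {
                return new CentralSearch(settings);
            } catch (MalformedURLException ex) {
                // the configured ANALYZER_CENTRAL_URL did not parse as a valid URL/URI
                throw new IllegalStateException("Central analyzer is misconfigured", ex);
            }
        }

        private CentralSearchWiring() {
        }
    }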
@@ -17,11 +17,14 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.data.composer;
|
||||
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* Represents a dependency (GAV, right now) from a Composer dependency.
|
||||
*
|
||||
* @author colezlaw
|
||||
*/
|
||||
@ThreadSafe
|
||||
public final class ComposerDependency {
|
||||
|
||||
/**
|
||||
|
||||
@@ -17,11 +17,15 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.data.composer;
|
||||
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* Represents an exception when handling a composer.json or composer.lock file. Generally used to wrap a downstream exception.
|
||||
* Represents an exception when handling a composer.json or composer.lock file.
|
||||
* Generally used to wrap a downstream exception.
|
||||
*
|
||||
* @author colezlaw
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class ComposerException extends RuntimeException {
|
||||
|
||||
/**
|
||||
|
||||
@@ -29,12 +29,14 @@ import javax.json.stream.JsonParsingException;
|
||||
import java.io.InputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import javax.annotation.concurrent.NotThreadSafe;
|
||||
|
||||
/**
|
||||
* Parses a Composer.lock file from an input stream. In a separate class so it can hopefully be injected.
|
||||
*
|
||||
* @author colezlaw
|
||||
*/
|
||||
@NotThreadSafe
|
||||
public class ComposerLockParser {
|
||||
|
||||
/**
|
||||
|
||||
@@ -21,6 +21,8 @@ import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.analysis.core.KeywordAnalyzer;
|
||||
import org.apache.lucene.analysis.miscellaneous.PerFieldAnalyzerWrapper;
|
||||
@@ -47,21 +49,29 @@ import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* An in memory Lucene index that contains the vendor/product combinations from
|
||||
* the CPE (application) identifiers within the NVD CVE data.
|
||||
* the CPE (application) identifiers within the NVD CVE data.</p>
|
||||
*
|
||||
* This is the last remaining singleton in dependency-check-core; The use of
|
||||
* this singleton - while it may not technically be thread-safe (one database
|
||||
* used to build this index may not have the same entries as another) the risk
|
||||
* of this is currently believed to be small. As this memory index consumes a
|
||||
* large amount of memory we will remain using the singleton pattern for now.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public final class CpeMemoryIndex implements AutoCloseable {
|
||||
|
||||
/**
|
||||
* Singleton instance.
|
||||
*/
|
||||
private static final CpeMemoryIndex INSTANCE = new CpeMemoryIndex();
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(CpeMemoryIndex.class);
|
||||
/**
|
||||
* singleton instance.
|
||||
*/
|
||||
private static final CpeMemoryIndex INSTANCE = new CpeMemoryIndex();
|
||||
/**
|
||||
* The in memory Lucene index.
|
||||
*/
|
||||
@@ -82,19 +92,11 @@ public final class CpeMemoryIndex implements AutoCloseable {
|
||||
* The Lucene QueryParser used for Searching.
|
||||
*/
|
||||
private QueryParser queryParser;
|
||||
/**
|
||||
* The search field analyzer for the product field.
|
||||
*/
|
||||
private SearchFieldAnalyzer productFieldAnalyzer;
|
||||
/**
|
||||
* The search field analyzer for the vendor field.
|
||||
*/
|
||||
private SearchFieldAnalyzer vendorFieldAnalyzer;
|
||||
/**
|
||||
* Track the number of current users of the Lucene index; used to track if
* it is okay to actually close the index.
|
||||
*/
|
||||
private int usageCount = 0;
|
||||
private final AtomicInteger usageCount = new AtomicInteger(0);
|
||||
|
||||
/**
|
||||
* private constructor for singleton.
|
||||
@@ -118,8 +120,7 @@ public final class CpeMemoryIndex implements AutoCloseable {
|
||||
* @throws IndexException thrown if there is an error creating the index
|
||||
*/
|
||||
public synchronized void open(CveDB cve) throws IndexException {
|
||||
if (INSTANCE.usageCount <= 0) {
|
||||
INSTANCE.usageCount = 0;
|
||||
if (INSTANCE.usageCount.addAndGet(1) == 1) {
|
||||
index = new RAMDirectory();
|
||||
buildIndex(cve);
|
||||
try {
|
||||
@@ -131,7 +132,6 @@ public final class CpeMemoryIndex implements AutoCloseable {
|
||||
searchingAnalyzer = createSearchingAnalyzer();
|
||||
queryParser = new QueryParser(LuceneUtils.CURRENT_VERSION, Fields.DOCUMENT_KEY, searchingAnalyzer);
|
||||
}
|
||||
INSTANCE.usageCount += 1;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -140,7 +140,7 @@ public final class CpeMemoryIndex implements AutoCloseable {
|
||||
* @return whether or not the index is open
|
||||
*/
|
||||
public synchronized boolean isOpen() {
|
||||
return INSTANCE.usageCount > 0;
|
||||
return INSTANCE.usageCount.get() > 0;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -151,8 +151,8 @@ public final class CpeMemoryIndex implements AutoCloseable {
|
||||
private Analyzer createSearchingAnalyzer() {
|
||||
final Map<String, Analyzer> fieldAnalyzers = new HashMap<>();
|
||||
fieldAnalyzers.put(Fields.DOCUMENT_KEY, new KeywordAnalyzer());
|
||||
productFieldAnalyzer = new SearchFieldAnalyzer(LuceneUtils.CURRENT_VERSION);
|
||||
vendorFieldAnalyzer = new SearchFieldAnalyzer(LuceneUtils.CURRENT_VERSION);
|
||||
final SearchFieldAnalyzer productFieldAnalyzer = new SearchFieldAnalyzer(LuceneUtils.CURRENT_VERSION);
|
||||
final SearchFieldAnalyzer vendorFieldAnalyzer = new SearchFieldAnalyzer(LuceneUtils.CURRENT_VERSION);
|
||||
fieldAnalyzers.put(Fields.PRODUCT, productFieldAnalyzer);
|
||||
fieldAnalyzers.put(Fields.VENDOR, vendorFieldAnalyzer);
|
||||
|
||||
@@ -164,8 +164,9 @@ public final class CpeMemoryIndex implements AutoCloseable {
|
||||
*/
|
||||
@Override
|
||||
public synchronized void close() {
|
||||
INSTANCE.usageCount -= 1;
|
||||
if (INSTANCE.usageCount <= 0) {
|
||||
final int count = INSTANCE.usageCount.get() - 1;
|
||||
if (count <= 0) {
|
||||
INSTANCE.usageCount.set(0);
|
||||
if (searchingAnalyzer != null) {
|
||||
searchingAnalyzer.close();
|
||||
searchingAnalyzer = null;
|
||||
@@ -218,8 +219,6 @@ public final class CpeMemoryIndex implements AutoCloseable {
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
throw new IndexException("Error reading CPE data", ex);
|
||||
} catch (CorruptIndexException ex) {
|
||||
throw new IndexException("Unable to close an in-memory index", ex);
|
||||
} catch (IOException ex) {
|
||||
throw new IndexException("Unable to close an in-memory index", ex);
|
||||
}
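
The CpeMemoryIndex hunks above swap the plain int usage counter for an AtomicInteger while keeping open() and close() synchronized, so the shared in-memory index is built on the first open and released only when the last user closes it. A stripped-down sketch of that reference-counting pattern; the class and field names are illustrative, not the project's:

    import java.util.concurrent.atomic.AtomicInteger;

    // Illustrative reference-counted resource: the last close() releases the resource.
    public final class RefCountedIndex implements AutoCloseable {

        private final AtomicInteger usageCount = new AtomicInteger(0);
        private Object index;                     // stand-in for the in-memory Lucene index

        public synchronized void open() {
            if (usageCount.incrementAndGet() == 1) {
                index = new Object();             // build the expensive index only once
            }
        }

        public boolean isOpen() {
            return usageCount.get() > 0;
        }

        @Override
        public synchronized void close() {
            if (usageCount.decrementAndGet() <= 0) {
                usageCount.set(0);
                index = null;                     // release the shared resource
            }
        }
    }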
@@ -17,11 +17,15 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.data.cpe;
|
||||
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* Fields is a collection of field names used within the Lucene index for CPE entries.
|
||||
* Fields is a collection of field names used within the Lucene index for CPE
|
||||
* entries.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public final class Fields {
|
||||
|
||||
/**
|
||||
@@ -38,7 +42,8 @@ public final class Fields {
|
||||
public static final String PRODUCT = "product";
|
||||
|
||||
/**
|
||||
* Private constructor as this is more of an enumeration rather then a full class.
|
||||
* Private constructor as this is more of an enumeration rather then a full
|
||||
* class.
|
||||
*/
|
||||
private Fields() {
|
||||
}
|
||||
|
||||
@@ -20,6 +20,7 @@ package org.owasp.dependencycheck.data.cpe;
|
||||
import java.io.Serializable;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.URLDecoder;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
/**
|
||||
@@ -27,6 +28,7 @@ import org.apache.commons.lang3.StringUtils;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class IndexEntry implements Serializable {
|
||||
|
||||
/**
|
||||
|
||||
@@ -17,11 +17,14 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.data.cpe;
|
||||
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* An exception thrown when there is an issue using the in-memory CPE Index.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class IndexException extends Exception {
|
||||
|
||||
/**
|
||||
|
||||
@@ -26,11 +26,13 @@ import java.io.InputStream;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public final class CweDB {
|
||||
|
||||
/**
|
||||
@@ -59,9 +61,7 @@ public final class CweDB {
|
||||
final String filePath = "data/cwe.hashmap.serialized";
|
||||
try (InputStream input = FileUtils.getResourceAsStream(filePath);
|
||||
ObjectInputStream oin = new ObjectInputStream(input)) {
|
||||
|
||||
final Map<String, String> ret = (HashMap<String, String>) oin.readObject();
|
||||
return ret;
|
||||
return (HashMap<String, String>) oin.readObject();
|
||||
} catch (ClassNotFoundException ex) {
|
||||
LOGGER.warn("Unable to load CWE data. This should not be an issue.");
|
||||
LOGGER.debug("", ex);
|
||||
@@ -79,7 +79,7 @@ public final class CweDB {
|
||||
* @param cweId the CWE ID
|
||||
* @return the full name of the CWE
|
||||
*/
|
||||
public static String getCweName(String cweId) {
|
||||
public static synchronized String getCweName(String cweId) {
|
||||
if (cweId != null) {
|
||||
return CWE.get(cweId);
|
||||
}
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
package org.owasp.dependencycheck.data.cwe;
|
||||
|
||||
import java.util.HashMap;
|
||||
import javax.annotation.concurrent.NotThreadSafe;
|
||||
import org.xml.sax.Attributes;
|
||||
import org.xml.sax.SAXException;
|
||||
import org.xml.sax.helpers.DefaultHandler;
|
||||
@@ -27,6 +28,7 @@ import org.xml.sax.helpers.DefaultHandler;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@NotThreadSafe
|
||||
public class CweHandler extends DefaultHandler {
|
||||
|
||||
/**
|
||||
|
||||
@@ -18,15 +18,18 @@
|
||||
package org.owasp.dependencycheck.data.lucene;
|
||||
|
||||
import java.util.LinkedList;
|
||||
import javax.annotation.concurrent.NotThreadSafe;
|
||||
import org.apache.lucene.analysis.TokenFilter;
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
|
||||
|
||||
/**
|
||||
* An abstract tokenizing filter that can be used as the base for a tokenizing filter.
|
||||
* An abstract tokenizing filter that can be used as the base for a tokenizing
|
||||
* filter.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@NotThreadSafe
|
||||
public abstract class AbstractTokenizingFilter extends TokenFilter {
|
||||
|
||||
/**
|
||||
|
||||
@@ -18,14 +18,17 @@
|
||||
package org.owasp.dependencycheck.data.lucene;
|
||||
|
||||
import java.io.Reader;
|
||||
import javax.annotation.concurrent.NotThreadSafe;
|
||||
import org.apache.lucene.analysis.util.CharTokenizer;
|
||||
import org.apache.lucene.util.Version;
|
||||
|
||||
/**
|
||||
* Tokenizes the input breaking it into tokens when non-alpha/numeric characters are found.
|
||||
* Tokenizes the input breaking it into tokens when non-alpha/numeric characters
|
||||
* are found.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@NotThreadSafe
|
||||
public class AlphaNumericTokenizer extends CharTokenizer {
|
||||
|
||||
/**
|
||||
|
||||
@@ -17,12 +17,14 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.data.lucene;
|
||||
|
||||
import javax.annotation.concurrent.NotThreadSafe;
|
||||
import org.apache.lucene.search.similarities.DefaultSimilarity;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@NotThreadSafe
|
||||
public class DependencySimilarity extends DefaultSimilarity {
|
||||
|
||||
/**
|
||||
|
||||
@@ -18,19 +18,22 @@
|
||||
package org.owasp.dependencycheck.data.lucene;
|
||||
|
||||
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.apache.lucene.util.Version;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Lucene utils is a set of utilize written to make constructing Lucene queries simpler.</p>
|
||||
* Lucene utils is a set of utilities written to make constructing Lucene queries
|
||||
* simpler.</p>
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public final class LuceneUtils {
|
||||
|
||||
/**
|
||||
* The current version of Lucene being used. Declaring this one place so an upgrade doesn't require hunting through the code
|
||||
* base.
|
||||
* The current version of Lucene being used. Declaring this one place so an
|
||||
* upgrade doesn't require hunting through the code base.
|
||||
*/
|
||||
public static final Version CURRENT_VERSION = Version.LUCENE_47;
|
||||
|
||||
@@ -41,7 +44,8 @@ public final class LuceneUtils {
|
||||
}
|
||||
|
||||
/**
|
||||
* Appends the text to the supplied StringBuilder escaping Lucene control characters in the process.
|
||||
* Appends the text to the supplied StringBuilder escaping Lucene control
|
||||
* characters in the process.
|
||||
*
|
||||
* @param buf a StringBuilder to append the escaped text to
|
||||
* @param text the data to be escaped
|
||||
@@ -88,7 +92,8 @@ public final class LuceneUtils {
|
||||
}
|
||||
|
||||
/**
|
||||
* Escapes the text passed in so that it is treated as data instead of control characters.
|
||||
* Escapes the text passed in so that it is treated as data instead of
|
||||
* control characters.
|
||||
*
|
||||
* @param text data to be escaped
|
||||
* @return the escaped text.
|
||||
|
||||
@@ -18,8 +18,6 @@
|
||||
package org.owasp.dependencycheck.data.lucene;
|
||||
|
||||
import java.io.Reader;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.analysis.Tokenizer;
|
||||
@@ -44,8 +42,8 @@ public class SearchFieldAnalyzer extends Analyzer {
|
||||
/**
|
||||
* The list of additional stop words to use.
|
||||
*/
|
||||
private static final List<String> ADDITIONAL_STOP_WORDS = Arrays.asList("software", "framework", "inc",
|
||||
"com", "org", "net", "www", "consulting", "ltd", "foundation", "project");
|
||||
private static final String[] ADDITIONAL_STOP_WORDS = {"software", "framework", "inc",
|
||||
"com", "org", "net", "www", "consulting", "ltd", "foundation", "project"};
|
||||
/**
|
||||
* The set of stop words to use in the analyzer.
|
||||
*/
|
||||
@@ -57,8 +55,8 @@ public class SearchFieldAnalyzer extends Analyzer {
|
||||
* @return the set of stop words being used
|
||||
*/
|
||||
public static CharArraySet getStopWords() {
|
||||
CharArraySet words = new CharArraySet(LuceneUtils.CURRENT_VERSION, StopAnalyzer.ENGLISH_STOP_WORDS_SET, true);
|
||||
words.addAll(ADDITIONAL_STOP_WORDS);
|
||||
final CharArraySet words = StopFilter.makeStopSet(LuceneUtils.CURRENT_VERSION, ADDITIONAL_STOP_WORDS, true);
|
||||
words.addAll(StopAnalyzer.ENGLISH_STOP_WORDS_SET);
|
||||
return words;
|
||||
}
|
||||
|
||||
|
||||
@@ -19,6 +19,7 @@ package org.owasp.dependencycheck.data.lucene;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.LinkedList;
|
||||
import javax.annotation.concurrent.NotThreadSafe;
|
||||
import org.apache.lucene.analysis.TokenFilter;
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
|
||||
@@ -33,6 +34,7 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@NotThreadSafe
|
||||
public final class TokenPairConcatenatingFilter extends TokenFilter {
|
||||
|
||||
/**
|
||||
|
||||
@@ -21,6 +21,7 @@ import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import javax.annotation.concurrent.NotThreadSafe;
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
|
||||
import org.owasp.dependencycheck.utils.UrlStringUtils;
|
||||
@@ -28,13 +29,12 @@ import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Takes a TokenStream and splits or adds tokens to correctly index version numbers.</p>
|
||||
* <p>
|
||||
* <b>Example:</b> "3.0.0.RELEASE" -> "3 3.0 3.0.0 RELEASE 3.0.0.RELEASE".</p>
|
||||
*
|
||||
* Takes a TokenStream, looks for URLs, and breaks them into separate tokens.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@NotThreadSafe
|
||||
public final class UrlTokenizingFilter extends AbstractTokenizingFilter {
|
||||
|
||||
/**
|
||||
@@ -52,8 +52,9 @@ public final class UrlTokenizingFilter extends AbstractTokenizingFilter {
|
||||
}
|
||||
|
||||
/**
|
||||
* Increments the underlying TokenStream and sets CharTermAttributes to construct an expanded set of tokens by concatenating
|
||||
* tokens with the previous token.
|
||||
* Increments the underlying TokenStream and sets CharTermAttributes to
|
||||
* construct an expanded set of tokens by concatenating tokens with the
|
||||
* previous token.
|
||||
*
|
||||
* @return whether or not we have hit the end of the TokenStream
|
||||
* @throws IOException is thrown when an IOException occurs
|
||||
|
||||
@@ -17,11 +17,14 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.data.nexus;
|
||||
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* Simple bean representing a Maven Artifact.
|
||||
*
|
||||
* @author colezlaw
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class MavenArtifact {
|
||||
|
||||
/**
|
||||
@@ -45,7 +48,8 @@ public class MavenArtifact {
|
||||
private String version;
|
||||
|
||||
/**
|
||||
* The artifact url. This may change depending on which Nexus server the search took place.
|
||||
* The artifact url. This may change depending on which Nexus server the
|
||||
* search took place.
|
||||
*/
|
||||
private String artifactUrl;
|
||||
/**
|
||||
@@ -80,7 +84,8 @@ public class MavenArtifact {
|
||||
* @param version the version
|
||||
* @param jarAvailable if the jar file is available from central
|
||||
* @param pomAvailable if the pom file is available from central
|
||||
* @param secureDownload if the jar and pom files should be downloaded using HTTPS.
|
||||
* @param secureDownload if the jar and pom files should be downloaded using
|
||||
* HTTPS.
|
||||
*/
|
||||
public MavenArtifact(String groupId, String artifactId, String version, boolean jarAvailable, boolean pomAvailable, boolean secureDownload) {
|
||||
this.groupId = groupId;
|
||||
@@ -220,5 +225,3 @@ public class MavenArtifact {
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// vim: cc=120:sw=4:ts=4:sts=4
@@ -20,12 +20,15 @@ package org.owasp.dependencycheck.data.nexus;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import javax.xml.xpath.XPath;
|
||||
import javax.xml.xpath.XPathExpressionException;
|
||||
import javax.xml.xpath.XPathFactory;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
import org.owasp.dependencycheck.utils.URLConnectionFactory;
|
||||
import org.owasp.dependencycheck.utils.XmlUtils;
|
||||
@@ -39,6 +42,7 @@ import org.xml.sax.SAXException;
|
||||
*
|
||||
* @author colezlaw
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class NexusSearch {
|
||||
|
||||
/**
|
||||
@@ -50,6 +54,10 @@ public class NexusSearch {
|
||||
* Whether to use the Proxy when making requests.
|
||||
*/
|
||||
private final boolean useProxy;
|
||||
/**
|
||||
* The configured settings.
|
||||
*/
|
||||
private final Settings settings;
|
||||
/**
|
||||
* Used for logging.
|
||||
*/
|
||||
@@ -58,15 +66,19 @@ public class NexusSearch {
|
||||
/**
|
||||
* Creates a NexusSearch for the given repository URL.
|
||||
*
|
||||
* @param rootURL the root URL of the repository on which searches should
|
||||
* execute. full URL's are calculated relative to this URL, so it should end
|
||||
* with a /
|
||||
* @param settings the configured settings
|
||||
* @param useProxy flag indicating if the proxy settings should be used
|
||||
* @throws java.net.MalformedURLException thrown if the configured URL is
|
||||
* invalid
|
||||
*/
|
||||
public NexusSearch(URL rootURL, boolean useProxy) {
|
||||
this.rootURL = rootURL;
|
||||
public NexusSearch(Settings settings, boolean useProxy) throws MalformedURLException {
|
||||
this.settings = settings;
|
||||
this.useProxy = useProxy;
|
||||
LOGGER.debug("Using proxy: {}", useProxy);
|
||||
|
||||
final String searchUrl = settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL);
|
||||
LOGGER.debug("Nexus Search URL: {}", searchUrl);
|
||||
this.rootURL = new URL(searchUrl);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -94,7 +106,8 @@ public class NexusSearch {
|
||||
// 2) Otherwise, don't use the proxy (either the proxy isn't configured,
|
||||
// or proxy is specifically set to false
|
||||
HttpURLConnection conn;
|
||||
conn = URLConnectionFactory.createHttpURLConnection(url, useProxy);
|
||||
final URLConnectionFactory factory = new URLConnectionFactory(settings);
|
||||
conn = factory.createHttpURLConnection(url, useProxy);
|
||||
conn.setDoOutput(true);
|
||||
|
||||
// JSON would be more elegant, but there's not currently a dependency
|
||||
@@ -159,7 +172,8 @@ public class NexusSearch {
|
||||
HttpURLConnection conn;
|
||||
try {
|
||||
final URL url = new URL(rootURL, "status");
|
||||
conn = URLConnectionFactory.createHttpURLConnection(url, useProxy);
|
||||
final URLConnectionFactory factory = new URLConnectionFactory(settings);
|
||||
conn = factory.createHttpURLConnection(url, useProxy);
|
||||
conn.addRequestProperty("Accept", "application/xml");
|
||||
conn.connect();
|
||||
if (conn.getResponseCode() != 200) {
|
||||
@@ -176,9 +190,6 @@ public class NexusSearch {
|
||||
} catch (IOException | ParserConfigurationException | SAXException e) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// vim: cc=120:sw=4:ts=4:sts=4
@@ -18,6 +18,7 @@
|
||||
package org.owasp.dependencycheck.data.nsp;
|
||||
|
||||
import java.util.Arrays;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* The response from NSP check API will respond with 0 or more advisories. This
|
||||
@@ -25,6 +26,7 @@ import java.util.Arrays;
|
||||
*
|
||||
* @author Steve Springett
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class Advisory {
|
||||
|
||||
/**
|
||||
|
||||
@@ -23,10 +23,12 @@ import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.utils.URLConnectionFactory;
|
||||
@@ -36,6 +38,8 @@ import javax.json.Json;
|
||||
import javax.json.JsonArray;
|
||||
import javax.json.JsonObject;
|
||||
import javax.json.JsonReader;
|
||||
import javax.json.JsonValue.ValueType;
|
||||
import static org.owasp.dependencycheck.analyzer.NspAnalyzer.DEFAULT_URL;
|
||||
import org.owasp.dependencycheck.utils.URLConnectionFailureException;
|
||||
|
||||
/**
|
||||
@@ -43,6 +47,7 @@ import org.owasp.dependencycheck.utils.URLConnectionFailureException;
|
||||
*
|
||||
* @author Steve Springett
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class NspSearch {
|
||||
|
||||
/**
|
||||
@@ -54,7 +59,10 @@ public class NspSearch {
|
||||
* Whether to use the Proxy when making requests.
|
||||
*/
|
||||
private final boolean useProxy;
|
||||
|
||||
/**
|
||||
* The configured settings.
|
||||
*/
|
||||
private final Settings settings;
|
||||
/**
|
||||
* Used for logging.
|
||||
*/
|
||||
@@ -63,11 +71,16 @@ public class NspSearch {
|
||||
/**
|
||||
* Creates a NspSearch for the given repository URL.
|
||||
*
|
||||
* @param nspCheckUrl the URL to the public NSP check API
|
||||
* @param settings the configured settings
|
||||
* @throws java.net.MalformedURLException thrown if the configured URL is
|
||||
* invalid
|
||||
*/
|
||||
public NspSearch(URL nspCheckUrl) {
|
||||
this.nspCheckUrl = nspCheckUrl;
|
||||
if (null != Settings.getString(Settings.KEYS.PROXY_SERVER)) {
|
||||
public NspSearch(Settings settings) throws MalformedURLException {
|
||||
final String searchUrl = settings.getString(Settings.KEYS.ANALYZER_NSP_URL, DEFAULT_URL);
|
||||
LOGGER.debug("NSP Search URL: {}", searchUrl);
|
||||
this.nspCheckUrl = new URL(searchUrl);
|
||||
this.settings = settings;
|
||||
if (null != settings.getString(Settings.KEYS.PROXY_SERVER)) {
|
||||
useProxy = true;
|
||||
LOGGER.debug("Using proxy");
|
||||
} else {
|
||||
@@ -90,8 +103,8 @@ public class NspSearch {
|
||||
try {
|
||||
final List<Advisory> result = new ArrayList<>();
|
||||
final byte[] packageDatabytes = packageJson.toString().getBytes(StandardCharsets.UTF_8);
|
||||
|
||||
final HttpURLConnection conn = URLConnectionFactory.createHttpURLConnection(nspCheckUrl, useProxy);
|
||||
final URLConnectionFactory factory = new URLConnectionFactory(settings);
|
||||
final HttpURLConnection conn = factory.createHttpURLConnection(nspCheckUrl, useProxy);
|
||||
conn.setDoOutput(true);
|
||||
conn.setDoInput(true);
|
||||
conn.setRequestMethod("POST");
|
||||
@@ -121,7 +134,13 @@ public class NspSearch {
|
||||
advisory.setOverview(object.getString("overview"));
|
||||
advisory.setRecommendation(object.getString("recommendation", null));
|
||||
advisory.setCvssVector(object.getString("cvss_vector", null));
|
||||
advisory.setCvssScore(Float.parseFloat(object.getJsonNumber("cvss_score").toString()));
|
||||
|
||||
if (object.get("cvss_score").getValueType() != ValueType.NULL) {
|
||||
advisory.setCvssScore(Float.parseFloat(object.getJsonNumber("cvss_score").toString()));
|
||||
} else {
|
||||
advisory.setCvssScore(-1);
|
||||
}
|
||||
|
||||
advisory.setModule(object.getString("module", null));
|
||||
advisory.setVersion(object.getString("version", null));
|
||||
advisory.setVulnerableVersions(object.getString("vulnerable_versions", null));
|
||||
@@ -141,6 +160,7 @@ public class NspSearch {
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case 400:
|
||||
LOGGER.debug("Invalid payload submitted to Node Security Platform. Received response code: {} {}",
|
||||
conn.getResponseCode(), conn.getResponseMessage());
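
A few hunks up, NspSearch now checks the cvss_score JSON value for ValueType.NULL before calling getJsonNumber, falling back to -1 when the API returns null. A small self-contained sketch of reading an optional numeric field with javax.json in that defensive style; the field name and fallback mirror the hunk, while the wrapper class is illustrative:

    import java.io.StringReader;
    import javax.json.Json;
    import javax.json.JsonObject;
    import javax.json.JsonValue.ValueType;

    // Illustrative: read an optional numeric field without tripping over JSON null
    // (getJsonNumber would fail when the value is null).
    public final class NullableJsonNumber {

        static float cvssScoreOrDefault(JsonObject advisory, float fallback) {
            if (advisory.containsKey("cvss_score")
                    && advisory.get("cvss_score").getValueType() != ValueType.NULL) {
                return Float.parseFloat(advisory.getJsonNumber("cvss_score").toString());
            }
            return fallback;
        }

        public static void main(String[] args) {
            final JsonObject o = Json.createReader(
                    new StringReader("{\"cvss_score\": null}")).readObject();
            System.out.println(cvssScoreOrDefault(o, -1f)); // -1.0
        }
    }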
@@ -25,6 +25,7 @@ import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* Class used to create a Sanitized version of package.json suitable for
|
||||
@@ -32,6 +33,7 @@ import java.util.Map;
|
||||
*
|
||||
* @author Steve Springett
|
||||
*/
|
||||
@ThreadSafe
|
||||
public final class SanitizePackage {
|
||||
|
||||
/**
|
||||
|
||||
@@ -17,12 +17,16 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.data.nuget;
|
||||
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* Represents the contents of a Nuspec manifest.
|
||||
*
|
||||
* @author colezlaw
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class NugetPackage {
|
||||
|
||||
/**
|
||||
* The id.
|
||||
*/
|
||||
@@ -55,6 +59,7 @@ public class NugetPackage {
|
||||
|
||||
/**
|
||||
* Sets the id.
|
||||
*
|
||||
* @param id the id
|
||||
*/
|
||||
public void setId(String id) {
|
||||
@@ -63,6 +68,7 @@ public class NugetPackage {
|
||||
|
||||
/**
|
||||
* Gets the id.
|
||||
*
|
||||
* @return the id
|
||||
*/
|
||||
public String getId() {
|
||||
@@ -71,6 +77,7 @@ public class NugetPackage {
|
||||
|
||||
/**
|
||||
* Sets the version.
|
||||
*
|
||||
* @param version the version
|
||||
*/
|
||||
public void setVersion(String version) {
|
||||
@@ -79,6 +86,7 @@ public class NugetPackage {
|
||||
|
||||
/**
|
||||
* Gets the version.
|
||||
*
|
||||
* @return the version
|
||||
*/
|
||||
public String getVersion() {
|
||||
@@ -87,6 +95,7 @@ public class NugetPackage {
|
||||
|
||||
/**
|
||||
* Sets the title.
|
||||
*
|
||||
* @param title the title
|
||||
*/
|
||||
public void setTitle(String title) {
|
||||
@@ -95,6 +104,7 @@ public class NugetPackage {
|
||||
|
||||
/**
|
||||
* Gets the title.
|
||||
*
|
||||
* @return the title
|
||||
*/
|
||||
public String getTitle() {
|
||||
@@ -103,6 +113,7 @@ public class NugetPackage {
|
||||
|
||||
/**
|
||||
* Sets the authors.
|
||||
*
|
||||
* @param authors the authors
|
||||
*/
|
||||
public void setAuthors(String authors) {
|
||||
@@ -111,6 +122,7 @@ public class NugetPackage {
|
||||
|
||||
/**
|
||||
* Gets the authors.
|
||||
*
|
||||
* @return the authors
|
||||
*/
|
||||
public String getAuthors() {
|
||||
@@ -119,6 +131,7 @@ public class NugetPackage {
|
||||
|
||||
/**
|
||||
* Sets the owners.
|
||||
*
|
||||
* @param owners the owners
|
||||
*/
|
||||
public void setOwners(String owners) {
|
||||
@@ -127,6 +140,7 @@ public class NugetPackage {
|
||||
|
||||
/**
|
||||
* Gets the owners.
|
||||
*
|
||||
* @return the owners
|
||||
*/
|
||||
public String getOwners() {
|
||||
@@ -135,6 +149,7 @@ public class NugetPackage {
|
||||
|
||||
/**
|
||||
* Sets the licenseUrl.
|
||||
*
|
||||
* @param licenseUrl the licenseUrl
|
||||
*/
|
||||
public void setLicenseUrl(String licenseUrl) {
|
||||
@@ -143,6 +158,7 @@ public class NugetPackage {
|
||||
|
||||
/**
|
||||
* Gets the licenseUrl.
|
||||
*
|
||||
* @return the licenseUrl
|
||||
*/
|
||||
public String getLicenseUrl() {
|
||||
|
||||
@@ -17,11 +17,14 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.data.nuget;
|
||||
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* Exception during the parsing of a Nuspec file.
|
||||
*
|
||||
* @author colezlaw
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class NuspecParseException extends Exception {
|
||||
|
||||
/**
|
||||
@@ -32,19 +35,20 @@ public class NuspecParseException extends Exception {
|
||||
/**
|
||||
* Constructs a new exception with <code>null</code> as its detail message.
|
||||
*
|
||||
* The cause is not initialized, and may subsequently be initialized by a call to
|
||||
* {@link java.lang.Throwable#initCause(java.lang.Throwable)}.
|
||||
* The cause is not initialized, and may subsequently be initialized by a
|
||||
* call to {@link java.lang.Throwable#initCause(java.lang.Throwable)}.
|
||||
*/
|
||||
public NuspecParseException() {
|
||||
super();
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new exception with the specified detail message. The cause is not initialized, and may subsequently
|
||||
* be initialized by a call to {@link java.lang.Throwable#initCause(java.lang.Throwable)}.
|
||||
* Constructs a new exception with the specified detail message. The cause
|
||||
* is not initialized, and may subsequently be initialized by a call to
|
||||
* {@link java.lang.Throwable#initCause(java.lang.Throwable)}.
|
||||
*
|
||||
* @param message the detail message. The detail message is saved for later retrieval by the
|
||||
* {@link java.lang.Throwable#getMessage()} method.
|
||||
* @param message the detail message. The detail message is saved for later
|
||||
* retrieval by the {@link java.lang.Throwable#getMessage()} method.
|
||||
*/
|
||||
public NuspecParseException(String message) {
|
||||
super(message);
|
||||
@@ -53,13 +57,16 @@ public class NuspecParseException extends Exception {
|
||||
/**
|
||||
* Constructs a new exception with the specified detail message and cause.
|
||||
*
|
||||
* Note that the detail message associated with <code>cause</code> is <em>not</em>
|
||||
* Note that the detail message associated with <code>cause</code> is
|
||||
* <em>not</em>
|
||||
* automatically incorporated in this exception's detail message.
|
||||
*
|
||||
* @param message the detail message (which is saved for later retrieval by the
|
||||
* {@link java.lang.Throwable#getMessage()} method.
|
||||
* @param cause the cause (which is saved for later retrieval by the {@link java.lang.Throwable#getCause()} method).
|
||||
* (A <code>null</code> value is permitted, and indicates that the cause is nonexistent or unknown).
|
||||
* @param message the detail message (which is saved for later retrieval by
|
||||
* the {@link java.lang.Throwable#getMessage()} method.
|
||||
* @param cause the cause (which is saved for later retrieval by the
|
||||
* {@link java.lang.Throwable#getCause()} method). (A <code>null</code>
|
||||
* value is permitted, and indicates that the cause is nonexistent or
|
||||
* unknown).
|
||||
*/
|
||||
public NuspecParseException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
|
||||
@@ -26,6 +26,7 @@ import java.io.InputStream;
|
||||
*
|
||||
*/
|
||||
public interface NuspecParser {
|
||||
|
||||
/**
|
||||
* Parse an input stream and return the resulting {@link NugetPackage}.
|
||||
*
|
||||
|
||||
@@ -19,6 +19,7 @@ package org.owasp.dependencycheck.data.nuget;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import javax.xml.xpath.XPath;
|
||||
@@ -35,6 +36,7 @@ import org.xml.sax.SAXException;
|
||||
*
|
||||
* @author colezlaw
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class XPathNuspecParser implements NuspecParser {
|
||||
|
||||
/**
|
||||
|
||||
@@ -27,6 +27,7 @@ import java.sql.DriverManager;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
import java.sql.Statement;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.owasp.dependencycheck.utils.DBUtils;
|
||||
import org.owasp.dependencycheck.utils.DependencyVersion;
|
||||
@@ -44,16 +45,13 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public final class ConnectionFactory {
|
||||
|
||||
/**
|
||||
* The Logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(ConnectionFactory.class);
|
||||
/**
|
||||
* The version of the current DB Schema.
|
||||
*/
|
||||
public static final String DB_SCHEMA_VERSION = Settings.getString(Settings.KEYS.DB_VERSION);
|
||||
/**
|
||||
* Resource location for SQL file used to create the database schema.
|
||||
*/
|
||||
@@ -69,29 +67,36 @@ public final class ConnectionFactory {
/**
* The database driver used to connect to the database.
*/
private static Driver driver = null;
private Driver driver = null;
/**
* The database connection string.
*/
private static String connectionString = null;
private String connectionString = null;
/**
* The username to connect to the database.
*/
private static String userName = null;
private String userName = null;
/**
* The password for the database.
*/
private static String password = null;
private String password = null;
/**
* Counter to ensure that calls to ensureSchemaVersion does not end up in an
* endless loop.
*/
private static int callDepth = 0;
private int callDepth = 0;
/**
* The configured settings.
*/
private final Settings settings;

/**
* Private constructor for this factory class; no instance is ever needed.
*
* @param settings the configured settings
*/
private ConnectionFactory() {
public ConnectionFactory(Settings settings) {
this.settings = settings;
}
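The hunk above replaces the class's static state with per-instance fields built around a Settings object. A minimal caller-side sketch, using only the constructor and the synchronized getConnection()/cleanup() methods shown in this diff (exception handling trimmed; not the project's exact wiring):

    // Sketch: each scan builds its own factory around its own Settings, so
    // concurrent scans no longer share static driver/connection state.
    final ConnectionFactory factory = new ConnectionFactory(settings);
    try {
        final Connection conn = factory.getConnection(); // getConnection() calls initialize() itself
        // ... run queries against the local vulnerability schema ...
        conn.close();
    } finally {
        factory.cleanup(); // de-registers any driver this factory loaded
    }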
/**
|
||||
@@ -101,7 +106,7 @@ public final class ConnectionFactory {
|
||||
* @throws DatabaseException thrown if we are unable to connect to the
|
||||
* database
|
||||
*/
|
||||
public static void initialize() throws DatabaseException {
|
||||
public synchronized void initialize() throws DatabaseException {
|
||||
//this only needs to be called once.
|
||||
if (connectionString != null) {
|
||||
return;
|
||||
@@ -109,27 +114,23 @@ public final class ConnectionFactory {
|
||||
Connection conn = null;
|
||||
try {
|
||||
//load the driver if necessary
|
||||
final String driverName = Settings.getString(Settings.KEYS.DB_DRIVER_NAME, "");
|
||||
if (!driverName.isEmpty()) { //likely need to load the correct driver
|
||||
LOGGER.debug("Loading driver: {}", driverName);
|
||||
final String driverPath = Settings.getString(Settings.KEYS.DB_DRIVER_PATH, "");
|
||||
final String driverName = settings.getString(Settings.KEYS.DB_DRIVER_NAME, "");
|
||||
final String driverPath = settings.getString(Settings.KEYS.DB_DRIVER_PATH, "");
|
||||
if (!driverPath.isEmpty()) {
|
||||
LOGGER.debug("Loading driver '{}' from '{}'", driverName, driverPath);
|
||||
try {
|
||||
if (!driverPath.isEmpty()) {
|
||||
LOGGER.debug("Loading driver from: {}", driverPath);
|
||||
driver = DriverLoader.load(driverName, driverPath);
|
||||
} else {
|
||||
driver = DriverLoader.load(driverName);
|
||||
}
|
||||
LOGGER.debug("Loading driver from: {}", driverPath);
|
||||
driver = DriverLoader.load(driverName, driverPath);
|
||||
} catch (DriverLoadException ex) {
|
||||
LOGGER.debug("Unable to load database driver", ex);
|
||||
throw new DatabaseException("Unable to load database driver", ex);
|
||||
}
|
||||
}
|
||||
userName = Settings.getString(Settings.KEYS.DB_USER, "dcuser");
|
||||
userName = settings.getString(Settings.KEYS.DB_USER, "dcuser");
|
||||
//yes, yes - hard-coded password - only if there isn't one in the properties file.
|
||||
password = Settings.getString(Settings.KEYS.DB_PASSWORD, "DC-Pass1337!");
|
||||
password = settings.getString(Settings.KEYS.DB_PASSWORD, "DC-Pass1337!");
|
||||
try {
|
||||
connectionString = Settings.getConnectionString(
|
||||
connectionString = settings.getConnectionString(
|
||||
Settings.KEYS.DB_CONNECTION_STRING,
|
||||
Settings.KEYS.DB_FILE_NAME);
|
||||
} catch (IOException ex) {
|
||||
@@ -158,7 +159,7 @@ public final class ConnectionFactory {
|
||||
connectionString = connectionString.replace("AUTO_SERVER=TRUE;", "");
|
||||
try {
|
||||
conn = DriverManager.getConnection(connectionString, userName, password);
|
||||
Settings.setString(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
|
||||
settings.setString(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
|
||||
LOGGER.debug("Unable to start the database in server mode; reverting to single user mode");
|
||||
} catch (SQLException sqlex) {
|
||||
LOGGER.debug("Unable to connect to the database", ex);
|
||||
@@ -169,7 +170,6 @@ public final class ConnectionFactory {
|
||||
throw new DatabaseException("Unable to connect to the database", ex);
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldCreateSchema) {
|
||||
try {
|
||||
createTables(conn);
|
||||
@@ -201,16 +201,9 @@ public final class ConnectionFactory {
|
||||
* finalize method being called as during shutdown the class loader used to
|
||||
* load the driver may be unloaded prior to the driver being de-registered.
|
||||
*/
|
||||
public static void cleanup() {
|
||||
public synchronized void cleanup() {
|
||||
if (driver != null) {
|
||||
try {
|
||||
DriverManager.deregisterDriver(driver);
|
||||
} catch (SQLException ex) {
|
||||
LOGGER.debug("An error occurred unloading the database driver", ex);
|
||||
} catch (Throwable unexpected) {
|
||||
LOGGER.debug(
|
||||
"An unexpected throwable occurred unloading the database driver", unexpected);
|
||||
}
|
||||
DriverLoader.cleanup(driver);
|
||||
driver = null;
|
||||
}
|
||||
connectionString = null;
|
||||
@@ -226,7 +219,7 @@ public final class ConnectionFactory {
|
||||
* @throws DatabaseException thrown if there is an exception loading the
|
||||
* database connection
|
||||
*/
|
||||
public static Connection getConnection() throws DatabaseException {
|
||||
public synchronized Connection getConnection() throws DatabaseException {
|
||||
initialize();
|
||||
Connection conn = null;
|
||||
try {
|
||||
@@ -246,22 +239,57 @@ public final class ConnectionFactory {
|
||||
* @throws IOException thrown if the data directory does not exist and
|
||||
* cannot be created
|
||||
*/
|
||||
public static boolean h2DataFileExists() throws IOException {
|
||||
final File dir = Settings.getDataDirectory();
|
||||
final String fileName = Settings.getString(Settings.KEYS.DB_FILE_NAME);
|
||||
final File file = new File(dir, fileName);
|
||||
public boolean h2DataFileExists() throws IOException {
|
||||
return h2DataFileExists(settings);
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if the H2 database file exists. If it does not exist then the
|
||||
* data structure will need to be created.
|
||||
*
|
||||
* @param configuration the configured settings
|
||||
* @return true if the H2 database file does not exist; otherwise false
|
||||
* @throws IOException thrown if the data directory does not exist and
|
||||
* cannot be created
|
||||
*/
|
||||
public static boolean h2DataFileExists(Settings configuration) throws IOException {
|
||||
final File file = getH2DataFile(configuration);
|
||||
return file.exists();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a reference to the H2 database file.
|
||||
*
|
||||
* @param configuration the configured settings
|
||||
* @return the path to the H2 database file
|
||||
* @throws IOException thrown if there is an error
|
||||
*/
|
||||
public static File getH2DataFile(Settings configuration) throws IOException {
|
||||
final File dir = configuration.getDataDirectory();
|
||||
final String fileName = configuration.getString(Settings.KEYS.DB_FILE_NAME);
|
||||
final File file = new File(dir, fileName);
|
||||
return file;
|
||||
}
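The overloads above let static callers pass in the Settings they were configured with. A small sketch of how the two checks might combine when deciding whether the H2 schema still has to be created (variable name invented):

    // Only relevant for H2: if the data file is missing, the schema has to be built.
    final boolean createSchema = ConnectionFactory.isH2Connection(settings)
            && !ConnectionFactory.h2DataFileExists(settings); // may throw IOException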
/**
|
||||
* Determines if the connection string is for an H2 database.
|
||||
*
|
||||
* @return true if the connection string is for an H2 database
|
||||
*/
|
||||
public static boolean isH2Connection() {
|
||||
public boolean isH2Connection() {
|
||||
return isH2Connection(settings);
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if the connection string is for an H2 database.
|
||||
*
|
||||
* @param configuration the configured settings
|
||||
* @return true if the connection string is for an H2 database
|
||||
*/
|
||||
public static boolean isH2Connection(Settings configuration) {
|
||||
String connStr;
|
||||
try {
|
||||
connStr = Settings.getConnectionString(
|
||||
connStr = configuration.getConnectionString(
|
||||
Settings.KEYS.DB_CONNECTION_STRING,
|
||||
Settings.KEYS.DB_FILE_NAME);
|
||||
} catch (IOException ex) {
|
||||
@@ -278,7 +306,7 @@ public final class ConnectionFactory {
|
||||
* @param conn the database connection
|
||||
* @throws DatabaseException thrown if there is a Database Exception
|
||||
*/
|
||||
private static void createTables(Connection conn) throws DatabaseException {
|
||||
private void createTables(Connection conn) throws DatabaseException {
|
||||
LOGGER.debug("Creating database structure");
|
||||
InputStream is = null;
|
||||
try {
|
||||
@@ -315,7 +343,7 @@ public final class ConnectionFactory {
|
||||
* @throws DatabaseException thrown if there is an exception upgrading the
|
||||
* database schema
|
||||
*/
|
||||
private static void updateSchema(Connection conn, DependencyVersion appExpectedVersion, DependencyVersion currentDbVersion)
|
||||
private void updateSchema(Connection conn, DependencyVersion appExpectedVersion, DependencyVersion currentDbVersion)
|
||||
throws DatabaseException {
|
||||
|
||||
final String databaseProductName;
|
||||
@@ -363,7 +391,7 @@ public final class ConnectionFactory {
|
||||
final int c1 = Integer.parseInt(currentDbVersion.getVersionParts().get(1));
|
||||
if (e0 == c0 && e1 < c1) {
|
||||
LOGGER.warn("A new version of dependency-check is available; consider upgrading");
|
||||
Settings.setBoolean(Settings.KEYS.AUTO_UPDATE, false);
|
||||
settings.setBoolean(Settings.KEYS.AUTO_UPDATE, false);
|
||||
} else if (e0 == c0 && e1 == c1) {
|
||||
//do nothing - not sure how we got here, but just in case...
|
||||
} else {
|
||||
@@ -382,7 +410,7 @@ public final class ConnectionFactory {
|
||||
* @throws DatabaseException thrown if the schema version is not compatible
|
||||
* with this version of dependency-check
|
||||
*/
|
||||
private static void ensureSchemaVersion(Connection conn) throws DatabaseException {
|
||||
private void ensureSchemaVersion(Connection conn) throws DatabaseException {
|
||||
ResultSet rs = null;
|
||||
PreparedStatement ps = null;
|
||||
try {
|
||||
@@ -390,7 +418,8 @@ public final class ConnectionFactory {
|
||||
ps = conn.prepareStatement("SELECT value FROM properties WHERE id = 'version'");
|
||||
rs = ps.executeQuery();
|
||||
if (rs.next()) {
|
||||
final DependencyVersion appDbVersion = DependencyVersionUtil.parseVersion(DB_SCHEMA_VERSION);
|
||||
final String dbSchemaVersion = settings.getString(Settings.KEYS.DB_VERSION);
|
||||
final DependencyVersion appDbVersion = DependencyVersionUtil.parseVersion(dbSchemaVersion);
|
||||
if (appDbVersion == null) {
|
||||
throw new DatabaseException("Invalid application database schema");
|
||||
}
|
||||
@@ -399,7 +428,7 @@ public final class ConnectionFactory {
|
||||
throw new DatabaseException("Invalid database schema");
|
||||
}
|
||||
if (appDbVersion.compareTo(db) > 0) {
|
||||
LOGGER.debug("Current Schema: {}", DB_SCHEMA_VERSION);
|
||||
LOGGER.debug("Current Schema: {}", dbSchemaVersion);
|
||||
LOGGER.debug("DB Schema: {}", rs.getString(1));
|
||||
updateSchema(conn, appDbVersion, db);
|
||||
if (++callDepth < 10) {
|
||||
|
||||
@@ -17,11 +17,15 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.data.nvdcve;
|
||||
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* An exception used to indicate the db4o database is corrupt. This could be due to invalid data or a complete failure of the db.
|
||||
* An exception used to indicate the db4o database is corrupt. This could be due
|
||||
* to invalid data or a complete failure of the db.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class CorruptDatabaseException extends DatabaseException {
|
||||
|
||||
/**
|
||||
|
||||
@@ -50,9 +50,11 @@ import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
//CSOFF: AvoidStarImport
|
||||
import static org.owasp.dependencycheck.data.nvdcve.CveDB.PreparedStatementCveDb.*;
|
||||
//CSON: AvoidStarImport
|
||||
import static org.apache.commons.collections.map.AbstractReferenceMap.HARD;
|
||||
import static org.apache.commons.collections.map.AbstractReferenceMap.SOFT;
|
||||
import static org.owasp.dependencycheck.data.nvdcve.CveDB.PreparedStatementCveDb.*;
|
||||
|
||||
/**
|
||||
* The database holding information about the NVD CVE data. This class is safe
|
||||
@@ -64,19 +66,14 @@ import static org.owasp.dependencycheck.data.nvdcve.CveDB.PreparedStatementCveDb
|
||||
@ThreadSafe
|
||||
public final class CveDB implements AutoCloseable {
|
||||
|
||||
/**
|
||||
* Singleton instance of the CveDB.
|
||||
*/
|
||||
private static CveDB instance = null;
|
||||
/**
|
||||
* Track the number of current users of the CveDB; so that if someone is
|
||||
* using database another user cannot close the connection on them.
|
||||
*/
|
||||
private int usageCount = 0;
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(CveDB.class);
|
||||
/**
|
||||
* The database connection factory.
|
||||
*/
|
||||
private final ConnectionFactory connectionFactory;
|
||||
/**
|
||||
* Database connection
|
||||
*/
|
||||
@@ -100,6 +97,10 @@ public final class CveDB implements AutoCloseable {
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
private final Map<String, List<Vulnerability>> vulnerabilitiesForCpeCache = Collections.synchronizedMap(new ReferenceMap(HARD, SOFT));
|
||||
/**
|
||||
* The configured settings
|
||||
*/
|
||||
private final Settings settings;
|
||||
|
||||
/**
|
||||
* The enum value names must match the keys of the statements in the
|
||||
@@ -196,31 +197,18 @@ public final class CveDB implements AutoCloseable {
UPDATE_VULNERABILITY
}

/**
* Gets the CveDB singleton object.
*
* @return the CveDB singleton
* @throws DatabaseException thrown if there is a database error
*/
public static synchronized CveDB getInstance() throws DatabaseException {
if (instance == null) {
instance = new CveDB();
}
if (!instance.isOpen()) {
instance.open();
}
instance.usageCount += 1;
return instance;
}

/**
* Creates a new CveDB object and opens the database connection. Note, the
* connection must be closed by the caller by calling the close method.
*
* @param settings the configured settings
* @throws DatabaseException thrown if there is an exception opening the
* database.
*/
private CveDB() throws DatabaseException {
public CveDB(Settings settings) throws DatabaseException {
this.settings = settings;
connectionFactory = new ConnectionFactory(settings);
open();
}
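With the singleton accessor and usage counter removed above, each caller now constructs and closes its own instance. A minimal sketch, assuming only the public constructor and the AutoCloseable close() shown in this change:

    // Sketch: try-with-resources replaces the old getInstance()/usageCount bookkeeping.
    try (CveDB cveDb = new CveDB(settings)) {
        final DatabaseProperties properties = cveDb.getDatabaseProperties();
        // ... query or update vulnerability data ...
    }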
/**
|
||||
@@ -229,7 +217,7 @@ public final class CveDB implements AutoCloseable {
|
||||
* @param conn the database connection
|
||||
* @return the product name of the database if successful, {@code null} else
|
||||
*/
|
||||
private static String determineDatabaseProductName(Connection conn) {
|
||||
private String determineDatabaseProductName(Connection conn) {
|
||||
try {
|
||||
final String databaseProductName = conn.getMetaData().getDatabaseProductName().toLowerCase();
|
||||
LOGGER.debug("Database product: {}", databaseProductName);
|
||||
@@ -240,16 +228,6 @@ public final class CveDB implements AutoCloseable {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Method added for testing, returns the current usage count of the CveDB
|
||||
* singleton.
|
||||
*
|
||||
* @return the current usage of the CveDB singleton
|
||||
*/
|
||||
protected synchronized int getUsageCount() {
|
||||
return usageCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens the database connection. If the database does not exist, it will
|
||||
* create a new one.
|
||||
@@ -259,14 +237,14 @@ public final class CveDB implements AutoCloseable {
|
||||
*/
|
||||
private synchronized void open() throws DatabaseException {
|
||||
try {
|
||||
if (!instance.isOpen()) {
|
||||
instance.connection = ConnectionFactory.getConnection();
|
||||
final String databaseProductName = determineDatabaseProductName(instance.connection);
|
||||
instance.statementBundle = databaseProductName != null
|
||||
if (!isOpen()) {
|
||||
connection = connectionFactory.getConnection();
|
||||
final String databaseProductName = determineDatabaseProductName(this.connection);
|
||||
statementBundle = databaseProductName != null
|
||||
? ResourceBundle.getBundle("data/dbStatements", new Locale(databaseProductName))
|
||||
: ResourceBundle.getBundle("data/dbStatements");
|
||||
instance.prepareStatements();
|
||||
instance.databaseProperties = new DatabaseProperties(instance);
|
||||
prepareStatements();
|
||||
databaseProperties = new DatabaseProperties(this);
|
||||
}
|
||||
} catch (DatabaseException e) {
|
||||
releaseResources();
|
||||
@@ -280,23 +258,20 @@ public final class CveDB implements AutoCloseable {
|
||||
*/
|
||||
@Override
|
||||
public synchronized void close() {
|
||||
if (instance != null) {
|
||||
instance.usageCount -= 1;
|
||||
if (instance.usageCount <= 0 && instance.isOpen()) {
|
||||
instance.usageCount = 0;
|
||||
clearCache();
|
||||
instance.closeStatements();
|
||||
try {
|
||||
instance.connection.close();
|
||||
} catch (SQLException ex) {
|
||||
LOGGER.error("There was an error attempting to close the CveDB, see the log for more details.");
|
||||
LOGGER.debug("", ex);
|
||||
} catch (Throwable ex) {
|
||||
LOGGER.error("There was an exception attempting to close the CveDB, see the log for more details.");
|
||||
LOGGER.debug("", ex);
|
||||
}
|
||||
releaseResources();
|
||||
if (isOpen()) {
|
||||
clearCache();
|
||||
closeStatements();
|
||||
try {
|
||||
connection.close();
|
||||
} catch (SQLException ex) {
|
||||
LOGGER.error("There was an error attempting to close the CveDB, see the log for more details.");
|
||||
LOGGER.debug("", ex);
|
||||
} catch (Throwable ex) {
|
||||
LOGGER.error("There was an exception attempting to close the CveDB, see the log for more details.");
|
||||
LOGGER.debug("", ex);
|
||||
}
|
||||
releaseResources();
|
||||
connectionFactory.cleanup();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -304,10 +279,10 @@ public final class CveDB implements AutoCloseable {
|
||||
* Releases the resources used by CveDB.
|
||||
*/
|
||||
private synchronized void releaseResources() {
|
||||
instance.statementBundle = null;
|
||||
instance.preparedStatements.clear();
|
||||
instance.databaseProperties = null;
|
||||
instance.connection = null;
|
||||
statementBundle = null;
|
||||
preparedStatements.clear();
|
||||
databaseProperties = null;
|
||||
connection = null;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -379,6 +354,7 @@ public final class CveDB implements AutoCloseable {
|
||||
*
|
||||
* @throws SQLException thrown if a SQL Exception occurs
|
||||
*/
|
||||
@SuppressWarnings("EmptyMethod")
|
||||
public synchronized void commit() throws SQLException {
|
||||
//temporary remove this as autocommit is on.
|
||||
//if (isOpen()) {
|
||||
@@ -539,7 +515,7 @@ public final class CveDB implements AutoCloseable {
|
||||
*
|
||||
* It should be also called when DB is closed.
|
||||
*/
|
||||
private void clearCache() {
|
||||
private synchronized void clearCache() {
|
||||
vulnerabilitiesForCpeCache.clear();
|
||||
}
|
||||
|
||||
@@ -836,15 +812,15 @@ public final class CveDB implements AutoCloseable {
|
||||
} catch (Exception ex) {
|
||||
String dd;
|
||||
try {
|
||||
dd = Settings.getDataDirectory().getAbsolutePath();
|
||||
dd = settings.getDataDirectory().getAbsolutePath();
|
||||
} catch (IOException ex1) {
|
||||
dd = Settings.getString(Settings.KEYS.DATA_DIRECTORY);
|
||||
dd = settings.getString(Settings.KEYS.DATA_DIRECTORY);
|
||||
}
|
||||
LOGGER.error("Unable to access the local database.\n\nEnsure that '{}' is a writable directory. "
|
||||
+ "If the problem persist try deleting the files in '{}' and running {} again. If the problem continues, please "
|
||||
+ "create a log file (see documentation at http://jeremylong.github.io/DependencyCheck/) and open a ticket at "
|
||||
+ "https://github.com/jeremylong/DependencyCheck/issues and include the log file.\n\n",
|
||||
dd, dd, Settings.getString(Settings.KEYS.APPLICATION_NAME));
|
||||
dd, dd, settings.getString(Settings.KEYS.APPLICATION_NAME));
|
||||
LOGGER.debug("", ex);
|
||||
} finally {
|
||||
DBUtils.closeResultSet(rs);
|
||||
|
||||
@@ -17,11 +17,14 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.data.nvdcve;
|
||||
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* An exception thrown if an operation against the database fails.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class DatabaseException extends RuntimeException {
|
||||
|
||||
/**
|
||||
|
||||
@@ -97,7 +97,7 @@ public class DatabaseProperties {
|
||||
*
|
||||
* @return whether or not any properties are set
|
||||
*/
|
||||
public boolean isEmpty() {
|
||||
public synchronized boolean isEmpty() {
|
||||
return properties == null || properties.isEmpty();
|
||||
}
|
||||
|
||||
@@ -107,7 +107,7 @@ public class DatabaseProperties {
|
||||
* @param updatedValue the updated NVD CVE entry
|
||||
* @throws UpdateException is thrown if there is an update exception
|
||||
*/
|
||||
public void save(NvdCveInfo updatedValue) throws UpdateException {
|
||||
public synchronized void save(NvdCveInfo updatedValue) throws UpdateException {
|
||||
if (updatedValue == null) {
|
||||
return;
|
||||
}
|
||||
@@ -121,7 +121,7 @@ public class DatabaseProperties {
|
||||
* @param value the property value
|
||||
* @throws UpdateException is thrown if there is an update exception
|
||||
*/
|
||||
public void save(String key, String value) throws UpdateException {
|
||||
public synchronized void save(String key, String value) throws UpdateException {
|
||||
properties.put(key, value);
|
||||
cveDB.saveProperty(key, value);
|
||||
}
|
||||
@@ -133,7 +133,7 @@ public class DatabaseProperties {
|
||||
* @param key the property key
|
||||
* @return the value of the property
|
||||
*/
|
||||
public String getProperty(String key) {
|
||||
public synchronized String getProperty(String key) {
|
||||
return properties.getProperty(key);
|
||||
}
|
||||
|
||||
@@ -145,7 +145,7 @@ public class DatabaseProperties {
|
||||
* @param defaultValue the default value
|
||||
* @return the value of the property
|
||||
*/
|
||||
public String getProperty(String key, String defaultValue) {
|
||||
public synchronized String getProperty(String key, String defaultValue) {
|
||||
return properties.getProperty(key, defaultValue);
|
||||
}
|
||||
|
||||
@@ -154,7 +154,7 @@ public class DatabaseProperties {
|
||||
*
|
||||
* @return the collection of Database Properties
|
||||
*/
|
||||
public Properties getProperties() {
|
||||
public synchronized Properties getProperties() {
|
||||
return properties;
|
||||
}
|
||||
|
||||
@@ -165,7 +165,7 @@ public class DatabaseProperties {
|
||||
*
|
||||
* @return a map of the database meta data
|
||||
*/
|
||||
public Map<String, String> getMetaData() {
|
||||
public synchronized Map<String, String> getMetaData() {
|
||||
final Map<String, String> map = new TreeMap<>();
|
||||
for (Entry<Object, Object> entry : properties.entrySet()) {
|
||||
final String key = (String) entry.getKey();
|
||||
|
||||
@@ -17,11 +17,14 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.data.nvdcve;
|
||||
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* An exception thrown the database driver is unable to be loaded.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class DriverLoadException extends Exception {
|
||||
|
||||
/**
|
||||
|
||||
@@ -31,12 +31,14 @@ import java.sql.DriverManager;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* DriverLoader is a utility class that is used to load database drivers.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public final class DriverLoader {
|
||||
|
||||
/**
|
||||
@@ -44,6 +46,21 @@ public final class DriverLoader {
*/
private static final Logger LOGGER = LoggerFactory.getLogger(DriverLoader.class);

/**
* De-registers the driver.
*
* @param driver the driver to de-register
*/
public static void cleanup(Driver driver) {
try {
DriverManager.deregisterDriver(driver);
} catch (SQLException ex) {
LOGGER.debug("An error occurred unloading the database driver", ex);
} catch (Throwable unexpected) {
LOGGER.debug("An unexpected throwable occurred unloading the database driver", unexpected);
}
}
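The new static cleanup(Driver) helper above centralizes driver de-registration. A sketch of the intended load/cleanup pairing, with the driver class name purely illustrative:

    Driver driver = null;
    try {
        driver = DriverLoader.load("org.h2.Driver"); // class name for illustration only
        // ... hand out connections via DriverManager ...
    } catch (DriverLoadException ex) {
        // report the failure
    } finally {
        if (driver != null) {
            DriverLoader.cleanup(driver); // mirrors what ConnectionFactory.cleanup() now does
        }
    }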
/**
|
||||
* Private constructor for a utility class.
|
||||
*/
|
||||
@@ -51,25 +68,30 @@ public final class DriverLoader {
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads the specified class using the system class loader and registers the driver with the driver manager.
|
||||
* Loads the specified class using the system class loader and registers the
|
||||
* driver with the driver manager.
|
||||
*
|
||||
* @param className the fully qualified name of the desired class
|
||||
* @return the loaded Driver
|
||||
* @throws DriverLoadException thrown if the driver cannot be loaded
|
||||
*/
|
||||
public static Driver load(String className) throws DriverLoadException {
|
||||
final ClassLoader loader = DriverLoader.class.getClassLoader(); //ClassLoader.getSystemClassLoader();
|
||||
final ClassLoader loader = DriverLoader.class.getClassLoader();
|
||||
return load(className, loader);
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads the specified class by registering the supplied paths to the class loader and then registers the driver with the
|
||||
* driver manager. The pathToDriver argument is added to the class loader so that an external driver can be loaded. Note, the
|
||||
* pathToDriver can contain a semi-colon separated list of paths so any dependencies can be added as needed. If a path in the
|
||||
* pathToDriver argument is a directory all files in the directory are added to the class path.
|
||||
* Loads the specified class by registering the supplied paths to the class
|
||||
* loader and then registers the driver with the driver manager. The
|
||||
* pathToDriver argument is added to the class loader so that an external
|
||||
* driver can be loaded. Note, the pathToDriver can contain a semi-colon
|
||||
* separated list of paths so any dependencies can be added as needed. If a
|
||||
* path in the pathToDriver argument is a directory all files in the
|
||||
* directory are added to the class path.
|
||||
*
|
||||
* @param className the fully qualified name of the desired class
|
||||
* @param pathToDriver the path to the JAR file containing the driver; note, this can be a semi-colon separated list of paths
|
||||
* @param pathToDriver the path to the JAR file containing the driver; note,
|
||||
* this can be a semi-colon separated list of paths
|
||||
* @return the loaded Driver
|
||||
* @throws DriverLoadException thrown if the driver cannot be loaded
|
||||
*/
|
||||
@@ -113,7 +135,8 @@ public final class DriverLoader {
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads the specified class using the supplied class loader and registers the driver with the driver manager.
|
||||
* Loads the specified class using the supplied class loader and registers
|
||||
* the driver with the driver manager.
|
||||
*
|
||||
* @param className the fully qualified name of the desired class
|
||||
* @param loader the class loader to use when loading the driver
|
||||
@@ -125,6 +148,8 @@ public final class DriverLoader {
|
||||
final Class c = Class.forName(className, true, loader);
|
||||
//final Class c = loader.loadClass(className);
|
||||
final Driver driver = (Driver) c.newInstance();
|
||||
|
||||
//TODO add usage count so we don't de-register a driver that is in use.
|
||||
final Driver shim = new DriverShim(driver);
|
||||
//using the DriverShim to get around the fact that the DriverManager won't register a driver not in the base class path
|
||||
DriverManager.registerDriver(shim);
|
||||
|
||||
@@ -28,16 +28,19 @@ import java.sql.DriverPropertyInfo;
|
||||
import java.sql.SQLException;
|
||||
import java.sql.SQLFeatureNotSupportedException;
|
||||
import java.util.Properties;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Driver shim to get around the class loader issue with the DriverManager. The following code is a nearly identical
|
||||
* copy (with more comments and a few more methods implemented) of the DriverShim from:</p>
|
||||
* Driver shim to get around the class loader issue with the DriverManager. The
|
||||
* following code is a nearly identical copy (with more comments and a few more
|
||||
* methods implemented) of the DriverShim from:</p>
|
||||
* <blockquote>http://www.kfu.com/~nsayer/Java/dyn-jdbc.html</blockquote>
|
||||
*
|
||||
* @author Jeremy Long
|
||||
* @see java.sql.Driver
|
||||
*/
|
||||
@ThreadSafe
|
||||
class DriverShim implements Driver {
|
||||
|
||||
/**
|
||||
@@ -59,12 +62,13 @@ class DriverShim implements Driver {
|
||||
}
|
||||
|
||||
/**
|
||||
* Wraps the underlying driver's call to acceptsURL. Returns whether or not the driver can open a connection to the
|
||||
* given URL.
|
||||
* Wraps the underlying driver's call to acceptsURL. Returns whether or not
|
||||
* the driver can open a connection to the given URL.
|
||||
*
|
||||
* @param url the URL of the database
|
||||
* @return true if the wrapped driver can connect to the specified URL
|
||||
* @throws SQLException thrown if there is an error connecting to the database
|
||||
* @throws SQLException thrown if there is an error connecting to the
|
||||
* database
|
||||
* @see java.sql.Driver#acceptsURL(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
@@ -78,7 +82,8 @@ class DriverShim implements Driver {
|
||||
* @param url the URL of the database
|
||||
* @param info a collection of string/value pairs
|
||||
* @return a Connection object
|
||||
* @throws SQLException thrown if there is an error connecting to the database
|
||||
* @throws SQLException thrown if there is an error connecting to the
|
||||
* database
|
||||
* @see java.sql.Driver#connect(java.lang.String, java.util.Properties)
|
||||
*/
|
||||
@Override
|
||||
@@ -112,7 +117,8 @@ class DriverShim implements Driver {
|
||||
* Wraps the call to the underlying driver's getParentLogger method.
|
||||
*
|
||||
* @return the parent's Logger
|
||||
* @throws SQLFeatureNotSupportedException thrown if the feature is not supported
|
||||
* @throws SQLFeatureNotSupportedException thrown if the feature is not
|
||||
* supported
|
||||
* @see java.sql.Driver#getParentLogger()
|
||||
*/
|
||||
public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException {
|
||||
@@ -140,7 +146,8 @@ class DriverShim implements Driver {
|
||||
* @param info a collection of string/value pairs
|
||||
* @return an array of DriverPropertyInfo objects
|
||||
* @throws SQLException thrown if there is an error accessing the database
|
||||
* @see java.sql.Driver#getPropertyInfo(java.lang.String, java.util.Properties)
|
||||
* @see java.sql.Driver#getPropertyInfo(java.lang.String,
|
||||
* java.util.Properties)
|
||||
*/
|
||||
@Override
|
||||
public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException {
|
||||
|
||||
@@ -17,21 +17,25 @@
*/
package org.owasp.dependencycheck.data.update;

import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.data.update.exception.UpdateException;

/**
* Defines a data source who's data is retrieved from the Internet. This data can be downloaded and the local cache
* updated.
* Defines a data source who's data is retrieved from the Internet. This data
* can be downloaded and the local cache updated.
*
* @author Jeremy Long
*/
public interface CachedWebDataSource {

/**
* Determines if an update to the current data store is needed, if it is the new data is downloaded from the
* Internet and imported into the current cached data store.
* Determines if an update to the current data store is needed, if it is the
* new data is downloaded from the Internet and imported into the current
* cached data store.
*
* @throws UpdateException is thrown if there is an exception downloading the data or updating the data store.
* @param engine a reference to the dependency-check engine
* @throws UpdateException is thrown if there is an exception downloading
* the data or updating the data store.
*/
void update() throws UpdateException;
void update(Engine engine) throws UpdateException;
}
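The interface change above threads the Engine through to each data source instead of relying on static singletons. A hypothetical implementation sketch (class name invented; the getSettings()/getDatabase() accessors are the ones used elsewhere in this diff):

    @ThreadSafe
    public class ExampleDataSource implements CachedWebDataSource {

        @Override
        public void update(Engine engine) throws UpdateException {
            final Settings settings = engine.getSettings();
            final CveDB cveDb = engine.getDatabase();
            // ... download the feed and import it via cveDb ...
        }
    }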
@@ -21,7 +21,9 @@ import java.io.IOException;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
|
||||
@@ -43,6 +45,7 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class EngineVersionCheck implements CachedWebDataSource {
|
||||
|
||||
/**
|
||||
@@ -62,6 +65,25 @@ public class EngineVersionCheck implements CachedWebDataSource {
|
||||
* against.
|
||||
*/
|
||||
private String updateToVersion;
|
||||
/**
|
||||
* The configured settings.
|
||||
*/
|
||||
private Settings settings;
|
||||
|
||||
/**
|
||||
* Constructs a new engine version check utility for testing.
|
||||
*
|
||||
* @param settings the configured settings
|
||||
*/
|
||||
protected EngineVersionCheck(Settings settings) {
|
||||
this.settings = settings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new engine version check utility.
|
||||
*/
|
||||
public EngineVersionCheck() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Getter for updateToVersion - only used for testing. Represents the
|
||||
@@ -92,12 +114,14 @@ public class EngineVersionCheck implements CachedWebDataSource {
|
||||
* be updated
|
||||
*/
|
||||
@Override
|
||||
public void update() throws UpdateException {
|
||||
try (CveDB db = CveDB.getInstance()) {
|
||||
final boolean autoupdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE, true);
|
||||
final boolean enabled = Settings.getBoolean(Settings.KEYS.UPDATE_VERSION_CHECK_ENABLED, true);
|
||||
final String original = Settings.getString(Settings.KEYS.CVE_ORIGINAL_MODIFIED_20_URL);
|
||||
final String current = Settings.getString(Settings.KEYS.CVE_MODIFIED_20_URL);
|
||||
public void update(Engine engine) throws UpdateException {
|
||||
this.settings = engine.getSettings();
|
||||
try {
|
||||
final CveDB db = engine.getDatabase();
|
||||
final boolean autoupdate = settings.getBoolean(Settings.KEYS.AUTO_UPDATE, true);
|
||||
final boolean enabled = settings.getBoolean(Settings.KEYS.UPDATE_VERSION_CHECK_ENABLED, true);
|
||||
final String original = settings.getString(Settings.KEYS.CVE_ORIGINAL_MODIFIED_20_URL);
|
||||
final String current = settings.getString(Settings.KEYS.CVE_MODIFIED_20_URL);
|
||||
/*
|
||||
* Only update if auto-update is enabled, the engine check is
|
||||
* enabled, and the NVD CVE URLs have not been modified (i.e. the
|
||||
@@ -111,7 +135,7 @@ public class EngineVersionCheck implements CachedWebDataSource {
|
||||
final long lastChecked = Long.parseLong(properties.getProperty(ENGINE_VERSION_CHECKED_ON, "0"));
|
||||
final long now = System.currentTimeMillis();
|
||||
updateToVersion = properties.getProperty(CURRENT_ENGINE_RELEASE, "");
|
||||
final String currentVersion = Settings.getString(Settings.KEYS.APPLICATION_VERSION, "0.0.0");
|
||||
final String currentVersion = settings.getString(Settings.KEYS.APPLICATION_VERSION, "0.0.0");
|
||||
LOGGER.debug("Last checked: {}", lastChecked);
|
||||
LOGGER.debug("Now: {}", now);
|
||||
LOGGER.debug("Current version: {}", currentVersion);
|
||||
@@ -184,9 +208,10 @@ public class EngineVersionCheck implements CachedWebDataSource {
|
||||
protected String getCurrentReleaseVersion() {
|
||||
HttpURLConnection conn = null;
|
||||
try {
|
||||
final String str = Settings.getString(Settings.KEYS.ENGINE_VERSION_CHECK_URL, "http://jeremylong.github.io/DependencyCheck/current.txt");
|
||||
final String str = settings.getString(Settings.KEYS.ENGINE_VERSION_CHECK_URL, "http://jeremylong.github.io/DependencyCheck/current.txt");
|
||||
final URL url = new URL(str);
|
||||
conn = URLConnectionFactory.createHttpURLConnection(url);
|
||||
final URLConnectionFactory factory = new URLConnectionFactory(settings);
|
||||
conn = factory.createHttpURLConnection(url);
|
||||
conn.connect();
|
||||
if (conn.getResponseCode() != 200) {
|
||||
return null;
|
||||
|
||||
@@ -17,9 +17,6 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.data.update;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.RandomAccessFile;
|
||||
import java.net.MalformedURLException;
|
||||
import java.util.Calendar;
|
||||
import java.util.HashMap;
|
||||
@@ -27,8 +24,6 @@ import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.net.URL;
|
||||
import java.nio.channels.FileLock;
|
||||
import java.util.Date;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
@@ -36,7 +31,8 @@ import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.ConnectionFactory;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
|
||||
@@ -47,11 +43,9 @@ import org.owasp.dependencycheck.data.update.nvd.DownloadTask;
|
||||
import org.owasp.dependencycheck.data.update.nvd.NvdCveInfo;
|
||||
import org.owasp.dependencycheck.data.update.nvd.ProcessTask;
|
||||
import org.owasp.dependencycheck.data.update.nvd.UpdateableNvdCve;
|
||||
import org.owasp.dependencycheck.exception.H2DBLockException;
|
||||
import org.owasp.dependencycheck.utils.DateUtil;
|
||||
import org.owasp.dependencycheck.utils.Downloader;
|
||||
import org.owasp.dependencycheck.utils.DownloadFailedException;
|
||||
import org.owasp.dependencycheck.utils.H2DBLock;
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
@@ -62,6 +56,7 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class NvdCveUpdater implements CachedWebDataSource {
|
||||
|
||||
/**
|
||||
@@ -85,7 +80,10 @@ public class NvdCveUpdater implements CachedWebDataSource {
|
||||
* very CPU-intense, e.g. downloading files.
|
||||
*/
|
||||
private ExecutorService downloadExecutorService = null;
|
||||
|
||||
/**
|
||||
* The configured settings.
|
||||
*/
|
||||
private Settings settings;
|
||||
/**
|
||||
* Reference to the DAO.
|
||||
*/
|
||||
@@ -101,22 +99,21 @@ public class NvdCveUpdater implements CachedWebDataSource {
* prevent more then one thread/JVM from updating the database at the same
* time. This method may sleep upto 5 minutes.
*
* @param engine a reference to the dependency-check engine
* @throws UpdateException is thrown if there is an error updating the
* database
*/
@Override
public synchronized void update() throws UpdateException {
public synchronized void update(Engine engine) throws UpdateException {
this.settings = engine.getSettings();
this.cveDb = engine.getDatabase();
if (isUpdateConfiguredFalse()) {
return;
}
H2DBLock dbupdate = new H2DBLock();
try {
dbupdate.lock();
initializeExecutorServices();
cveDb = CveDB.getInstance();
dbProperties = cveDb.getDatabaseProperties();

if (checkUpdate()) {
initializeExecutorServices();
final UpdateableNvdCve updateable = getUpdatesNeeded();
if (updateable.isUpdateNeeded()) {
performUpdate(updateable);
@@ -127,19 +124,13 @@ public class NvdCveUpdater implements CachedWebDataSource {
|
||||
throw new UpdateException("NVD CVE properties files contain an invalid URL, unable to update the data to use the most current data.", ex);
|
||||
} catch (DownloadFailedException ex) {
|
||||
LOGGER.warn("Unable to download the NVD CVE data; the results may not include the most recent CPE/CVEs from the NVD.");
|
||||
if (Settings.getString(Settings.KEYS.PROXY_SERVER) == null) {
|
||||
if (settings.getString(Settings.KEYS.PROXY_SERVER) == null) {
|
||||
LOGGER.info("If you are behind a proxy you may need to configure dependency-check to use the proxy.");
|
||||
}
|
||||
throw new UpdateException("Unable to download the NVD CVE data.", ex);
|
||||
} catch (DatabaseException ex) {
|
||||
throw new UpdateException("Database Exception, unable to update the data to use the most current data.", ex);
|
||||
} catch (H2DBLockException ex) {
|
||||
throw new UpdateException("Unable to obtain an exclusive lock on the H2 database to perform updates", ex);
|
||||
} finally {
|
||||
if (cveDb != null) {
|
||||
cveDb.close();
|
||||
}
|
||||
dbupdate.release();
|
||||
shutdownExecutorServices();
|
||||
}
|
||||
}
|
||||
@@ -152,7 +143,7 @@ public class NvdCveUpdater implements CachedWebDataSource {
|
||||
*/
|
||||
private boolean isUpdateConfiguredFalse() {
|
||||
try {
|
||||
if (!Settings.getBoolean(Settings.KEYS.UPDATE_NVDCVE_ENABLED, true)) {
|
||||
if (!settings.getBoolean(Settings.KEYS.UPDATE_NVDCVE_ENABLED, true)) {
|
||||
return true;
|
||||
}
|
||||
} catch (InvalidSettingException ex) {
|
||||
@@ -160,7 +151,7 @@ public class NvdCveUpdater implements CachedWebDataSource {
|
||||
}
|
||||
boolean autoUpdate = true;
|
||||
try {
|
||||
autoUpdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
|
||||
autoUpdate = settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
|
||||
} catch (InvalidSettingException ex) {
|
||||
LOGGER.debug("Invalid setting for auto-update; using true.");
|
||||
}
|
||||
@@ -204,7 +195,7 @@ public class NvdCveUpdater implements CachedWebDataSource {
|
||||
private boolean checkUpdate() throws UpdateException {
|
||||
boolean proceed = true;
|
||||
// If the valid setting has not been specified, then we proceed to check...
|
||||
final int validForHours = Settings.getInt(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, 0);
|
||||
final int validForHours = settings.getInt(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, 0);
|
||||
if (dataExists() && 0 < validForHours) {
|
||||
// ms Valid = valid (hours) x 60 min/hour x 60 sec/min x 1000 ms/sec
|
||||
final long msValid = validForHours * 60L * 60L * 1000L;
|
||||
@@ -213,8 +204,7 @@ public class NvdCveUpdater implements CachedWebDataSource {
|
||||
proceed = (now - lastChecked) > msValid;
|
||||
if (!proceed) {
|
||||
LOGGER.info("Skipping NVD check since last check was within {} hours.", validForHours);
|
||||
LOGGER.debug("Last NVD was at {}, and now {} is within {} ms.",
|
||||
lastChecked, now, msValid);
|
||||
LOGGER.debug("Last NVD was at {}, and now {} is within {} ms.", lastChecked, now, msValid);
|
||||
}
|
||||
}
|
||||
return proceed;
|
||||
@@ -226,11 +216,7 @@ public class NvdCveUpdater implements CachedWebDataSource {
|
||||
* @return true if the database contains data
|
||||
*/
|
||||
private boolean dataExists() {
|
||||
try (CveDB cve = CveDB.getInstance()) {
|
||||
return cve.dataExists();
|
||||
} catch (DatabaseException ex) {
|
||||
return false;
|
||||
}
|
||||
return cveDb.dataExists();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -259,7 +245,7 @@ public class NvdCveUpdater implements CachedWebDataSource {
|
||||
final Set<Future<Future<ProcessTask>>> downloadFutures = new HashSet<>(maxUpdates);
|
||||
for (NvdCveInfo cve : updateable) {
|
||||
if (cve.getNeedsUpdate()) {
|
||||
final DownloadTask call = new DownloadTask(cve, processingExecutorService, cveDb, Settings.getInstance());
|
||||
final DownloadTask call = new DownloadTask(cve, processingExecutorService, cveDb, settings);
|
||||
downloadFutures.add(downloadExecutorService.submit(call));
|
||||
}
|
||||
}
|
||||
@@ -303,12 +289,12 @@ public class NvdCveUpdater implements CachedWebDataSource {
|
||||
}
|
||||
|
||||
//always true because <=0 exits early above
|
||||
//if (maxUpdates >= 1) {
|
||||
//ensure the modified file date gets written (we may not have actually updated it)
|
||||
dbProperties.save(updateable.get(MODIFIED));
|
||||
LOGGER.info("Begin database maintenance.");
|
||||
cveDb.cleanupDatabase();
|
||||
LOGGER.info("End database maintenance.");
|
||||
//if (maxUpdates >= 1) {
|
||||
//ensure the modified file date gets written (we may not have actually updated it)
|
||||
dbProperties.save(updateable.get(MODIFIED));
|
||||
LOGGER.info("Begin database maintenance.");
|
||||
cveDb.cleanupDatabase();
|
||||
LOGGER.info("End database maintenance.");
|
||||
//}
|
||||
}
|
||||
|
||||
@@ -345,7 +331,7 @@ public class NvdCveUpdater implements CachedWebDataSource {
|
||||
}
|
||||
if (dbProperties != null && !dbProperties.isEmpty()) {
|
||||
try {
|
||||
final int startYear = Settings.getInt(Settings.KEYS.CVE_START_YEAR, 2002);
|
||||
final int startYear = settings.getInt(Settings.KEYS.CVE_START_YEAR, 2002);
|
||||
final int endYear = Calendar.getInstance().get(Calendar.YEAR);
|
||||
boolean needsFullUpdate = false;
|
||||
for (int y = startYear; y <= endYear; y++) {
|
||||
@@ -357,7 +343,7 @@ public class NvdCveUpdater implements CachedWebDataSource {
|
||||
|
||||
final long lastUpdated = Long.parseLong(dbProperties.getProperty(DatabaseProperties.LAST_UPDATED, "0"));
|
||||
final long now = System.currentTimeMillis();
|
||||
final int days = Settings.getInt(Settings.KEYS.CVE_MODIFIED_VALID_FOR_DAYS, 7);
|
||||
final int days = settings.getInt(Settings.KEYS.CVE_MODIFIED_VALID_FOR_DAYS, 7);
|
||||
if (!needsFullUpdate && lastUpdated == updates.getTimeStamp(MODIFIED)) {
|
||||
updates.clear(); //we don't need to update anything.
|
||||
} else if (!needsFullUpdate && DateUtil.withinDateRange(lastUpdated, now, days)) {
|
||||
@@ -410,25 +396,24 @@ public class NvdCveUpdater implements CachedWebDataSource {
|
||||
private UpdateableNvdCve retrieveCurrentTimestampsFromWeb()
|
||||
throws MalformedURLException, DownloadFailedException, InvalidDataException, InvalidSettingException {
|
||||
|
||||
final int start = Settings.getInt(Settings.KEYS.CVE_START_YEAR);
|
||||
final int start = settings.getInt(Settings.KEYS.CVE_START_YEAR);
|
||||
final int end = Calendar.getInstance().get(Calendar.YEAR);
|
||||
|
||||
final Map<String, Long> lastModifiedDates = retrieveLastModifiedDates(start, end);
|
||||
|
||||
final UpdateableNvdCve updates = new UpdateableNvdCve();
|
||||
|
||||
final String baseUrl20 = Settings.getString(Settings.KEYS.CVE_SCHEMA_2_0);
|
||||
final String baseUrl12 = Settings.getString(Settings.KEYS.CVE_SCHEMA_1_2);
|
||||
final String baseUrl20 = settings.getString(Settings.KEYS.CVE_SCHEMA_2_0);
|
||||
final String baseUrl12 = settings.getString(Settings.KEYS.CVE_SCHEMA_1_2);
|
||||
for (int i = start; i <= end; i++) {
|
||||
final String url = String.format(baseUrl20, i);
|
||||
updates.add(Integer.toString(i), url, String.format(baseUrl12, i),
|
||||
lastModifiedDates.get(url), true);
|
||||
}
|
||||
|
||||
final String url = Settings.getString(Settings.KEYS.CVE_MODIFIED_20_URL);
|
||||
updates.add(MODIFIED, url, Settings.getString(Settings.KEYS.CVE_MODIFIED_12_URL),
|
||||
final String url = settings.getString(Settings.KEYS.CVE_MODIFIED_20_URL);
|
||||
updates.add(MODIFIED, url, settings.getString(Settings.KEYS.CVE_MODIFIED_12_URL),
|
||||
lastModifiedDates.get(url), false);
|
||||
|
||||
return updates;
|
||||
}
|
||||
|
||||
@@ -448,16 +433,16 @@ public class NvdCveUpdater implements CachedWebDataSource {
|
||||
throws MalformedURLException, DownloadFailedException {
|
||||
|
||||
final Set<String> urls = new HashSet<>();
|
||||
final String baseUrl20 = Settings.getString(Settings.KEYS.CVE_SCHEMA_2_0);
|
||||
final String baseUrl20 = settings.getString(Settings.KEYS.CVE_SCHEMA_2_0);
|
||||
for (int i = startYear; i <= endYear; i++) {
|
||||
final String url = String.format(baseUrl20, i);
|
||||
urls.add(url);
|
||||
}
|
||||
urls.add(Settings.getString(Settings.KEYS.CVE_MODIFIED_20_URL));
|
||||
urls.add(settings.getString(Settings.KEYS.CVE_MODIFIED_20_URL));
|
||||
|
||||
final Map<String, Future<Long>> timestampFutures = new HashMap<>();
|
||||
for (String url : urls) {
|
||||
final TimestampRetriever timestampRetriever = new TimestampRetriever(url, Settings.getInstance());
|
||||
final TimestampRetriever timestampRetriever = new TimestampRetriever(url, settings);
|
||||
final Future<Long> future = downloadExecutorService.submit(timestampRetriever);
|
||||
timestampFutures.put(url, future);
|
||||
}
|
||||
@@ -480,6 +465,15 @@ public class NvdCveUpdater implements CachedWebDataSource {
|
||||
return lastModifiedDates;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the settings object; this is used during testing.
|
||||
*
|
||||
* @param settings the configured settings
|
||||
*/
|
||||
protected synchronized void setSettings(Settings settings) {
|
||||
this.settings = settings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the last modified timestamp from a NVD CVE meta data file.
|
||||
*/
|
||||
@@ -509,10 +503,10 @@ public class NvdCveUpdater implements CachedWebDataSource {
|
||||
public Long call() throws Exception {
|
||||
LOGGER.debug("Checking for updates from: {}", url);
|
||||
try {
|
||||
Settings.setInstance(settings);
|
||||
return Downloader.getLastModified(new URL(url));
|
||||
final Downloader downloader = new Downloader(settings);
|
||||
return downloader.getLastModified(new URL(url));
|
||||
} finally {
|
||||
Settings.cleanup(false);
|
||||
settings.cleanup(false);
|
||||
}
|
||||
}
|
||||
}
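The TimestampRetriever change above builds a Downloader around the settings handed to the task instead of swapping a static Settings instance. A minimal sketch (the url would be one of the NVD feed URLs resolved from Settings):

    final Downloader downloader = new Downloader(settings);
    final long lastModified = downloader.getLastModified(new URL(url)); // HTTP Last-Modified timestamp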
@@ -19,13 +19,15 @@ package org.owasp.dependencycheck.data.update;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.ServiceLoader;
|
||||
import javax.annotation.concurrent.NotThreadSafe;
|
||||
|
||||
/**
|
||||
* The CachedWebDataSource Service Loader. This class loads all services that implement
|
||||
* org.owasp.dependencycheck.data.update.CachedWebDataSource.
|
||||
* The CachedWebDataSource Service Loader. This class loads all services that
|
||||
* implement {@link org.owasp.dependencycheck.data.update.CachedWebDataSource}.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@NotThreadSafe
|
||||
public class UpdateService {
|
||||
|
||||
/**
|
||||
@@ -36,14 +38,16 @@ public class UpdateService {
|
||||
/**
|
||||
* Creates a new instance of UpdateService.
|
||||
*
|
||||
* @param classLoader the ClassLoader to use when dynamically loading Analyzer and Update services
|
||||
* @param classLoader the ClassLoader to use when dynamically loading
|
||||
* Analyzer and Update services
|
||||
*/
|
||||
public UpdateService(ClassLoader classLoader) {
|
||||
loader = ServiceLoader.load(CachedWebDataSource.class, classLoader);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an Iterator for all instances of the CachedWebDataSource interface.
|
||||
* Returns an Iterator for all instances of the CachedWebDataSource
|
||||
* interface.
|
||||
*
|
||||
* @return an iterator of CachedWebDataSource.
|
||||
*/
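UpdateService is a thin wrapper around java.util.ServiceLoader. A hedged usage sketch follows; the accessor name getDataSources() is inferred from the javadoc above, and the iteration is illustrative rather than a quote of the engine code.

// Illustrative: discover every CachedWebDataSource registered under
// META-INF/services/org.owasp.dependencycheck.data.update.CachedWebDataSource.
final UpdateService service = new UpdateService(Thread.currentThread().getContextClassLoader());
final Iterator<CachedWebDataSource> sources = service.getDataSources();   // assumed accessor name
while (sources.hasNext()) {
    final CachedWebDataSource source = sources.next();
    // e.g. org.owasp.dependencycheck.data.update.NvdCveUpdater
    System.out.println(source.getClass().getName());
}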

@@ -20,6 +20,7 @@ package org.owasp.dependencycheck.data.update.cpe;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.concurrent.NotThreadSafe;
import org.owasp.dependencycheck.data.update.NvdCveUpdater;
import org.owasp.dependencycheck.data.update.exception.InvalidDataException;
import org.owasp.dependencycheck.utils.Settings;
@@ -34,6 +35,7 @@ import org.xml.sax.helpers.DefaultHandler;
*
* @author Jeremy Long
*/
@NotThreadSafe
public class CPEHandler extends DefaultHandler {

/**
@@ -43,7 +45,7 @@ public class CPEHandler extends DefaultHandler {
/**
* The Starts with expression to filter CVE entries by CPE.
*/
private static final String CPE_STARTS_WITH = Settings.getString(Settings.KEYS.CVE_CPE_STARTS_WITH_FILTER, "cpe:/a:");
private final String cpeStartsWith;
/**
* The text content of the node being processed. This can be used during the
* end element event.
@@ -62,6 +64,15 @@ public class CPEHandler extends DefaultHandler {
*/
private final List<Cpe> data = new ArrayList<>();

/**
* Constructs a new CPE Handler object with the configured settings.
*
* @param settings the configured settings
*/
public CPEHandler(Settings settings) {
cpeStartsWith = settings.getString(Settings.KEYS.CVE_CPE_STARTS_WITH_FILTER, "cpe:/a:");
}

/**
* Returns the list of CPE values.
*
@@ -89,7 +100,7 @@ public class CPEHandler extends DefaultHandler {
final String temp = attributes.getValue("deprecated");
final String value = attributes.getValue("name");
final boolean delete = "true".equalsIgnoreCase(temp);
if (!delete && value.startsWith(CPE_STARTS_WITH) && value.length() > 7) {
if (!delete && value.startsWith(cpeStartsWith) && value.length() > 7) {
try {
final Cpe cpe = new Cpe(value);
data.add(cpe);
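The change above replaces the static CPE_STARTS_WITH constant (read from the Settings singleton at class-load time) with a cpeStartsWith field injected through the new constructor, so each parse is filtered by whichever Settings instance the caller owns. A hedged usage sketch follows; the dictionary file name is a placeholder and the getData() accessor is assumed from the "Returns the list of CPE values" javadoc.

// Illustrative: parse a CPE dictionary with a handler bound to a caller-owned Settings.
final Settings settings = new Settings();                              // assumes the instance-based Settings of this refactor
final CPEHandler handler = new CPEHandler(settings);                    // filter defaults to "cpe:/a:" unless overridden
final SAXParser parser = SAXParserFactory.newInstance().newSAXParser();
parser.parse(new File("official-cpe-dictionary_v2.2.xml"), handler);    // placeholder file
final List<Cpe> applicationCpes = handler.getData();                    // assumed accessor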

@@ -20,14 +20,30 @@ package org.owasp.dependencycheck.data.update.cpe;
import org.apache.commons.lang3.StringUtils;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.data.update.exception.InvalidDataException;

/**
*
* @author Jeremy Long
*/
@ThreadSafe
public class Cpe {

/**
* The CPE identifier string (cpe:/a:vendor:product:version).
*/
private String value;
/**
* The vendor portion of the identifier.
*/
private String vendor;

/**
* The product portion of the identifier.
*/
private String product;

/**
* Constructs a new Cpe Object by parsing the vendor and product from the CPE identifier value.
*
@@ -47,11 +63,6 @@ public class Cpe {
}
}

/**
* The CPE identifier string (cpe:/a:vendor:product:version).
*/
private String value;

/**
* Get the value of value.
*
@@ -69,10 +80,6 @@ public class Cpe {
public void setValue(String value) {
this.value = value;
}
/**
* The vendor portion of the identifier.
*/
private String vendor;

/**
* Get the value of vendor.
@@ -92,11 +99,6 @@ public class Cpe {
this.vendor = vendor;
}

/**
* The product portion of the identifier.
*/
private String product;

/**
* Get the value of product.
*
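For reference, the constructor javadoc above says Cpe parses the vendor and product out of an identifier of the form cpe:/a:vendor:product:version. A hedged sketch of that kind of parsing follows; the real constructor also URL-decodes the segments and can throw InvalidDataException, and the exact splitting logic here is illustrative rather than the class's actual implementation.

// Illustrative parsing of "cpe:/a:vendor:product:version"; throws UnsupportedEncodingException.
final String value = "cpe:/a:apache:struts:2.1.2";
final String[] parts = value.split(":");
// parts = ["cpe", "/a", "apache", "struts", "2.1.2"]
final String vendor = URLDecoder.decode(parts[2], "UTF-8");   // "apache"
final String product = URLDecoder.decode(parts[3], "UTF-8");  // "struts"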

@@ -17,11 +17,15 @@
*/
package org.owasp.dependencycheck.data.update.exception;

import javax.annotation.concurrent.ThreadSafe;

/**
* An InvalidDataDataException is a generic exception used when trying to load the NVD CVE meta data.
* An InvalidDataDataException is a generic exception used when trying to load
* the NVD CVE meta data.
*
* @author Jeremy Long
*/
@ThreadSafe
public class InvalidDataException extends Exception {

/**

@@ -17,11 +17,14 @@
*/
package org.owasp.dependencycheck.data.update.exception;

import javax.annotation.concurrent.ThreadSafe;

/**
* An exception used when an error occurs reading a setting.
*
* @author Jeremy Long
*/
@ThreadSafe
public class UpdateException extends Exception {

/**

@@ -25,6 +25,7 @@ import java.net.URL;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.utils.DownloadFailedException;
@@ -39,6 +40,7 @@ import org.slf4j.LoggerFactory;
*
* @author Jeremy Long
*/
@ThreadSafe
public class DownloadTask implements Callable<Future<ProcessTask>> {

/**
@@ -91,8 +93,8 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
final File file2;

try {
file1 = File.createTempFile("cve" + nvdCveInfo.getId() + '_', ".xml", Settings.getTempDirectory());
file2 = File.createTempFile("cve_1_2_" + nvdCveInfo.getId() + '_', ".xml", Settings.getTempDirectory());
file1 = File.createTempFile("cve" + nvdCveInfo.getId() + '_', ".xml", settings.getTempDirectory());
file2 = File.createTempFile("cve_1_2_" + nvdCveInfo.getId() + '_', ".xml", settings.getTempDirectory());
} catch (IOException ex) {
throw new UpdateException("Unable to create temporary files", ex);
}
@@ -128,15 +130,6 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
return first;
}

/**
* Set the value of first.
*
* @param first new value of first
*/
public void setFirst(File first) {
this.first = first;
}

/**
* Get the value of second.
*
@@ -146,29 +139,20 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
return second;
}

/**
* Set the value of second.
*
* @param second new value of second
*/
public void setSecond(File second) {
this.second = second;
}

@Override
public Future<ProcessTask> call() throws Exception {
try {
Settings.setInstance(settings);
final URL url1 = new URL(nvdCveInfo.getUrl());
final URL url2 = new URL(nvdCveInfo.getOldSchemaVersionUrl());
LOGGER.info("Download Started for NVD CVE - {}", nvdCveInfo.getId());
final long startDownload = System.currentTimeMillis();
try {
Downloader.fetchFile(url1, first);
Downloader.fetchFile(url2, second);
final Downloader downloader = new Downloader(settings);
downloader.fetchFile(url1, first);
downloader.fetchFile(url2, second);
} catch (DownloadFailedException ex) {
LOGGER.warn("Download Failed for NVD CVE - {}\nSome CVEs may not be reported.", nvdCveInfo.getId());
if (Settings.getString(Settings.KEYS.PROXY_SERVER) == null) {
if (settings.getString(Settings.KEYS.PROXY_SERVER) == null) {
LOGGER.info("If you are behind a proxy you may need to configure dependency-check to use the proxy.");
}
LOGGER.debug("", ex);
@@ -193,7 +177,7 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
LOGGER.warn("An exception occurred downloading NVD CVE - {}\nSome CVEs may not be reported.", nvdCveInfo.getId());
LOGGER.debug("Download Task Failed", ex);
} finally {
Settings.cleanup(false);
settings.cleanup(false);
}
return null;
}
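DownloadTask now receives its Settings up front and builds a Downloader from it inside call(), rather than re-registering a thread-local singleton per worker. A hedged sketch of how such tasks are typically driven is below; the constructor arguments are assumptions based only on the fields this diff touches (the NVD feed info, a processing executor, the CveDB, and the settings), and updateable, downloadExecutor, processExecutor, cveDB, and settings are free variables here.

// Illustrative fan-out of download tasks; each returns a Future for the follow-up
// processing of the downloaded XML, or null when the download failed.
final List<Future<Future<ProcessTask>>> downloads = new ArrayList<>();
for (NvdCveInfo cve : updateable) {
    // Constructor shape is an assumption for illustration.
    downloads.add(downloadExecutor.submit(new DownloadTask(cve, processExecutor, cveDB, settings)));
}
for (Future<Future<ProcessTask>> submitted : downloads) {
    final Future<ProcessTask> processing = submitted.get();
    if (processing != null) {
        processing.get();   // wait for the XML to be imported into the database
    }
}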

@@ -21,6 +21,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.concurrent.NotThreadSafe;
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
@@ -34,6 +35,7 @@ import org.xml.sax.helpers.DefaultHandler;
*
* @author Jeremy Long
*/
@NotThreadSafe
public class NvdCve12Handler extends DefaultHandler {
/**
Some files were not shown because too many files have changed in this diff.