Support parallelism for analyzers of the same type

This commit is contained in:
Stefan Neuhaus
2016-10-08 19:47:58 +02:00
parent 128a600f18
commit 51c96894b4
23 changed files with 281 additions and 152 deletions

View File

@@ -0,0 +1,61 @@
package org.owasp.dependencycheck;
import org.owasp.dependencycheck.analyzer.Analyzer;
import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.concurrent.Callable;
/**
 * A task that executes a single {@link Analyzer} against a single
 * {@link Dependency}. Designed to be submitted to an {@code ExecutorService} so
 * that analyzers supporting parallel processing can analyze several
 * dependencies concurrently. Errors are collected into a shared list rather
 * than thrown, so one failing dependency does not abort the whole batch.
 */
class AnalysisTask implements Callable<Void> {

    /**
     * The logger.
     */
    private static final Logger LOGGER = LoggerFactory.getLogger(AnalysisTask.class);

    /**
     * The analyzer to execute.
     */
    private final Analyzer analyzer;
    /**
     * The dependency to be analyzed.
     */
    private final Dependency dependency;
    /**
     * The engine coordinating the scan; handed through to the analyzer.
     */
    private final Engine engine;
    /**
     * Shared collection of analysis errors. Callers are expected to supply a
     * thread-safe list when tasks run in parallel.
     */
    private final List<Throwable> exceptions;

    /**
     * Constructs a new analysis task.
     *
     * @param analyzer the analyzer to run
     * @param dependency the dependency to analyze
     * @param engine the engine performing the scan
     * @param exceptions the shared list collecting errors raised during analysis
     */
    AnalysisTask(Analyzer analyzer, Dependency dependency, Engine engine, List<Throwable> exceptions) {
        this.analyzer = analyzer;
        this.dependency = dependency;
        this.engine = engine;
        this.exceptions = exceptions;
    }

    /**
     * Runs the analyzer against the dependency unless the analyzer rejects the
     * dependency's file. Any {@link AnalysisException} or other error is logged
     * and appended to the shared exception list instead of being propagated.
     *
     * @return always {@code null}
     * @throws Exception never thrown by this implementation; declared by
     * {@link Callable#call()}
     */
    @Override
    public Void call() throws Exception {
        // NOTE(review): tasks may execute on fresh worker threads; this
        // presumably (re)initializes per-thread settings state — confirm
        // against the Settings implementation.
        Settings.initialize();
        if (!shouldAnalyze()) {
            return null;
        }
        LOGGER.debug("Begin Analysis of '{}' ({})", dependency.getActualFilePath(), analyzer.getName());
        try {
            analyzer.analyze(dependency, engine);
        } catch (AnalysisException ex) {
            LOGGER.warn("An error occurred while analyzing '{}' ({}).", dependency.getActualFilePath(), analyzer.getName());
            LOGGER.debug("", ex);
            exceptions.add(ex);
        } catch (Throwable ex) {
            // Broad catch is deliberate: an unexpected failure in one analyzer
            // run must not abort the remaining analysis tasks.
            LOGGER.warn("An unexpected error occurred during analysis of '{}' ({}): {}",
                    dependency.getActualFilePath(), analyzer.getName(), ex.getMessage());
            LOGGER.debug("", ex);
            exceptions.add(ex);
        }
        return null;
    }

    /**
     * Determines whether the analyzer applies to this dependency. File-type
     * analyzers are consulted via {@code accept(File)}; every other analyzer
     * type is run against all dependencies.
     *
     * @return {@code true} if the dependency should be analyzed
     */
    private boolean shouldAnalyze() {
        return !(analyzer instanceof FileTypeAnalyzer)
                || ((FileTypeAnalyzer) analyzer).accept(dependency.getActualFile());
    }
}

View File

@@ -17,11 +17,11 @@
*/ */
package org.owasp.dependencycheck; package org.owasp.dependencycheck;
import com.google.common.base.Stopwatch;
import org.owasp.dependencycheck.analyzer.AnalysisPhase; import org.owasp.dependencycheck.analyzer.AnalysisPhase;
import org.owasp.dependencycheck.analyzer.Analyzer; import org.owasp.dependencycheck.analyzer.Analyzer;
import org.owasp.dependencycheck.analyzer.AnalyzerService; import org.owasp.dependencycheck.analyzer.AnalyzerService;
import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer; import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.data.nvdcve.ConnectionFactory; import org.owasp.dependencycheck.data.nvdcve.ConnectionFactory;
import org.owasp.dependencycheck.data.nvdcve.CveDB; import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException; import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
@@ -29,9 +29,9 @@ import org.owasp.dependencycheck.data.update.CachedWebDataSource;
import org.owasp.dependencycheck.data.update.UpdateService; import org.owasp.dependencycheck.data.update.UpdateService;
import org.owasp.dependencycheck.data.update.exception.UpdateException; import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.NoDataException;
import org.owasp.dependencycheck.exception.ExceptionCollection; import org.owasp.dependencycheck.exception.ExceptionCollection;
import org.owasp.dependencycheck.exception.InitializationException; import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.exception.NoDataException;
import org.owasp.dependencycheck.utils.InvalidSettingException; import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger; import org.slf4j.Logger;
@@ -41,12 +41,18 @@ import java.io.File;
import java.io.FileFilter; import java.io.FileFilter;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
import java.util.EnumMap; import java.util.EnumMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
/** /**
* Scans files, directories, etc. for Dependencies. Analyzers are loaded and * Scans files, directories, etc. for Dependencies. Analyzers are loaded and
@@ -61,7 +67,7 @@ public class Engine implements FileFilter {
/** /**
* The list of dependencies. * The list of dependencies.
*/ */
private List<Dependency> dependencies = new ArrayList<Dependency>(); private final List<Dependency> dependencies = Collections.synchronizedList(new ArrayList<Dependency>());
/** /**
* A Map of analyzers grouped by Analysis phase. * A Map of analyzers grouped by Analysis phase.
*/ */
@@ -157,8 +163,13 @@ public class Engine implements FileFilter {
/** /**
* Get the dependencies identified. * Get the dependencies identified.
* The returned list is a reference to the engine's synchronized list. You must synchronize on it, when you modify
* and iterate over it from multiple threads. E.g. this holds for analyzers supporting parallel processing during
* their analysis phase.
* *
* @return the dependencies identified * @return the dependencies identified
* @see Collections#synchronizedList(List)
* @see Analyzer#supportsParallelProcessing()
*/ */
public List<Dependency> getDependencies() { public List<Dependency> getDependencies() {
return dependencies; return dependencies;
@@ -170,7 +181,10 @@ public class Engine implements FileFilter {
* @param dependencies the dependencies * @param dependencies the dependencies
*/ */
public void setDependencies(List<Dependency> dependencies) { public void setDependencies(List<Dependency> dependencies) {
this.dependencies = dependencies; synchronized (this.dependencies) {
this.dependencies.clear();
this.dependencies.addAll(dependencies);
}
} }
/** /**
@@ -311,16 +325,18 @@ public class Engine implements FileFilter {
dependency = new Dependency(file); dependency = new Dependency(file);
String sha1 = dependency.getSha1sum(); String sha1 = dependency.getSha1sum();
boolean found = false; boolean found = false;
if (sha1 != null) { synchronized (dependencies) {
for (Dependency existing : dependencies) { if (sha1 != null) {
if (sha1.equals(existing.getSha1sum())) { for (Dependency existing : dependencies) {
found = true; if (sha1.equals(existing.getSha1sum())) {
dependency = existing; found = true;
dependency = existing;
}
} }
} }
} if (!found) {
if (!found) { dependencies.add(dependency);
dependencies.add(dependency); }
} }
} else { } else {
LOGGER.debug("Path passed to scanFile(File) is not a file: {}. Skipping the file.", file); LOGGER.debug("Path passed to scanFile(File) is not a file: {}. Skipping the file.", file);
@@ -345,7 +361,7 @@ public class Engine implements FileFilter {
* during analysis * during analysis
*/ */
public void analyzeDependencies() throws ExceptionCollection { public void analyzeDependencies() throws ExceptionCollection {
final List<Throwable> exceptions = new ArrayList<Throwable>(); final List<Throwable> exceptions = Collections.synchronizedList(new ArrayList<Throwable>());
boolean autoUpdate = true; boolean autoUpdate = true;
try { try {
autoUpdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE); autoUpdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
@@ -368,15 +384,9 @@ public class Engine implements FileFilter {
try { try {
ensureDataExists(); ensureDataExists();
} catch (NoDataException ex) { } catch (NoDataException ex) {
LOGGER.error("{}\n\nUnable to continue dependency-check analysis.", ex.getMessage()); throwFatalExceptionCollection("Unable to continue dependency-check analysis.", ex, exceptions);
LOGGER.debug("", ex);
exceptions.add(ex);
throw new ExceptionCollection("Unable to continue dependency-check analysis.", exceptions, true);
} catch (DatabaseException ex) { } catch (DatabaseException ex) {
LOGGER.error("{}\n\nUnable to continue dependency-check analysis.", ex.getMessage()); throwFatalExceptionCollection("Unable to connect to the dependency-check database.", ex, exceptions);
LOGGER.debug("", ex);
exceptions.add(ex);
throw new ExceptionCollection("Unable to connect to the dependency-check database", exceptions, true);
} }
LOGGER.debug("\n----------------------------------------------------\nBEGIN ANALYSIS\n----------------------------------------------------"); LOGGER.debug("\n----------------------------------------------------\nBEGIN ANALYSIS\n----------------------------------------------------");
@@ -387,42 +397,17 @@ public class Engine implements FileFilter {
for (AnalysisPhase phase : AnalysisPhase.values()) { for (AnalysisPhase phase : AnalysisPhase.values()) {
final List<Analyzer> analyzerList = analyzers.get(phase); final List<Analyzer> analyzerList = analyzers.get(phase);
for (Analyzer a : analyzerList) { for (final Analyzer a : analyzerList) {
Stopwatch watch = Stopwatch.createStarted();
try { try {
a = initializeAnalyzer(a); initializeAnalyzer(a);
} catch (InitializationException ex) { } catch (InitializationException ex) {
exceptions.add(ex); exceptions.add(ex);
continue; continue;
} }
/* need to create a copy of the collection because some of the executeAnalysisTasks(exceptions, a);
* analyzers may modify it. This prevents ConcurrentModificationExceptions. LOGGER.info("Finished {}. Took {} secs.", a.getName(), watch.elapsed(TimeUnit.SECONDS));
* This is okay for adds/deletes because it happens per analyzer.
*/
LOGGER.debug("Begin Analyzer '{}'", a.getName());
final Set<Dependency> dependencySet = new HashSet<Dependency>(dependencies);
for (Dependency d : dependencySet) {
boolean shouldAnalyze = true;
if (a instanceof FileTypeAnalyzer) {
final FileTypeAnalyzer fAnalyzer = (FileTypeAnalyzer) a;
shouldAnalyze = fAnalyzer.accept(d.getActualFile());
}
if (shouldAnalyze) {
LOGGER.debug("Begin Analysis of '{}'", d.getActualFilePath());
try {
a.analyze(d, this);
} catch (AnalysisException ex) {
LOGGER.warn("An error occurred while analyzing '{}'.", d.getActualFilePath());
LOGGER.debug("", ex);
exceptions.add(ex);
} catch (Throwable ex) {
//final AnalysisException ax = new AnalysisException(axMsg, ex);
LOGGER.warn("An unexpected error occurred during analysis of '{}'", d.getActualFilePath());
LOGGER.debug("", ex);
exceptions.add(ex);
}
}
}
} }
} }
for (AnalysisPhase phase : AnalysisPhase.values()) { for (AnalysisPhase phase : AnalysisPhase.values()) {
@@ -440,6 +425,51 @@ public class Engine implements FileFilter {
} }
} }
private void executeAnalysisTasks(List<Throwable> exceptions, Analyzer analyzer) throws ExceptionCollection {
LOGGER.debug("Starting {}", analyzer.getName());
final List<AnalysisTask> analysisTasks = getAnalysisTasks(analyzer, exceptions);
final ExecutorService executorService = getExecutorService(analyzer);
try {
List<Future<Void>> results = executorService.invokeAll(analysisTasks, 10, TimeUnit.MINUTES);
// ensure there was no exception during execution
for (Future<Void> result : results) {
try {
result.get();
} catch (ExecutionException e) {
throwFatalExceptionCollection("Analysis task failed with a fatal exception.", e, exceptions);
}
}
} catch (InterruptedException e) {
throwFatalExceptionCollection("Analysis has been interrupted.", e, exceptions);
}
}
private List<AnalysisTask> getAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) {
final List<AnalysisTask> result = new ArrayList<AnalysisTask>();
synchronized (dependencies) {
for (final Dependency dependency : dependencies) {
AnalysisTask task = new AnalysisTask(analyzer, dependency, this, exceptions);
result.add(task);
}
}
return result;
}
private ExecutorService getExecutorService(Analyzer analyzer) {
if (analyzer.supportsParallelProcessing()) {
// just a fair trade-off that should be reasonable for all analyzer types
int maximumNumberOfThreads = 4 * Runtime.getRuntime().availableProcessors();
LOGGER.debug("Parallel processing with up to {} threads: {}.", maximumNumberOfThreads, analyzer.getName());
return Executors.newFixedThreadPool(maximumNumberOfThreads);
} else {
LOGGER.debug("Parallel processing is not supported: {}.", analyzer.getName());
return Executors.newSingleThreadExecutor();
}
}
/** /**
* Initializes the given analyzer. * Initializes the given analyzer.
* *
@@ -582,4 +612,11 @@ public class Engine implements FileFilter {
cve.close(); cve.close();
} }
} }
private void throwFatalExceptionCollection(String message, Throwable throwable, List<Throwable> exceptions) throws ExceptionCollection {
LOGGER.error("{}\n\n{}", throwable.getMessage(), message);
LOGGER.debug("", throwable);
exceptions.add(throwable);
throw new ExceptionCollection(message, exceptions, true);
}
} }

View File

@@ -46,4 +46,12 @@ public abstract class AbstractAnalyzer implements Analyzer {
public void close() throws Exception { public void close() throws Exception {
//do nothing //do nothing
} }
/**
* The default is to support parallel processing.
*/
@Override
public boolean supportsParallelProcessing() {
return true;
}
} }

View File

@@ -83,7 +83,7 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
/** /**
* A flag indicating whether or not the analyzer is enabled. * A flag indicating whether or not the analyzer is enabled.
*/ */
private boolean enabled = true; private volatile boolean enabled = true;
/** /**
* Get the value of enabled. * Get the value of enabled.

View File

@@ -75,4 +75,12 @@ public interface Analyzer {
* @throws Exception is thrown if an exception occurs closing the analyzer. * @throws Exception is thrown if an exception occurs closing the analyzer.
*/ */
void close() throws Exception; void close() throws Exception;
/**
* Returns whether multiple instances of the same type of analyzer can run in parallel.
* Note that running analyzers of different types in parallel is not supported at all.
*
* @return {@code true} if the analyzer supports parallel processing, {@code false} else
*/
boolean supportsParallelProcessing();
} }

View File

@@ -220,6 +220,17 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
} }
} }
/**
* Does not support parallel processing as it both modifies and iterates over the engine's list of dependencies.
*
* @see #analyzeFileType(Dependency, Engine)
* @see #findMoreDependencies(Engine, File)
*/
@Override
public boolean supportsParallelProcessing() {
return false;
}
/** /**
* Analyzes a given dependency. If the dependency is an archive, such as a * Analyzes a given dependency. If the dependency is an archive, such as a
* WAR or EAR, the contents are extracted, scanned, and added to the list of * WAR or EAR, the contents are extracted, scanned, and added to the list of

View File

@@ -72,10 +72,6 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
* The temp value for GrokAssembly.exe * The temp value for GrokAssembly.exe
*/ */
private File grokAssemblyExe = null; private File grokAssemblyExe = null;
/**
* The DocumentBuilder for parsing the XML
*/
private DocumentBuilder builder;
/** /**
* Logger * Logger
*/ */
@@ -128,6 +124,7 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
try { try {
final Process proc = pb.start(); final Process proc = pb.start();
DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
doc = builder.parse(proc.getInputStream()); doc = builder.parse(proc.getInputStream());
// Try evacuating the error stream // Try evacuating the error stream
@@ -176,6 +173,8 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
product, Confidence.HIGH)); product, Confidence.HIGH));
} }
} catch (ParserConfigurationException pce) {
throw new AnalysisException("Error initializing the assembly analyzer", pce);
} catch (IOException ioe) { } catch (IOException ioe) {
throw new AnalysisException(ioe); throw new AnalysisException(ioe);
} catch (SAXException saxe) { } catch (SAXException saxe) {
@@ -233,9 +232,9 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
// Now, need to see if GrokAssembly actually runs from this location. // Now, need to see if GrokAssembly actually runs from this location.
final List<String> args = buildArgumentList(); final List<String> args = buildArgumentList();
//TODO this creaes an "unreported" error - if someone doesn't look //TODO this creates an "unreported" error - if someone doesn't look
// at the command output this could easily be missed (especially in an // at the command output this could easily be missed (especially in an
// Ant or Mmaven build. // Ant or Maven build.
// //
// We need to create a non-fatal warning error type that will // We need to create a non-fatal warning error type that will
// get added to the report. // get added to the report.
@@ -275,12 +274,6 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
setEnabled(false); setEnabled(false);
throw new InitializationException("An error occurred with the .NET AssemblyAnalyzer", e); throw new InitializationException("An error occurred with the .NET AssemblyAnalyzer", e);
} }
try {
builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
} catch (ParserConfigurationException ex) {
setEnabled(false);
throw new InitializationException("Error initializing the assembly analyzer", ex);
}
} }
/** /**

View File

@@ -178,11 +178,7 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
} }
} }
} else { } else {
// copy, alter and set in case some other thread is iterating over engine.getDependencies().remove(dependency);
final List<Dependency> dependencies = new ArrayList<Dependency>(
engine.getDependencies());
dependencies.remove(dependency);
engine.setDependencies(dependencies);
} }
} }

View File

@@ -92,24 +92,10 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(".cmake") private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(".cmake")
.addFilenames("CMakeLists.txt").build(); .addFilenames("CMakeLists.txt").build();
/**
* A reference to SHA1 message digest.
*/
private static MessageDigest sha1 = null;
static {
try {
sha1 = MessageDigest.getInstance("SHA1");
} catch (NoSuchAlgorithmException e) {
LOGGER.error(e.getMessage());
}
}
/** /**
* Returns the name of the CMake analyzer. * Returns the name of the CMake analyzer.
* *
* @return the name of the analyzer * @return the name of the analyzer
*
*/ */
@Override @Override
public String getName() { public String getName() {
@@ -137,13 +123,19 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* No-op initializer implementation. * Initializes the analyzer.
* *
* @throws InitializationException never thrown * @throws InitializationException thrown if an exception occurs getting an
* instance of SHA1
*/ */
@Override @Override
protected void initializeFileTypeAnalyzer() throws InitializationException { protected void initializeFileTypeAnalyzer() throws InitializationException {
// Nothing to do here. try {
getSha1MessageDigest();
} catch (IllegalStateException ex) {
setEnabled(false);
throw new InitializationException("Unable to create SHA1 MessageDigest", ex);
}
} }
/** /**
@@ -229,6 +221,7 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
} catch (UnsupportedEncodingException ex) { } catch (UnsupportedEncodingException ex) {
path = filePath.getBytes(); path = filePath.getBytes();
} }
MessageDigest sha1 = getSha1MessageDigest();
currentDep.setSha1sum(Checksum.getHex(sha1.digest(path))); currentDep.setSha1sum(Checksum.getHex(sha1.digest(path)));
engine.getDependencies().add(currentDep); engine.getDependencies().add(currentDep);
} }
@@ -245,4 +238,13 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
protected String getAnalyzerEnabledSettingKey() { protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_CMAKE_ENABLED; return Settings.KEYS.ANALYZER_CMAKE_ENABLED;
} }
private MessageDigest getSha1MessageDigest() {
try {
return MessageDigest.getInstance("SHA1");
} catch (NoSuchAlgorithmException e) {
LOGGER.error(e.getMessage());
throw new IllegalStateException("Failed to obtain the SHA1 message digest.", e);
}
}
} }

View File

@@ -59,7 +59,7 @@ import org.slf4j.LoggerFactory;
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
public class CPEAnalyzer implements Analyzer { public class CPEAnalyzer extends AbstractAnalyzer {
/** /**
* The Logger. * The Logger.

View File

@@ -75,7 +75,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
* The analyzer should be disabled if there are errors, so this is a flag to * The analyzer should be disabled if there are errors, so this is a flag to
* determine if such an error has occurred. * determine if such an error has occurred.
*/ */
private boolean errorFlag = false; private volatile boolean errorFlag = false;
/** /**
* The searcher itself. * The searcher itself.

View File

@@ -24,6 +24,7 @@ import org.owasp.dependencycheck.data.composer.ComposerException;
import org.owasp.dependencycheck.data.composer.ComposerLockParser; import org.owasp.dependencycheck.data.composer.ComposerLockParser;
import org.owasp.dependencycheck.dependency.Confidence; import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.Checksum; import org.owasp.dependencycheck.utils.Checksum;
import org.owasp.dependencycheck.utils.FileFilterBuilder; import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
@@ -36,7 +37,6 @@ import java.io.FileNotFoundException;
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.security.MessageDigest; import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException; import java.security.NoSuchAlgorithmException;
import org.owasp.dependencycheck.exception.InitializationException;
/** /**
* Used to analyze a composer.lock file for a composer PHP app. * Used to analyze a composer.lock file for a composer PHP app.
@@ -85,19 +85,13 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
@Override @Override
protected void initializeFileTypeAnalyzer() throws InitializationException { protected void initializeFileTypeAnalyzer() throws InitializationException {
try { try {
sha1 = MessageDigest.getInstance("SHA1"); getSha1MessageDigest();
} catch (NoSuchAlgorithmException ex) { } catch (IllegalStateException ex) {
setEnabled(false); setEnabled(false);
throw new InitializationException("Unable to create SHA1 MmessageDigest", ex); throw new InitializationException("Unable to create SHA1 MessageDigest", ex);
} }
} }
/**
* The MessageDigest for calculating a new digest for the new dependencies
* added.
*/
private MessageDigest sha1 = null;
/** /**
* Entry point for the analyzer. * Entry point for the analyzer.
* *
@@ -117,6 +111,7 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
final Dependency d = new Dependency(dependency.getActualFile()); final Dependency d = new Dependency(dependency.getActualFile());
d.setDisplayFileName(String.format("%s:%s/%s", dependency.getDisplayFileName(), dep.getGroup(), dep.getProject())); d.setDisplayFileName(String.format("%s:%s/%s", dependency.getDisplayFileName(), dep.getGroup(), dep.getProject()));
final String filePath = String.format("%s:%s/%s", dependency.getFilePath(), dep.getGroup(), dep.getProject()); final String filePath = String.format("%s:%s/%s", dependency.getFilePath(), dep.getGroup(), dep.getProject());
MessageDigest sha1 = getSha1MessageDigest();
d.setFilePath(filePath); d.setFilePath(filePath);
d.setSha1sum(Checksum.getHex(sha1.digest(filePath.getBytes(Charset.defaultCharset())))); d.setSha1sum(Checksum.getHex(sha1.digest(filePath.getBytes(Charset.defaultCharset()))));
d.getVendorEvidence().addEvidence(COMPOSER_LOCK, "vendor", dep.getGroup(), Confidence.HIGHEST); d.getVendorEvidence().addEvidence(COMPOSER_LOCK, "vendor", dep.getGroup(), Confidence.HIGHEST);
@@ -169,4 +164,13 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
public AnalysisPhase getAnalysisPhase() { public AnalysisPhase getAnalysisPhase() {
return AnalysisPhase.INFORMATION_COLLECTION; return AnalysisPhase.INFORMATION_COLLECTION;
} }
private MessageDigest getSha1MessageDigest() {
try {
return MessageDigest.getInstance("SHA1");
} catch (NoSuchAlgorithmException e) {
LOGGER.error(e.getMessage());
throw new IllegalStateException("Failed to obtain the SHA1 message digest.", e);
}
}
} }

View File

@@ -46,7 +46,7 @@ import org.slf4j.LoggerFactory;
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Analyzer { public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
/** /**
* The Logger. * The Logger.
@@ -94,6 +94,16 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
} }
//</editor-fold> //</editor-fold>
/**
* Does not support parallel processing as it both modifies and iterates over the engine's list of dependencies.
*
* @see #analyze(Dependency, Engine)
*/
@Override
public boolean supportsParallelProcessing() {
return false;
}
/** /**
* Analyzes a set of dependencies. If they have been found to have the same * Analyzes a set of dependencies. If they have been found to have the same
* base path and the same set of identifiers they are likely related. The * base path and the same set of identifiers they are likely related. The

View File

@@ -423,28 +423,30 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
String parentPath = dependency.getFilePath().toLowerCase(); String parentPath = dependency.getFilePath().toLowerCase();
if (parentPath.contains(".jar")) { if (parentPath.contains(".jar")) {
parentPath = parentPath.substring(0, parentPath.indexOf(".jar") + 4); parentPath = parentPath.substring(0, parentPath.indexOf(".jar") + 4);
final Dependency parent = findDependency(parentPath, engine.getDependencies()); final List<Dependency> dependencies = engine.getDependencies();
if (parent != null) { synchronized (dependencies) {
boolean remove = false; final Dependency parent = findDependency(parentPath, dependencies);
for (Identifier i : dependency.getIdentifiers()) { if (parent != null) {
if ("cpe".equals(i.getType())) { boolean remove = false;
final String trimmedCPE = trimCpeToVendor(i.getValue()); for (Identifier i : dependency.getIdentifiers()) {
for (Identifier parentId : parent.getIdentifiers()) { if ("cpe".equals(i.getType())) {
if ("cpe".equals(parentId.getType()) && parentId.getValue().startsWith(trimmedCPE)) { final String trimmedCPE = trimCpeToVendor(i.getValue());
remove |= true; for (Identifier parentId : parent.getIdentifiers()) {
if ("cpe".equals(parentId.getType()) && parentId.getValue().startsWith(trimmedCPE)) {
remove |= true;
}
} }
} }
if (!remove) { //we can escape early
return;
}
} }
if (!remove) { //we can escape early if (remove) {
return; dependencies.remove(dependency);
} }
} }
if (remove) {
engine.getDependencies().remove(dependency);
}
} }
} }
} }
} }

View File

@@ -34,7 +34,7 @@ import org.owasp.dependencycheck.utils.DependencyVersionUtil;
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
public class FileNameAnalyzer extends AbstractAnalyzer implements Analyzer { public class FileNameAnalyzer extends AbstractAnalyzer {
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer"> //<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/** /**

View File

@@ -51,7 +51,7 @@ import org.xml.sax.SAXException;
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
public class HintAnalyzer extends AbstractAnalyzer implements Analyzer { public class HintAnalyzer extends AbstractAnalyzer {
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer"> //<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/** /**

View File

@@ -26,7 +26,6 @@ import java.io.InputStreamReader;
import java.io.OutputStream; import java.io.OutputStream;
import java.io.Reader; import java.io.Reader;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration; import java.util.Enumeration;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
@@ -35,6 +34,7 @@ import java.util.Map.Entry;
import java.util.Properties; import java.util.Properties;
import java.util.Set; import java.util.Set;
import java.util.StringTokenizer; import java.util.StringTokenizer;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.jar.Attributes; import java.util.jar.Attributes;
import java.util.jar.JarEntry; import java.util.jar.JarEntry;
import java.util.jar.JarFile; import java.util.jar.JarFile;
@@ -76,7 +76,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* The count of directories created during analysis. This is used for * The count of directories created during analysis. This is used for
* creating temporary directories. * creating temporary directories.
*/ */
private static int dirCount = 0; private static final AtomicInteger DIR_COUNT = new AtomicInteger(0);
/** /**
* The system independent newline character. * The system independent newline character.
*/ */
@@ -318,7 +318,6 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
pom.processProperties(pomProperties); pom.processProperties(pomProperties);
setPomEvidence(newDependency, pom, null); setPomEvidence(newDependency, pom, null);
engine.getDependencies().add(newDependency); engine.getDependencies().add(newDependency);
Collections.sort(engine.getDependencies());
} else { } else {
if (externalPom == null) { if (externalPom == null) {
pom = PomUtils.readPom(path, jar); pom = PomUtils.readPom(path, jar);
@@ -1218,7 +1217,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException thrown if unable to create temporary directory * @throws AnalysisException thrown if unable to create temporary directory
*/ */
private File getNextTempDirectory() throws AnalysisException { private File getNextTempDirectory() throws AnalysisException {
dirCount += 1; final int dirCount = DIR_COUNT.incrementAndGet();
final File directory = new File(tempFileLocation, String.valueOf(dirCount)); final File directory = new File(tempFileLocation, String.valueOf(dirCount));
//getting an exception for some directories not being able to be created; might be because the directory already exists? //getting an exception for some directories not being able to be created; might be because the directory already exists?
if (directory.exists()) { if (directory.exists()) {

View File

@@ -36,7 +36,7 @@ import org.slf4j.LoggerFactory;
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
public class NvdCveAnalyzer implements Analyzer { public class NvdCveAnalyzer extends AbstractAnalyzer {
/** /**
* The Logger for use throughout the class * The Logger for use throughout the class
*/ */

View File

@@ -45,6 +45,7 @@ import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.FileUtils; import org.owasp.dependencycheck.utils.FileUtils;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.UrlStringUtils; import org.owasp.dependencycheck.utils.UrlStringUtils;
import java.util.concurrent.atomic.AtomicInteger;
/** /**
* Used to analyze a Wheel or egg distribution files, or their contents in * Used to analyze a Wheel or egg distribution files, or their contents in
@@ -76,7 +77,7 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
* The count of directories created during analysis. This is used for * The count of directories created during analysis. This is used for
* creating temporary directories. * creating temporary directories.
*/ */
private static int dirCount = 0; private static final AtomicInteger DIR_COUNT = new AtomicInteger(0);
/** /**
* The name of the analyzer. * The name of the analyzer.
@@ -391,7 +392,7 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
// getting an exception for some directories not being able to be // getting an exception for some directories not being able to be
// created; might be because the directory already exists? // created; might be because the directory already exists?
do { do {
dirCount += 1; final int dirCount = DIR_COUNT.incrementAndGet();
directory = new File(tempFileLocation, String.valueOf(dirCount)); directory = new File(tempFileLocation, String.valueOf(dirCount));
} while (directory.exists()); } while (directory.exists());
if (!directory.mkdirs()) { if (!directory.mkdirs()) {

View File

@@ -193,11 +193,7 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
} }
} }
} else { } else {
// copy, alter and set in case some other thread is iterating over engine.getDependencies().remove(dependency);
final List<Dependency> dependencies = new ArrayList<Dependency>(
engine.getDependencies());
dependencies.remove(dependency);
engine.setDependencies(dependencies);
} }
} }

View File

@@ -51,7 +51,7 @@ public class CentralSearch {
/** /**
* Whether to use the Proxy when making requests * Whether to use the Proxy when making requests
*/ */
private boolean useProxy; private final boolean useProxy;
/** /**
* Used for logging. * Used for logging.

View File

@@ -25,6 +25,7 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath; import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathFactory; import javax.xml.xpath.XPathFactory;
import org.owasp.dependencycheck.utils.InvalidSettingException; import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.URLConnectionFactory; import org.owasp.dependencycheck.utils.URLConnectionFactory;
@@ -47,7 +48,7 @@ public class NexusSearch {
/** /**
* Whether to use the Proxy when making requests. * Whether to use the Proxy when making requests.
*/ */
private boolean useProxy; private final boolean useProxy;
/** /**
* Used for logging. * Used for logging.
*/ */
@@ -61,17 +62,17 @@ public class NexusSearch {
*/ */
public NexusSearch(URL rootURL) { public NexusSearch(URL rootURL) {
this.rootURL = rootURL; this.rootURL = rootURL;
useProxy = useProxy();
LOGGER.debug("Using proxy: {}", useProxy);
}
private boolean useProxy() {
try { try {
if (null != Settings.getString(Settings.KEYS.PROXY_SERVER) return Settings.getString(Settings.KEYS.PROXY_SERVER) != null
&& Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY)) { && Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY);
useProxy = true;
LOGGER.debug("Using proxy");
} else {
useProxy = false;
LOGGER.debug("Not using proxy");
}
} catch (InvalidSettingException ise) { } catch (InvalidSettingException ise) {
useProxy = false; LOGGER.warn("Failed to parse proxy settings.", ise);
return false;
} }
} }

View File

@@ -119,7 +119,7 @@ public class CveDB {
* @throws DatabaseException thrown if there is an error opening the * @throws DatabaseException thrown if there is an error opening the
* database connection * database connection
*/ */
public final void open() throws DatabaseException { public synchronized final void open() throws DatabaseException {
if (!isOpen()) { if (!isOpen()) {
conn = ConnectionFactory.getConnection(); conn = ConnectionFactory.getConnection();
} }
@@ -129,7 +129,7 @@ public class CveDB {
* Closes the DB4O database. Close should be called on this object when it * Closes the DB4O database. Close should be called on this object when it
* is done being used. * is done being used.
*/ */
public void close() { public synchronized void close() {
if (conn != null) { if (conn != null) {
try { try {
conn.close(); conn.close();
@@ -149,7 +149,7 @@ public class CveDB {
* *
* @return whether the database connection is open or closed * @return whether the database connection is open or closed
*/ */
public boolean isOpen() { public synchronized boolean isOpen() {
return conn != null; return conn != null;
} }
@@ -158,7 +158,7 @@ public class CveDB {
* *
* @throws SQLException thrown if a SQL Exception occurs * @throws SQLException thrown if a SQL Exception occurs
*/ */
public void commit() throws SQLException { public synchronized void commit() throws SQLException {
//temporary remove this as autocommit is on. //temporary remove this as autocommit is on.
//if (conn != null) { //if (conn != null) {
// conn.commit(); // conn.commit();
@@ -202,7 +202,7 @@ public class CveDB {
* analyzed * analyzed
* @return a set of vulnerable software * @return a set of vulnerable software
*/ */
public Set<VulnerableSoftware> getCPEs(String vendor, String product) { public synchronized Set<VulnerableSoftware> getCPEs(String vendor, String product) {
final Set<VulnerableSoftware> cpe = new HashSet<VulnerableSoftware>(); final Set<VulnerableSoftware> cpe = new HashSet<VulnerableSoftware>();
ResultSet rs = null; ResultSet rs = null;
PreparedStatement ps = null; PreparedStatement ps = null;
@@ -234,7 +234,7 @@ public class CveDB {
* @throws DatabaseException thrown when there is an error retrieving the * @throws DatabaseException thrown when there is an error retrieving the
* data from the DB * data from the DB
*/ */
public Set<Pair<String, String>> getVendorProductList() throws DatabaseException { public synchronized Set<Pair<String, String>> getVendorProductList() throws DatabaseException {
final Set<Pair<String, String>> data = new HashSet<Pair<String, String>>(); final Set<Pair<String, String>> data = new HashSet<Pair<String, String>>();
ResultSet rs = null; ResultSet rs = null;
PreparedStatement ps = null; PreparedStatement ps = null;
@@ -328,7 +328,7 @@ public class CveDB {
* @return a list of Vulnerabilities * @return a list of Vulnerabilities
* @throws DatabaseException thrown if there is an exception retrieving data * @throws DatabaseException thrown if there is an exception retrieving data
*/ */
public List<Vulnerability> getVulnerabilities(String cpeStr) throws DatabaseException { public synchronized List<Vulnerability> getVulnerabilities(String cpeStr) throws DatabaseException {
final VulnerableSoftware cpe = new VulnerableSoftware(); final VulnerableSoftware cpe = new VulnerableSoftware();
try { try {
cpe.parseName(cpeStr); cpe.parseName(cpeStr);
@@ -389,7 +389,7 @@ public class CveDB {
* @return a vulnerability object * @return a vulnerability object
* @throws DatabaseException if an exception occurs * @throws DatabaseException if an exception occurs
*/ */
public Vulnerability getVulnerability(String cve) throws DatabaseException { public synchronized Vulnerability getVulnerability(String cve) throws DatabaseException {
PreparedStatement psV = null; PreparedStatement psV = null;
PreparedStatement psR = null; PreparedStatement psR = null;
PreparedStatement psS = null; PreparedStatement psS = null;
@@ -462,7 +462,7 @@ public class CveDB {
* @param vuln the vulnerability to add to the database * @param vuln the vulnerability to add to the database
* @throws DatabaseException is thrown if the database * @throws DatabaseException is thrown if the database
*/ */
public void updateVulnerability(Vulnerability vuln) throws DatabaseException { public synchronized void updateVulnerability(Vulnerability vuln) throws DatabaseException {
PreparedStatement selectVulnerabilityId = null; PreparedStatement selectVulnerabilityId = null;
PreparedStatement deleteVulnerability = null; PreparedStatement deleteVulnerability = null;
PreparedStatement deleteReferences = null; PreparedStatement deleteReferences = null;
@@ -638,7 +638,7 @@ public class CveDB {
* *
* @return <code>true</code> if data exists; otherwise <code>false</code> * @return <code>true</code> if data exists; otherwise <code>false</code>
*/ */
public boolean dataExists() { public synchronized boolean dataExists() {
Statement cs = null; Statement cs = null;
ResultSet rs = null; ResultSet rs = null;
try { try {
@@ -674,7 +674,7 @@ public class CveDB {
* updates. This should be called after all updates have been completed to * updates. This should be called after all updates have been completed to
* ensure orphan entries are removed. * ensure orphan entries are removed.
*/ */
public void cleanupDatabase() { public synchronized void cleanupDatabase() {
PreparedStatement ps = null; PreparedStatement ps = null;
try { try {
ps = getConnection().prepareStatement(statementBundle.getString("CLEANUP_ORPHANS")); ps = getConnection().prepareStatement(statementBundle.getString("CLEANUP_ORPHANS"));
@@ -812,7 +812,7 @@ public class CveDB {
* *
* Deletes unused dictionary entries from the database. * Deletes unused dictionary entries from the database.
*/ */
public void deleteUnusedCpe() { public synchronized void deleteUnusedCpe() {
PreparedStatement ps = null; PreparedStatement ps = null;
try { try {
ps = getConnection().prepareStatement(statementBundle.getString("DELETE_UNUSED_DICT_CPE")); ps = getConnection().prepareStatement(statementBundle.getString("DELETE_UNUSED_DICT_CPE"));
@@ -834,7 +834,7 @@ public class CveDB {
* @param vendor the CPE vendor * @param vendor the CPE vendor
* @param product the CPE product * @param product the CPE product
*/ */
public void addCpe(String cpe, String vendor, String product) { public synchronized void addCpe(String cpe, String vendor, String product) {
PreparedStatement ps = null; PreparedStatement ps = null;
try { try {
ps = getConnection().prepareStatement(statementBundle.getString("ADD_DICT_CPE")); ps = getConnection().prepareStatement(statementBundle.getString("ADD_DICT_CPE"));