Fixed merge conflict in App.java

Dale Visser
2015-11-23 13:27:22 -05:00
126 changed files with 2556 additions and 1296 deletions

View File

@@ -38,6 +38,7 @@ import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileFilter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.EnumMap;
import java.util.HashSet;
import java.util.Iterator;
@@ -174,8 +175,7 @@ public class Engine implements FileFilter {
public List<Dependency> scan(String[] paths) {
final List<Dependency> deps = new ArrayList<Dependency>();
for (String path : paths) {
final File file = new File(path);
final List<Dependency> d = scan(file);
final List<Dependency> d = scan(path);
if (d != null) {
deps.addAll(d);
}
@@ -215,33 +215,14 @@ public class Engine implements FileFilter {
}
/**
* Scans a list of files or directories. If a directory is specified, it will be scanned recursively. Any dependencies
* Scans a collection of files or directories. If a directory is specified, it will be scanned recursively. Any dependencies
* identified are added to the dependency collection.
*
* @param files a set of paths to files or directories to be analyzed
* @return the list of dependencies scanned
* @since v0.3.2.5
*/
public List<Dependency> scan(Set<File> files) {
final List<Dependency> deps = new ArrayList<Dependency>();
for (File file : files) {
final List<Dependency> d = scan(file);
if (d != null) {
deps.addAll(d);
}
}
return deps;
}
/**
* Scans a list of files or directories. If a directory is specified, it will be scanned recursively. Any dependencies
* identified are added to the dependency collection.
*
* @param files a set of paths to files or directories to be analyzed
* @return the list of dependencies scanned
* @since v0.3.2.5
*/
public List<Dependency> scan(List<File> files) {
public List<Dependency> scan(Collection<File> files) {
final List<Dependency> deps = new ArrayList<Dependency>();
for (File file : files) {
final List<Dependency> d = scan(file);

View File

@@ -840,8 +840,7 @@ public class DependencyCheckScanAgent {
*/
private Engine executeDependencyCheck() throws DatabaseException {
populateSettings();
Engine engine = null;
engine = new Engine();
final Engine engine = new Engine();
engine.setDependencies(this.dependencies);
engine.analyzeDependencies();
return engine;
@@ -898,67 +897,28 @@ public class DependencyCheckScanAgent {
}
Settings.setBoolean(Settings.KEYS.AUTO_UPDATE, autoUpdate);
if (proxyServer != null && !proxyServer.isEmpty()) {
Settings.setString(Settings.KEYS.PROXY_SERVER, proxyServer);
}
if (proxyPort != null && !proxyPort.isEmpty()) {
Settings.setString(Settings.KEYS.PROXY_PORT, proxyPort);
}
if (proxyUsername != null && !proxyUsername.isEmpty()) {
Settings.setString(Settings.KEYS.PROXY_USERNAME, proxyUsername);
}
if (proxyPassword != null && !proxyPassword.isEmpty()) {
Settings.setString(Settings.KEYS.PROXY_PASSWORD, proxyPassword);
}
if (connectionTimeout != null && !connectionTimeout.isEmpty()) {
Settings.setString(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
}
if (suppressionFile != null && !suppressionFile.isEmpty()) {
Settings.setString(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
}
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUsername);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPassword);
Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, centralAnalyzerEnabled);
if (centralUrl != null && !centralUrl.isEmpty()) {
Settings.setString(Settings.KEYS.ANALYZER_CENTRAL_URL, centralUrl);
}
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_CENTRAL_URL, centralUrl);
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, nexusAnalyzerEnabled);
if (nexusUrl != null && !nexusUrl.isEmpty()) {
Settings.setString(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
}
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_PROXY, nexusUsesProxy);
if (databaseDriverName != null && !databaseDriverName.isEmpty()) {
Settings.setString(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
}
if (databaseDriverPath != null && !databaseDriverPath.isEmpty()) {
Settings.setString(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
}
if (connectionString != null && !connectionString.isEmpty()) {
Settings.setString(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
}
if (databaseUser != null && !databaseUser.isEmpty()) {
Settings.setString(Settings.KEYS.DB_USER, databaseUser);
}
if (databasePassword != null && !databasePassword.isEmpty()) {
Settings.setString(Settings.KEYS.DB_PASSWORD, databasePassword);
}
if (zipExtensions != null && !zipExtensions.isEmpty()) {
Settings.setString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, zipExtensions);
}
if (cveUrl12Modified != null && !cveUrl12Modified.isEmpty()) {
Settings.setString(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified);
}
if (cveUrl20Modified != null && !cveUrl20Modified.isEmpty()) {
Settings.setString(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified);
}
if (cveUrl12Base != null && !cveUrl12Base.isEmpty()) {
Settings.setString(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base);
}
if (cveUrl20Base != null && !cveUrl20Base.isEmpty()) {
Settings.setString(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base);
}
if (pathToMono != null && !pathToMono.isEmpty()) {
Settings.setString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
}
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
Settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, zipExtensions);
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified);
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified);
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base);
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base);
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
}
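The repeated null-and-empty guards above are collapsed into calls to Settings.setStringIfNotEmpty. That helper's body is not shown in this commit; a minimal sketch of what such a guard method presumably does (illustrative only, the real implementation lives in org.owasp.dependencycheck.utils.Settings and may differ):

// Hypothetical sketch -- not the actual Settings source.
public static void setStringIfNotEmpty(String key, String value) {
    // Mirrors the removed "if (value != null && !value.isEmpty())" blocks:
    // only persist the setting when a usable value was supplied.
    if (value != null && !value.isEmpty()) {
        setString(key, value);
    }
}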
/**

View File

@@ -214,7 +214,7 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
* @return a Set of strings.
*/
protected static Set<String> newHashSet(String... strings) {
final Set<String> set = new HashSet<String>();
final Set<String> set = new HashSet<String>(strings.length);
Collections.addAll(set, strings);
return set;
}

View File

@@ -114,8 +114,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
static {
final String additionalZipExt = Settings.getString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS);
if (additionalZipExt != null) {
final Set<String> ext = new HashSet<String>(Collections.singletonList(additionalZipExt));
ZIPPABLES.addAll(ext);
String[] ext = additionalZipExt.split("\\s*,\\s*");
Collections.addAll(ZIPPABLES, ext);
}
EXTENSIONS.addAll(ZIPPABLES);
}
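The new parsing treats the additional-extensions setting as a comma-separated list and strips the surrounding whitespace in the same step. A standalone illustration of the "\\s*,\\s*" split, using the sample value configured in the new ArchiveAnalyzerTest later in this commit:

import java.util.Arrays;

// Demo only -- not part of the analyzer itself.
public class ZipExtensionSplitDemo {
    public static void main(String[] args) {
        final String additionalZipExt = "z2, z3";
        // Splits on commas and absorbs the whitespace around them.
        final String[] ext = additionalZipExt.split("\\s*,\\s*");
        System.out.println(Arrays.toString(ext)); // prints [z2, z3]
    }
}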
@@ -415,11 +415,9 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
FileOutputStream fos = null;
try {
final File parent = file.getParentFile();
if (!parent.isDirectory()) {
if (!parent.mkdirs()) {
final String msg = String.format("Unable to build directory '%s'.", parent.getAbsolutePath());
throw new AnalysisException(msg);
}
if (!parent.isDirectory() && !parent.mkdirs()) {
final String msg = String.format("Unable to build directory '%s'.", parent.getAbsolutePath());
throw new AnalysisException(msg);
}
fos = new FileOutputStream(file);
IOUtils.copy(input, fos);

View File

@@ -17,13 +17,13 @@
*/
package org.owasp.dependencycheck.analyzer;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileFilter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.NullOutputStream;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
@@ -115,18 +115,15 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
final List<String> args = buildArgumentList();
args.add(dependency.getActualFilePath());
final ProcessBuilder pb = new ProcessBuilder(args);
BufferedReader rdr = null;
Document doc = null;
try {
final Process proc = pb.start();
// Try evacuating the error stream
rdr = new BufferedReader(new InputStreamReader(proc.getErrorStream(), "UTF-8"));
String line = null;
// CHECKSTYLE:OFF
while (rdr.ready() && (line = rdr.readLine()) != null) {
LOGGER.warn("Error from GrokAssembly: {}", line);
final String errorStream = IOUtils.toString(proc.getErrorStream(), "UTF-8");
if (null != errorStream && !errorStream.isEmpty()) {
LOGGER.warn("Error from GrokAssembly: {}", errorStream);
}
// CHECKSTYLE:ON
int rc = 0;
doc = builder.parse(proc.getInputStream());
@@ -176,14 +173,6 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
} catch (XPathExpressionException xpe) {
// This shouldn't happen
throw new AnalysisException(xpe);
} finally {
if (rdr != null) {
try {
rdr.close();
} catch (IOException ex) {
LOGGER.debug("ignore", ex);
}
}
}
}
@@ -200,11 +189,8 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
try {
fos = new FileOutputStream(tempFile);
is = AssemblyAnalyzer.class.getClassLoader().getResourceAsStream("GrokAssembly.exe");
final byte[] buff = new byte[4096];
int bread = -1;
while ((bread = is.read(buff)) >= 0) {
fos.write(buff, 0, bread);
}
IOUtils.copy(is, fos);
grokAssemblyExe = tempFile;
// Set the temp file to get deleted when we're done
grokAssemblyExe.deleteOnExit();
@@ -232,17 +218,12 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
// Now, need to see if GrokAssembly actually runs from this location.
final List<String> args = buildArgumentList();
BufferedReader rdr = null;
try {
final ProcessBuilder pb = new ProcessBuilder(args);
final Process p = pb.start();
// Try evacuating the error stream
rdr = new BufferedReader(new InputStreamReader(p.getErrorStream(), "UTF-8"));
// CHECKSTYLE:OFF
while (rdr.ready() && rdr.readLine() != null) {
// We expect this to complain
}
// CHECKSTYLE:ON
IOUtils.copy(p.getErrorStream(), NullOutputStream.NULL_OUTPUT_STREAM);
final Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(p.getInputStream());
final XPath xpath = XPathFactory.newInstance().newXPath();
final String error = xpath.evaluate("/assembly/error", doc);
@@ -263,14 +244,6 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
this.setEnabled(false);
throw new AnalysisException("An error occured with the .NET AssemblyAnalyzer", e);
}
} finally {
if (rdr != null) {
try {
rdr.close();
} catch (IOException ex) {
LOGGER.trace("ignore", ex);
}
}
}
builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
}

View File

@@ -335,7 +335,7 @@ public class CPEAnalyzer implements Analyzer {
* @return if the append was successful.
*/
private boolean appendWeightedSearch(StringBuilder sb, String field, String searchText, Set<String> weightedText) {
sb.append(" ").append(field).append(":( ");
sb.append(' ').append(field).append(":( ");
final String cleanText = cleanseText(searchText);
@@ -349,20 +349,27 @@ public class CPEAnalyzer implements Analyzer {
final StringTokenizer tokens = new StringTokenizer(cleanText);
while (tokens.hasMoreElements()) {
final String word = tokens.nextToken();
String temp = null;
StringBuilder temp = null;
for (String weighted : weightedText) {
final String weightedStr = cleanseText(weighted);
if (equalsIgnoreCaseAndNonAlpha(word, weightedStr)) {
temp = LuceneUtils.escapeLuceneQuery(word) + WEIGHTING_BOOST;
temp = new StringBuilder(word.length() + 2);
LuceneUtils.appendEscapedLuceneQuery(temp, word);
temp.append(WEIGHTING_BOOST);
if (!word.equalsIgnoreCase(weightedStr)) {
temp += " " + LuceneUtils.escapeLuceneQuery(weightedStr) + WEIGHTING_BOOST;
temp.append(' ');
LuceneUtils.appendEscapedLuceneQuery(temp, weightedStr);
temp.append(WEIGHTING_BOOST);
}
break;
}
}
sb.append(' ');
if (temp == null) {
temp = LuceneUtils.escapeLuceneQuery(word);
LuceneUtils.appendEscapedLuceneQuery(sb, word);
} else {
sb.append(temp);
}
sb.append(" ").append(temp);
}
}
sb.append(" ) ");
@@ -515,7 +522,7 @@ public class CPEAnalyzer implements Analyzer {
for (VulnerableSoftware vs : cpes) {
DependencyVersion dbVer;
if (vs.getUpdate() != null && !vs.getUpdate().isEmpty()) {
dbVer = DependencyVersionUtil.parseVersion(vs.getVersion() + "." + vs.getUpdate());
dbVer = DependencyVersionUtil.parseVersion(vs.getVersion() + '.' + vs.getUpdate());
} else {
dbVer = DependencyVersionUtil.parseVersion(vs.getVersion());
}

View File

@@ -192,7 +192,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
final List<MavenArtifact> mas = searcher.searchSha1(dependency.getSha1sum());
final Confidence confidence = mas.size() > 1 ? Confidence.HIGH : Confidence.HIGHEST;
for (MavenArtifact ma : mas) {
LOGGER.debug("Central analyzer found artifact ({}) for dependency ({})", ma.toString(), dependency.getFileName());
LOGGER.debug("Central analyzer found artifact ({}) for dependency ({})", ma, dependency.getFileName());
dependency.addAsEvidence("central", ma, confidence);
boolean pomAnalyzed = false;
for (Evidence e : dependency.getVendorEvidence()) {

View File

@@ -113,7 +113,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
for (Identifier i : dependency.getIdentifiers()) {
if ("maven".contains(i.getType())) {
if (i.getValue() != null && i.getValue().startsWith("org.springframework.")) {
final int endPoint = i.getValue().indexOf(":", 19);
final int endPoint = i.getValue().indexOf(':', 19);
if (endPoint >= 0) {
mustContain = i.getValue().substring(19, endPoint).toLowerCase();
break;
@@ -472,8 +472,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
*/
private String trimCpeToVendor(String value) {
//cpe:/a:jruby:jruby:1.0.8
final int pos1 = value.indexOf(":", 7); //right of vendor
final int pos2 = value.indexOf(":", pos1 + 1); //right of product
final int pos1 = value.indexOf(':', 7); //right of vendor
final int pos2 = value.indexOf(':', pos1 + 1); //right of product
if (pos2 < 0) {
return value;
} else {

View File

@@ -18,6 +18,7 @@
package org.owasp.dependencycheck.analyzer;
import java.io.File;
import org.apache.commons.io.FilenameUtils;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
@@ -76,13 +77,7 @@ public class FileNameAnalyzer extends AbstractAnalyzer implements Analyzer {
//strip any path information that may get added by ArchiveAnalyzer, etc.
final File f = dependency.getActualFile();
String fileName = f.getName();
//remove file extension
final int pos = fileName.lastIndexOf(".");
if (pos > 0) {
fileName = fileName.substring(0, pos);
}
final String fileName = FilenameUtils.removeExtension(f.getName());
//add version evidence
final DependencyVersion version = DependencyVersionUtil.parseVersion(fileName);

View File

@@ -42,6 +42,7 @@ import java.util.jar.Manifest;
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
import org.apache.commons.compress.utils.IOUtils;
import org.apache.commons.io.FilenameUtils;
import org.jsoup.Jsoup;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
@@ -269,8 +270,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
}
File externalPom = null;
if (pomEntries.isEmpty()) {
String pomPath = dependency.getActualFilePath();
pomPath = pomPath.substring(0, pomPath.lastIndexOf('.')) + ".pom";
final String pomPath = FilenameUtils.removeExtension(dependency.getActualFilePath()) + ".pom";
externalPom = new File(pomPath);
if (externalPom.isFile()) {
pomEntries.add(pomPath);

View File

@@ -104,7 +104,7 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
*/
boolean retval = false;
try {
if ((!DEFAULT_URL.equals(Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL)))
if (!DEFAULT_URL.equals(Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL))
&& Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED)) {
LOGGER.info("Enabling Nexus analyzer");
retval = true;

View File

@@ -126,7 +126,7 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
*/
@Override
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
LOGGER.debug("Checking Nuspec file {}", dependency.toString());
LOGGER.debug("Checking Nuspec file {}", dependency);
try {
final NuspecParser parser = new XPathNuspecParser();
NugetPackage np = null;

View File

@@ -73,7 +73,7 @@ public class NvdCveAnalyzer implements Analyzer {
* @return true or false.
*/
public boolean isOpen() {
return (cveDB != null);
return cveDB != null;
}
/**

View File

@@ -90,7 +90,7 @@ public class CentralSearch {
final URL url = new URL(rootURL + String.format("?q=1:\"%s\"&wt=xml", sha1));
LOGGER.debug("Searching Central url {}", url.toString());
LOGGER.debug("Searching Central url {}", url);
// Determine if we need to use a proxy. The rules:
// 1) If the proxy is set, AND the setting is set to true, use the proxy

View File

@@ -24,6 +24,11 @@ package org.owasp.dependencycheck.data.composer;
*/
public class ComposerException extends RuntimeException {
/**
* The serial version UID for serialization.
*/
private static final long serialVersionUID = 1L;
/**
* Creates a ComposerException with default message.
*/

View File

@@ -149,7 +149,6 @@ public final class CpeMemoryIndex {
*
* @return the CPE Analyzer.
*/
@SuppressWarnings("unchecked")
private Analyzer createIndexingAnalyzer() {
final Map<String, Analyzer> fieldAnalyzers = new HashMap<String, Analyzer>();
fieldAnalyzers.put(Fields.DOCUMENT_KEY, new KeywordAnalyzer());
@@ -161,7 +160,6 @@ public final class CpeMemoryIndex {
*
* @return the CPE Analyzer.
*/
@SuppressWarnings("unchecked")
private Analyzer createSearchingAnalyzer() {
final Map<String, Analyzer> fieldAnalyzers = new HashMap<String, Analyzer>();
fieldAnalyzers.put(Fields.DOCUMENT_KEY, new KeywordAnalyzer());
@@ -173,24 +171,6 @@ public final class CpeMemoryIndex {
return new PerFieldAnalyzerWrapper(new FieldAnalyzer(LuceneUtils.CURRENT_VERSION), fieldAnalyzers);
}
/**
* Saves a CPE IndexEntry into the Lucene index.
*
* @param vendor the vendor to index
* @param product the product to index
* @param indexWriter the index writer to write the entry into
* @throws CorruptIndexException is thrown if the index is corrupt
* @throws IOException is thrown if an IOException occurs
*/
public void saveEntry(String vendor, String product, IndexWriter indexWriter) throws CorruptIndexException, IOException {
final Document doc = new Document();
final Field v = new TextField(Fields.VENDOR, vendor, Field.Store.YES);
final Field p = new TextField(Fields.PRODUCT, product, Field.Store.YES);
doc.add(v);
doc.add(p);
indexWriter.addDocument(doc);
}
/**
* Closes the CPE Index.
*/
@@ -230,9 +210,20 @@ public final class CpeMemoryIndex {
final IndexWriterConfig conf = new IndexWriterConfig(LuceneUtils.CURRENT_VERSION, analyzer);
indexWriter = new IndexWriter(index, conf);
try {
// Tip: reuse the Document and Fields for performance...
// See "Re-use Document and Field instances" from
// http://wiki.apache.org/lucene-java/ImproveIndexingSpeed
final Document doc = new Document();
final Field v = new TextField(Fields.VENDOR, Fields.VENDOR, Field.Store.YES);
final Field p = new TextField(Fields.PRODUCT, Fields.PRODUCT, Field.Store.YES);
doc.add(v);
doc.add(p);
final Set<Pair<String, String>> data = cve.getVendorProductList();
for (Pair<String, String> pair : data) {
saveEntry(pair.getLeft(), pair.getRight(), indexWriter);
v.setStringValue(pair.getLeft());
p.setStringValue(pair.getRight());
indexWriter.addDocument(doc);
}
} catch (DatabaseException ex) {
LOGGER.debug("", ex);
@@ -287,8 +278,9 @@ public final class CpeMemoryIndex {
if (searchString == null || searchString.trim().isEmpty()) {
throw new ParseException("Query is null or empty");
}
LOGGER.debug(searchString);
final Query query = queryParser.parse(searchString);
return indexSearcher.search(query, maxQueryResults);
return search(query, maxQueryResults);
}
/**

View File

@@ -48,7 +48,7 @@ public class IndexEntry implements Serializable {
*/
public String getDocumentId() {
if (documentId == null && vendor != null && product != null) {
documentId = vendor + ":" + product;
documentId = vendor + ':' + product;
}
return documentId;
}

View File

@@ -77,6 +77,7 @@ public final class LuceneUtils {
case '*':
case '?':
case ':':
case '/':
case '\\': //it is supposed to fall through here
buf.append('\\');
default:

View File

@@ -94,13 +94,13 @@ public class MavenArtifact {
}
if (jarAvailable) {
//org/springframework/spring-core/3.2.0.RELEASE/spring-core-3.2.0.RELEASE.pom
this.artifactUrl = base + groupId.replace('.', '/') + "/" + artifactId + "/"
+ version + "/" + artifactId + "-" + version + ".jar";
this.artifactUrl = base + groupId.replace('.', '/') + '/' + artifactId + '/'
+ version + '/' + artifactId + '-' + version + ".jar";
}
if (pomAvailable) {
//org/springframework/spring-core/3.2.0.RELEASE/spring-core-3.2.0.RELEASE.pom
this.pomUrl = base + groupId.replace('.', '/') + "/" + artifactId + "/"
+ version + "/" + artifactId + "-" + version + ".pom";
this.pomUrl = base + groupId.replace('.', '/') + '/' + artifactId + '/'
+ version + '/' + artifactId + '-' + version + ".pom";
}
}

View File

@@ -63,7 +63,7 @@ public class NexusSearch {
this.rootURL = rootURL;
try {
if (null != Settings.getString(Settings.KEYS.PROXY_SERVER)
&& Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_PROXY)) {
&& Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY)) {
useProxy = true;
LOGGER.debug("Using proxy");
} else {

View File

@@ -17,11 +17,9 @@
*/
package org.owasp.dependencycheck.data.nvdcve;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.Driver;
@@ -29,7 +27,10 @@ import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import org.apache.commons.io.IOUtils;
import org.owasp.dependencycheck.utils.DBUtils;
import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -58,6 +59,10 @@ public final class ConnectionFactory {
* Resource location for SQL file used to create the database schema.
*/
public static final String DB_STRUCTURE_UPDATE_RESOURCE = "data/upgrade_%s.sql";
/**
* The URL that discusses upgrading non-H2 databases.
*/
public static final String UPGRADE_HELP_URL = "http://jeremylong.github.io/DependencyCheck/data/upgrade.html";
/**
* The database driver used to connect to the database.
*/
@@ -243,22 +248,15 @@ public final class ConnectionFactory {
*/
private static void createTables(Connection conn) throws DatabaseException {
LOGGER.debug("Creating database structure");
InputStream is;
InputStreamReader reader;
BufferedReader in = null;
InputStream is = null;
try {
is = ConnectionFactory.class.getClassLoader().getResourceAsStream(DB_STRUCTURE_RESOURCE);
reader = new InputStreamReader(is, "UTF-8");
in = new BufferedReader(reader);
final StringBuilder sb = new StringBuilder(2110);
String tmp;
while ((tmp = in.readLine()) != null) {
sb.append(tmp);
}
final String dbStructure = IOUtils.toString(is, "UTF-8");
Statement statement = null;
try {
statement = conn.createStatement();
statement.execute(sb.toString());
statement.execute(dbStructure);
} catch (SQLException ex) {
LOGGER.debug("", ex);
throw new DatabaseException("Unable to create database statement", ex);
@@ -268,13 +266,7 @@ public final class ConnectionFactory {
} catch (IOException ex) {
throw new DatabaseException("Unable to create database schema", ex);
} finally {
if (in != null) {
try {
in.close();
} catch (IOException ex) {
LOGGER.trace("", ex);
}
}
IOUtils.closeQuietly(is);
}
}
@@ -288,48 +280,54 @@ public final class ConnectionFactory {
* @throws DatabaseException thrown if there is an exception upgrading the database schema
*/
private static void updateSchema(Connection conn, String schema) throws DatabaseException {
LOGGER.debug("Updating database structure");
InputStream is;
InputStreamReader reader;
BufferedReader in = null;
String updateFile = null;
final String databaseProductName;
try {
updateFile = String.format(DB_STRUCTURE_UPDATE_RESOURCE, schema);
is = ConnectionFactory.class.getClassLoader().getResourceAsStream(updateFile);
if (is == null) {
throw new DatabaseException(String.format("Unable to load update file '%s'", updateFile));
}
reader = new InputStreamReader(is, "UTF-8");
in = new BufferedReader(reader);
final StringBuilder sb = new StringBuilder(2110);
String tmp;
while ((tmp = in.readLine()) != null) {
sb.append(tmp);
}
Statement statement = null;
databaseProductName = conn.getMetaData().getDatabaseProductName();
} catch (SQLException ex) {
throw new DatabaseException("Unable to get the database product name");
}
if ("h2".equalsIgnoreCase(databaseProductName)) {
LOGGER.debug("Updating database structure");
InputStream is = null;
String updateFile = null;
try {
statement = conn.createStatement();
statement.execute(sb.toString());
} catch (SQLException ex) {
LOGGER.debug("", ex);
throw new DatabaseException("Unable to update database schema", ex);
} finally {
DBUtils.closeStatement(statement);
}
} catch (IOException ex) {
final String msg = String.format("Upgrade SQL file does not exist: %s", updateFile);
throw new DatabaseException(msg, ex);
} finally {
if (in != null) {
try {
in.close();
} catch (IOException ex) {
LOGGER.trace("", ex);
updateFile = String.format(DB_STRUCTURE_UPDATE_RESOURCE, schema);
is = ConnectionFactory.class.getClassLoader().getResourceAsStream(updateFile);
if (is == null) {
throw new DatabaseException(String.format("Unable to load update file '%s'", updateFile));
}
final String dbStructureUpdate = IOUtils.toString(is, "UTF-8");
Statement statement = null;
try {
statement = conn.createStatement();
final boolean success = statement.execute(dbStructureUpdate);
if (!success && statement.getUpdateCount() <= 0) {
throw new DatabaseException(String.format("Unable to upgrade the database schema to %s", schema));
}
} catch (SQLException ex) {
LOGGER.debug("", ex);
throw new DatabaseException("Unable to update database schema", ex);
} finally {
DBUtils.closeStatement(statement);
}
} catch (IOException ex) {
final String msg = String.format("Upgrade SQL file does not exist: %s", updateFile);
throw new DatabaseException(msg, ex);
} finally {
IOUtils.closeQuietly(is);
}
} else {
LOGGER.error("The database schema must be upgraded to use this version of dependency-check. Please see {} for more information.", UPGRADE_HELP_URL);
throw new DatabaseException("Database schema is out of date");
}
}
/**
* Counter to ensure that calls to ensureSchemaVersion does not end up in an endless loop.
*/
private static int callDepth = 0;
/**
* Uses the provided connection to check the specified schema version within the database.
*
@@ -344,10 +342,15 @@ public final class ConnectionFactory {
cs = conn.prepareCall("SELECT value FROM properties WHERE id = 'version'");
rs = cs.executeQuery();
if (rs.next()) {
if (!DB_SCHEMA_VERSION.equals(rs.getString(1))) {
final DependencyVersion current = DependencyVersionUtil.parseVersion(DB_SCHEMA_VERSION);
final DependencyVersion db = DependencyVersionUtil.parseVersion(rs.getString(1));
if (current.compareTo(db) > 0) {
LOGGER.debug("Current Schema: " + DB_SCHEMA_VERSION);
LOGGER.debug("DB Schema: " + rs.getString(1));
updateSchema(conn, rs.getString(1));
if (++callDepth < 10) {
ensureSchemaVersion(conn);
}
}
} else {
throw new DatabaseException("Database schema is missing");

View File

@@ -29,8 +29,10 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.MissingResourceException;
import java.util.Properties;
import java.util.ResourceBundle;
import java.util.Set;
@@ -74,9 +76,17 @@ public class CveDB {
*/
public CveDB() throws DatabaseException {
super();
statementBundle = ResourceBundle.getBundle("data/dbStatements");
try {
open();
try {
final String databaseProductName = conn.getMetaData().getDatabaseProductName();
LOGGER.debug("Database dialect: {}", databaseProductName);
final Locale dbDialect = new Locale(databaseProductName);
statementBundle = ResourceBundle.getBundle("data/dbStatements", dbDialect);
} catch (SQLException se) {
LOGGER.warn("Problem loading database specific dialect!", se);
statementBundle = ResourceBundle.getBundle("data/dbStatements");
}
databaseProperties = new DatabaseProperties(this);
} catch (DatabaseException ex) {
throw ex;
@@ -252,44 +262,6 @@ public class CveDB {
return prop;
}
/**
* Saves a set of properties to the database.
*
* @param props a collection of properties
*/
void saveProperties(Properties props) {
PreparedStatement updateProperty = null;
PreparedStatement insertProperty = null;
try {
try {
updateProperty = getConnection().prepareStatement(statementBundle.getString("UPDATE_PROPERTY"));
insertProperty = getConnection().prepareStatement(statementBundle.getString("INSERT_PROPERTY"));
} catch (SQLException ex) {
LOGGER.warn("Unable to save properties to the database");
LOGGER.debug("Unable to save properties to the database", ex);
return;
}
for (Entry<Object, Object> entry : props.entrySet()) {
final String key = entry.getKey().toString();
final String value = entry.getValue().toString();
try {
updateProperty.setString(1, value);
updateProperty.setString(2, key);
if (updateProperty.executeUpdate() == 0) {
insertProperty.setString(1, key);
insertProperty.setString(2, value);
}
} catch (SQLException ex) {
LOGGER.warn("Unable to save property '{}' with a value of '{}' to the database", key, value);
LOGGER.debug("", ex);
}
}
} finally {
DBUtils.closeStatement(updateProperty);
DBUtils.closeStatement(insertProperty);
}
}
/**
* Saves a property to the database.
*
@@ -297,38 +269,38 @@ public class CveDB {
* @param value the property value
*/
void saveProperty(String key, String value) {
PreparedStatement updateProperty = null;
PreparedStatement insertProperty = null;
try {
try {
updateProperty = getConnection().prepareStatement(statementBundle.getString("UPDATE_PROPERTY"));
} catch (SQLException ex) {
LOGGER.warn("Unable to save properties to the database");
LOGGER.debug("Unable to save properties to the database", ex);
return;
}
try {
updateProperty.setString(1, value);
updateProperty.setString(2, key);
if (updateProperty.executeUpdate() == 0) {
try {
insertProperty = getConnection().prepareStatement(statementBundle.getString("INSERT_PROPERTY"));
} catch (SQLException ex) {
LOGGER.warn("Unable to save properties to the database");
LOGGER.debug("Unable to save properties to the database", ex);
return;
}
insertProperty.setString(1, key);
insertProperty.setString(2, value);
insertProperty.execute();
final PreparedStatement mergeProperty = getConnection().prepareStatement(statementBundle.getString("MERGE_PROPERTY"));
try {
mergeProperty.setString(1, key);
mergeProperty.setString(2, value);
mergeProperty.executeUpdate();
} finally {
DBUtils.closeStatement(mergeProperty);
}
} catch (MissingResourceException mre) {
// No Merge statement, so doing an Update/Insert...
PreparedStatement updateProperty = null;
PreparedStatement insertProperty = null;
try {
updateProperty = getConnection().prepareStatement(statementBundle.getString("UPDATE_PROPERTY"));
updateProperty.setString(1, value);
updateProperty.setString(2, key);
if (updateProperty.executeUpdate() == 0) {
insertProperty = getConnection().prepareStatement(statementBundle.getString("INSERT_PROPERTY"));
insertProperty.setString(1, key);
insertProperty.setString(2, value);
insertProperty.executeUpdate();
}
} finally {
DBUtils.closeStatement(updateProperty);
DBUtils.closeStatement(insertProperty);
}
} catch (SQLException ex) {
LOGGER.warn("Unable to save property '{}' with a value of '{}' to the database", key, value);
LOGGER.debug("", ex);
}
} finally {
DBUtils.closeStatement(updateProperty);
DBUtils.closeStatement(insertProperty);
} catch (SQLException ex) {
LOGGER.warn("Unable to save property '{}' with a value of '{}' to the database", key, value);
LOGGER.debug("", ex);
}
}
@@ -420,7 +392,7 @@ public class CveDB {
if (cwe != null) {
final String name = CweDB.getCweName(cwe);
if (name != null) {
cwe += " " + name;
cwe += ' ' + name;
}
}
final int cveId = rsV.getInt(1);

View File

@@ -45,6 +45,10 @@ public class DatabaseProperties {
* updates).
*/
public static final String MODIFIED = "Modified";
/**
* The properties file key for the last checked field - used to store the last check time of the Modified NVD CVE xml file.
*/
public static final String LAST_CHECKED = "NVD CVE Checked";
/**
* The properties file key for the last updated field - used to store the last updated time of the Modified NVD CVE xml file.
*/

View File

@@ -137,7 +137,7 @@ public class CpeUpdater extends BaseUpdater implements CachedWebDataSource {
*/
private boolean updateNeeded() {
final long now = System.currentTimeMillis();
final int days = Settings.getInt(Settings.KEYS.CVE_MODIFIED_VALID_FOR_DAYS, 30);
final int days = Settings.getInt(Settings.KEYS.CPE_MODIFIED_VALID_FOR_DAYS, 30);
long timestamp = 0;
final String ts = getProperties().getProperty(LAST_CPE_UPDATE);
if (ts != null && ts.matches("^[0-9]+$")) {

View File

@@ -66,9 +66,11 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
public void update() throws UpdateException {
try {
openDataStores();
final UpdateableNvdCve updateable = getUpdatesNeeded();
if (updateable.isUpdateNeeded()) {
performUpdate(updateable);
if (checkUpdate()) {
final UpdateableNvdCve updateable = getUpdatesNeeded();
if (updateable.isUpdateNeeded()) {
performUpdate(updateable);
}
}
} catch (MalformedURLException ex) {
LOGGER.warn(
@@ -87,6 +89,35 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
}
}
/**
* Checks if the NVD CVE XML files were last checked recently.
* As an optimization, we can avoid repetitive checks against the NVD.
* Setting CVE_CHECK_VALID_FOR_HOURS determines the duration since last check before checking again.
* A database property stores the timestamp of the last check.
*
* @return true to proceed with the check, or false to skip.
*/
private boolean checkUpdate () throws UpdateException {
boolean proceed = true;
// If the valid setting has not been specified, then we proceed to check...
final int validForHours = Settings.getInt(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, 0);
if (0 < validForHours) {
// ms Valid = valid (hours) x 60 min/hour x 60 sec/min x 1000 ms/sec
final long msValid = validForHours * 60L * 60L * 1000L;
final long lastChecked = Long.parseLong(getProperties().getProperty(DatabaseProperties.LAST_CHECKED, "0"));
final long now = System.currentTimeMillis();
proceed = (now - lastChecked) > msValid;
if (proceed) {
getProperties().save(DatabaseProperties.LAST_CHECKED, Long.toString(now));
} else {
LOGGER.info("Skipping NVD check since last check was within {} hours.", validForHours);
LOGGER.debug("Last NVD was at {}, and now {} is within {} ms.",
lastChecked, now, msValid);
}
}
return proceed;
}
/**
* Downloads the latest NVD CVE XML file from the web and imports it into the current CVE Database.
*

View File

@@ -68,8 +68,8 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
final File file2;
try {
file1 = File.createTempFile("cve" + nvdCveInfo.getId() + "_", ".xml", Settings.getTempDirectory());
file2 = File.createTempFile("cve_1_2_" + nvdCveInfo.getId() + "_", ".xml", Settings.getTempDirectory());
file1 = File.createTempFile("cve" + nvdCveInfo.getId() + '_', ".xml", Settings.getTempDirectory());
file2 = File.createTempFile("cve_1_2_" + nvdCveInfo.getId() + '_', ".xml", Settings.getTempDirectory());
} catch (IOException ex) {
throw new UpdateException("Unable to create temporary files", ex);
}

View File

@@ -114,10 +114,10 @@ public class NvdCve12Handler extends DefaultHandler {
in the nvd cve 2.0. */
String cpe = "cpe:/a:" + vendor + ":" + product;
if (num != null) {
cpe += ":" + num;
cpe += ':' + num;
}
if (edition != null) {
cpe += ":" + edition;
cpe += ':' + edition;
}
final VulnerableSoftware vs = new VulnerableSoftware();
vs.setCpe(cpe);

View File

@@ -341,7 +341,7 @@ public class Dependency implements Serializable, Comparable<Dependency> {
}
}
if (!found) {
LOGGER.debug("Adding new maven identifier {}", mavenArtifact.toString());
LOGGER.debug("Adding new maven identifier {}", mavenArtifact);
this.addIdentifier("maven", mavenArtifact.toString(), mavenArtifact.getArtifactUrl(), Confidence.HIGHEST);
}
}

View File

@@ -20,6 +20,7 @@ package org.owasp.dependencycheck.suppression;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.Vulnerability;
@@ -381,30 +382,7 @@ public class SuppressionRule {
* @return true if the property type does not specify a version; otherwise false
*/
boolean cpeHasNoVersion(PropertyType c) {
if (c.isRegex()) {
return false;
}
if (countCharacter(c.getValue(), ':') == 3) {
return true;
}
return false;
}
/**
* Counts the number of occurrences of the character found within the string.
*
* @param str the string to check
* @param c the character to count
* @return the number of times the character is found in the string
*/
int countCharacter(String str, char c) {
int count = 0;
int pos = str.indexOf(c) + 1;
while (pos > 0) {
count += 1;
pos = str.indexOf(c, pos) + 1;
}
return count;
return !c.isRegex() && StringUtils.countMatches(c.getValue(), ':') == 3;
}
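For a quick check of the colon-count rule, reuse the sample CPE from the trimCpeToVendor comment earlier in this commit (illustrative only; assumes the same commons-lang3 StringUtils the rule itself calls):

import org.apache.commons.lang3.StringUtils;

// Three colons => vendor and product only; a fourth colon introduces a version.
public class CpeColonCountDemo {
    public static void main(String[] args) {
        System.out.println(StringUtils.countMatches("cpe:/a:jruby:jruby", ':'));       // 3 -> no version
        System.out.println(StringUtils.countMatches("cpe:/a:jruby:jruby:1.0.8", ':')); // 4 -> has a version
    }
}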
/**
@@ -442,43 +420,43 @@ public class SuppressionRule {
final StringBuilder sb = new StringBuilder();
sb.append("SuppressionRule{");
if (filePath != null) {
sb.append("filePath=").append(filePath).append(",");
sb.append("filePath=").append(filePath).append(',');
}
if (sha1 != null) {
sb.append("sha1=").append(sha1).append(",");
sb.append("sha1=").append(sha1).append(',');
}
if (gav != null) {
sb.append("gav=").append(gav).append(",");
sb.append("gav=").append(gav).append(',');
}
if (cpe != null && !cpe.isEmpty()) {
sb.append("cpe={");
for (PropertyType pt : cpe) {
sb.append(pt).append(",");
sb.append(pt).append(',');
}
sb.append("}");
sb.append('}');
}
if (cwe != null && !cwe.isEmpty()) {
sb.append("cwe={");
for (String s : cwe) {
sb.append(s).append(",");
sb.append(s).append(',');
}
sb.append("}");
sb.append('}');
}
if (cve != null && !cve.isEmpty()) {
sb.append("cve={");
for (String s : cve) {
sb.append(s).append(",");
sb.append(s).append(',');
}
sb.append("}");
sb.append('}');
}
if (cvssBelow != null && !cvssBelow.isEmpty()) {
sb.append("cvssBelow={");
for (Float s : cvssBelow) {
sb.append(s).append(",");
sb.append(s).append(',');
}
sb.append("}");
sb.append('}');
}
sb.append("}");
sb.append('}');
return sb.toString();
}
}

View File

@@ -36,11 +36,12 @@ public final class DateUtil {
*
* @param date the date to be checked.
* @param compareTo the date to compare to.
* @param range the range in days to be considered valid.
* @param dayRange the range in days to be considered valid.
* @return whether or not the date is within the range.
*/
public static boolean withinDateRange(long date, long compareTo, int range) {
final double differenceInDays = (compareTo - date) / 1000.0 / 60.0 / 60.0 / 24.0;
return differenceInDays < range;
public static boolean withinDateRange(long date, long compareTo, int dayRange) {
// ms = dayRange x 24 hours/day x 60 min/hour x 60 sec/min x 1000 ms/sec
final long msRange = dayRange * 24L * 60L * 60L * 1000L;
return (compareTo - date) < msRange;
}
}
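As a sanity check of the day-to-millisecond arithmetic the method now relies on (illustrative values only):

// 7 days x 24 h/day x 60 min/h x 60 s/min x 1000 ms/s = 604,800,000 ms.
public class DateRangeDemo {
    public static void main(String[] args) {
        final long now = System.currentTimeMillis();
        final long msRange = 7 * 24L * 60L * 60L * 1000L;
        final long sixDaysAgo = now - 6L * 24L * 60L * 60L * 1000L;
        final long eightDaysAgo = now - 8L * 24L * 60L * 60L * 1000L;
        // Same comparison as withinDateRange(date, now, 7):
        System.out.println((now - sixDaysAgo) < msRange);   // true  -> still within range
        System.out.println((now - eightDaysAgo) < msRange); // false -> outside the range
    }
}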

View File

@@ -115,7 +115,7 @@ public class DependencyVersion implements Iterable<String>, Comparable<Dependenc
*/
@Override
public String toString() {
return StringUtils.join(versionParts.toArray(), ".");
return StringUtils.join(versionParts, '.');
}
/**

View File

@@ -182,13 +182,11 @@ public final class ExtractionUtil {
while ((entry = input.getNextEntry()) != null) {
if (entry.isDirectory()) {
final File dir = new File(destination, entry.getName());
if (!dir.exists()) {
if (!dir.mkdirs()) {
final String msg = String.format(
"Unable to create directory '%s'.",
dir.getAbsolutePath());
throw new AnalysisException(msg);
}
if (!dir.exists() && !dir.mkdirs()) {
final String msg = String.format(
"Unable to create directory '%s'.",
dir.getAbsolutePath());
throw new AnalysisException(msg);
}
} else {
extractFile(input, destination, filter, entry);
@@ -264,13 +262,11 @@ public final class ExtractionUtil {
private static void createParentFile(final File file)
throws ExtractionException {
final File parent = file.getParentFile();
if (!parent.isDirectory()) {
if (!parent.mkdirs()) {
final String msg = String.format(
"Unable to build directory '%s'.",
parent.getAbsolutePath());
throw new ExtractionException(msg);
}
if (!parent.isDirectory() && !parent.mkdirs()) {
final String msg = String.format(
"Unable to build directory '%s'.",
parent.getAbsolutePath());
throw new ExtractionException(msg);
}
}
}

View File

@@ -1,47 +0,0 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2012 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.utils;
import java.io.FilterInputStream;
import java.io.InputStream;
/**
* NonClosingStream is a stream filter which prevents another class that processes the stream from closing it. This is
* necessary when dealing with things like JAXB and zipInputStreams.
*
* @author Jeremy Long
*/
public class NonClosingStream extends FilterInputStream {
/**
* Constructs a new NonClosingStream.
*
* @param in an input stream.
*/
public NonClosingStream(InputStream in) {
super(in);
}
/**
* Prevents closing of the stream.
*/
@Override
public void close() {
// don't close the stream.
}
}

View File

@@ -21,6 +21,9 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.apache.commons.lang3.text.StrLookup;
import org.apache.commons.lang3.text.StrSubstitutor;
/**
* A simple pojo to hold data related to a Maven POM file.
*
@@ -307,33 +310,27 @@ public class Model {
* @return the interpolated text.
*/
public static String interpolateString(String text, Properties properties) {
final Properties props = properties;
if (text == null) {
if (null == text || null == properties) {
return text;
}
if (props == null) {
return text;
}
final int pos = text.indexOf("${");
if (pos < 0) {
return text;
}
final int end = text.indexOf("}");
if (end < pos) {
return text;
}
final String propName = text.substring(pos + 2, end);
String propValue = interpolateString(props.getProperty(propName), props);
if (propValue == null) {
propValue = "";
}
final StringBuilder sb = new StringBuilder(propValue.length() + text.length());
sb.append(text.subSequence(0, pos));
sb.append(propValue);
sb.append(text.substring(end + 1));
return interpolateString(sb.toString(), props); //yes yes, this should be a loop...
final StrSubstitutor substitutor = new StrSubstitutor(new PropertyLookup(properties));
return substitutor.replace(text);
}
/**
* Utility class that can provide values from a Properties object to a StrSubstitutor.
*/
private static class PropertyLookup extends StrLookup {
private final Properties props;
public PropertyLookup(Properties props) {
this.props = props;
}
@Override
public String lookup(String key) {
return props.getProperty(key);
}
}
}
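A minimal, standalone sketch of the placeholder interpolation that interpolateString now delegates to commons-lang3 (the property names and values below are made up for illustration; per the StrSubstitutor documentation, nested ${...} references in a resolved value are replaced recursively, which is what the removed hand-rolled recursion achieved):

import java.util.Properties;
import org.apache.commons.lang3.text.StrLookup;
import org.apache.commons.lang3.text.StrSubstitutor;

public class InterpolationDemo {
    public static void main(String[] args) {
        final Properties props = new Properties();
        props.setProperty("project.artifactId", "spring-core");
        props.setProperty("jar.name", "${project.artifactId}-3.2.0.RELEASE");

        final StrSubstitutor substitutor = new StrSubstitutor(new StrLookup<String>() {
            @Override
            public String lookup(String key) {
                return props.getProperty(key);
            }
        });
        // Nested placeholders resolve through the same lookup.
        System.out.println(substitutor.replace("lib/${jar.name}.jar"));
        // prints: lib/spring-core-3.2.0.RELEASE.jar
    }
}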

View File

@@ -0,0 +1,15 @@
# Copyright 2015 OWASP.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
MERGE_PROPERTY=MERGE INTO properties (id, value) KEY(id) VALUES(?, ?)

View File

@@ -0,0 +1,15 @@
# Copyright 2015 OWASP.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
MERGE_PROPERTY=CALL save_property(?, ?)

View File

@@ -0,0 +1,16 @@
# Copyright 2015 OWASP.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
MERGE_PROPERTY=CALL save_property(?, ?)
CLEANUP_ORPHANS=DELETE FROM cpeEntry WHERE id IN (SELECT id FROM cpeEntry LEFT JOIN software ON cpeEntry.id = software.CPEEntryId WHERE software.CPEEntryId IS NULL);

View File

@@ -37,4 +37,20 @@ CREATE INDEX idxSoftwareCpe ON software(cpeEntryId);
INSERT INTO properties(id,value) VALUES ('version','2.9');
CREATE USER 'dcuser' IDENTIFIED BY 'DC-Pass1337!';
GRANT SELECT, INSERT, DELETE, UPDATE ON dependencycheck.* TO 'dcuser';
GRANT SELECT, INSERT, DELETE, UPDATE ON dependencycheck.* TO 'dcuser';
DROP PROCEDURE IF EXISTS save_property;
DELIMITER //
CREATE PROCEDURE save_property
(IN prop varchar(50), IN val varchar(500))
BEGIN
INSERT INTO properties (`id`, `value`) VALUES (prop, val)
ON DUPLICATE KEY UPDATE `value`=val;
END //
DELIMITER ;
GRANT EXECUTE ON PROCEDURE dependencycheck.save_property TO 'dcuser';
UPDATE Properties SET value='3.0' WHERE ID='version';

View File

@@ -0,0 +1,53 @@
CREATE USER dcuser WITH PASSWORD 'DC-Pass1337!';
DROP TABLE IF EXISTS software;
DROP TABLE IF EXISTS cpeEntry;
DROP TABLE IF EXISTS reference;
DROP TABLE IF EXISTS vulnerability;
DROP TABLE IF EXISTS properties;
CREATE TABLE properties (id varchar(50) PRIMARY KEY, value varchar(500));
CREATE TABLE vulnerability (id SERIAL PRIMARY KEY, cve VARCHAR(20) UNIQUE,
description VARCHAR(8000), cwe VARCHAR(10), cvssScore DECIMAL(3,1), cvssAccessVector VARCHAR(20),
cvssAccessComplexity VARCHAR(20), cvssAuthentication VARCHAR(20), cvssConfidentialityImpact VARCHAR(20),
cvssIntegrityImpact VARCHAR(20), cvssAvailabilityImpact VARCHAR(20));
CREATE TABLE reference (cveid INT, name VARCHAR(1000), url VARCHAR(1000), source VARCHAR(255),
CONSTRAINT fkReference FOREIGN KEY (cveid) REFERENCES vulnerability(id) ON DELETE CASCADE);
CREATE TABLE cpeEntry (id SERIAL PRIMARY KEY, cpe VARCHAR(250), vendor VARCHAR(255), product VARCHAR(255));
CREATE TABLE software (cveid INT, cpeEntryId INT, previousVersion VARCHAR(50)
, CONSTRAINT fkSoftwareCve FOREIGN KEY (cveid) REFERENCES vulnerability(id) ON DELETE CASCADE
, CONSTRAINT fkSoftwareCpeProduct FOREIGN KEY (cpeEntryId) REFERENCES cpeEntry(id));
CREATE INDEX idxVulnerability ON vulnerability(cve);
CREATE INDEX idxReference ON reference(cveid);
CREATE INDEX idxCpe ON cpeEntry(cpe);
CREATE INDEX idxCpeEntry ON cpeEntry(vendor, product);
CREATE INDEX idxSoftwareCve ON software(cveid);
CREATE INDEX idxSoftwareCpe ON software(cpeEntryId);
INSERT INTO properties(id,value) VALUES ('version','2.9');
GRANT SELECT, INSERT, DELETE, UPDATE ON ALL TABLES IN SCHEMA public TO dcuser;
GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA public to dcuser;
DROP FUNCTION IF EXISTS save_property(varchar(50),varchar(500));
CREATE FUNCTION save_property (IN prop varchar(50), IN val varchar(500))
RETURNS void
AS
$$
UPDATE properties SET "value"=val WHERE id=prop;
INSERT INTO properties (id, value)
SELECT prop, val
WHERE NOT EXISTS (SELECT 1 FROM properties WHERE id=prop);
$$ LANGUAGE sql;
GRANT EXECUTE ON FUNCTION public.save_property(varchar(50),varchar(500)) TO dcuser;
UPDATE Properties SET value='3.0' WHERE ID='version';

View File

@@ -1,7 +1 @@
--the following is not currently used.
--ALTER TABLE cpeEntry ADD COLUMN IF NOT EXISTS dictionaryEntry BOOLEAN;
--ALTER TABLE cpeEntry ALTER COLUMN dictionaryEntry SET DEFAULT FALSE;
--UPDATE cpeEntry SET dictionaryEntry=false;
--UPDATE Properties SET value='3.0' WHERE ID='version';
UPDATE Properties SET value='3.0' WHERE ID='version';

View File

@@ -0,0 +1,7 @@
--the following is not currently used.
--ALTER TABLE cpeEntry ADD COLUMN IF NOT EXISTS dictionaryEntry BOOLEAN;
--ALTER TABLE cpeEntry ALTER COLUMN dictionaryEntry SET DEFAULT FALSE;
--UPDATE cpeEntry SET dictionaryEntry=false;
--UPDATE Properties SET value='3.1' WHERE ID='version';

View File

@@ -0,0 +1,15 @@
DROP PROCEDURE IF EXISTS save_property;
DELIMITER //
CREATE PROCEDURE save_property
(IN prop varchar(50), IN val varchar(500))
BEGIN
INSERT INTO properties (`id`, `value`) VALUES (prop, val)
ON DUPLICATE KEY UPDATE `value`=val;
END //
DELIMITER ;
GRANT EXECUTE ON PROCEDURE dependencycheck.save_property TO 'dcuser';
UPDATE Properties SET value='3.0' WHERE ID='version';

View File

@@ -161,4 +161,32 @@
<gav regex="true">.*\bhk2\b.*</gav>
<cpe>cpe:/a:oracle:glassfish</cpe>
</suppress>
<suppress base="true">
<notes><![CDATA[
file name: petals-se-camel-1.0.0.jar - false positive for apache camel.
]]></notes>
<gav regex="true">org.ow2.petals:petals-se-camel:.*</gav>
<cpe>cpe:/a:apache:camel</cpe>
</suppress>
<suppress base="true">
<notes><![CDATA[
Mina gets flagged as apache-ssl
]]></notes>
<gav regex="true">org.apache.mina:mina.*</gav>
<cpe>cpe:/a:apache-ssl:apache-ssl</cpe>
</suppress>
<suppress base="true">
<notes><![CDATA[
Woden gets flagged as apache-ssl
]]></notes>
<gav regex="true">org.apache.woden:woden.*</gav>
<cpe>cpe:/a:apache-ssl:apache-ssl</cpe>
</suppress>
<suppress base="true">
<notes><![CDATA[
spec gets flagged as the implementation.
]]></notes>
<gav regex="true">org.apache.geronimo.specs:.*</gav>
<cpe>cpe:/a:apache:geronimo</cpe>
</suppress>
</suppressions>

View File

@@ -18,8 +18,8 @@ engine.version.url=http://jeremylong.github.io/DependencyCheck/current.txt
data.directory=[JAR]/data
#if the filename has a %s it will be replaced with the current expected version
data.file_name=dc.h2.db
data.version=2.9
data.connection_string=jdbc:h2:file:%s;FILE_LOCK=SERIALIZED;AUTOCOMMIT=ON;
data.version=3.0
data.connection_string=jdbc:h2:file:%s;FILE_LOCK=FS;AUTOCOMMIT=ON;
#data.connection_string=jdbc:mysql://localhost:3306/dependencycheck
# user name and password for the database connection. The inherent case is to use H2.
@@ -41,13 +41,15 @@ data.driver_path=
# to update the other files if we are within this timespan. Per NIST this file
# holds 8 days of updates, we are using 7 just to be safe.
cve.url.modified.validfordays=7
# the number of hours to wait before checking if updates are available from the NVD.
cve.check.validforhours=4
#first year to pull data from the URLs below
cve.startyear=2002
# the path to the modified nvd cve xml file.
cve.url-1.2.modified=https://nvd.nist.gov/download/nvdcve-Modified.xml.gz
#cve.url-1.2.modified=http://nvd.nist.gov/download/nvdcve-modified.xml
cve.url-2.0.modified=https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-Modified.xml.gz
#cve.url-2.0.modified=http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-modified.xml
cve.startyear=2002
cve.url-1.2.base=https://nvd.nist.gov/download/nvdcve-%d.xml.gz
#cve.url-1.2.base=http://nvd.nist.gov/download/nvdcve-%d.xml
cve.url-2.0.base=https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml.gz
@@ -79,3 +81,22 @@ archive.scan.depth=3
# use HEAD (default) or GET as HTTP request method for query timestamp
downloader.quick.query.timestamp=true
analyzer.jar.enabled=true
analyzer.archive.enabled=true
analyzer.node.package.enabled=true
analyzer.composer.lock.enabled=true
analyzer.python.distribution.enabled=true
analyzer.python.package.enabled=true
analyzer.ruby.gemspec.enabled=true
analyzer.autoconf.enabled=true
analyzer.cmake.enabled=true
analyzer.assembly.enabled=true
analyzer.nuspec.enabled=true
analyzer.openssl.enabled=true
analyzer.central.enabled=true
analyzer.nexus.enabled=false
#whether the nexus analyzer uses the proxy
analyzer.nexus.proxy=true

View File

@@ -578,6 +578,7 @@ arising out of or in connection with the use of this tool, the analysis performe
<td data-sort-value="$sortValue">
#set($sortValue="")
#foreach($id in $dependency.getIdentifiers())
#set($cpeSort=0)
#if ($id.type=="maven")
#if ($mavenlink=="" || !$mavenlink.url)
#set($mavenlink=$id)
@@ -591,7 +592,6 @@ arising out of or in connection with the use of this tool, the analysis performe
#else
$enc.html($id.value)
#end
#set($cpeSort=0)
#if ($cpeIdConf == "")
#set($cpeIdConf=$id.confidence)
#set($cpeSort=$id.confidence.ordinal())

View File

@@ -15,7 +15,7 @@
*
* Copyright (c) 2012 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.nvdcve;
package org.owasp.dependencycheck;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
@@ -31,6 +31,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An abstract database test case that is used to ensure the H2 DB exists prior to performing tests that utilize the data
* contained within.
*
* @author Jeremy Long
*/

View File

@@ -34,7 +34,7 @@ public class EngineIntegrationTest extends BaseTest {
@Before
public void setUp() throws Exception {
org.owasp.dependencycheck.data.nvdcve.BaseDBTestCase.ensureDBExists();
org.owasp.dependencycheck.BaseDBTestCase.ensureDBExists();
}
@After

View File

@@ -34,7 +34,7 @@ public class AbstractFileTypeAnalyzerTest extends BaseTest {
*/
@Test
public void testNewHashSet() {
Set result = AbstractFileTypeAnalyzer.newHashSet("one", "two");
Set<String> result = AbstractFileTypeAnalyzer.newHashSet("one", "two");
assertEquals(2, result.size());
assertTrue(result.contains("one"));
assertTrue(result.contains("two"));

View File

@@ -24,7 +24,7 @@ import static org.junit.Assert.*;
import org.junit.Test;
import org.owasp.dependencycheck.BaseTest;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.data.cpe.AbstractDatabaseTestCase;
import org.owasp.dependencycheck.BaseDBTestCase;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;
@@ -32,7 +32,7 @@ import org.owasp.dependencycheck.utils.Settings;
*
* @author Jeremy Long
*/
public class ArchiveAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
public class ArchiveAnalyzerIntegrationTest extends BaseDBTestCase {
/**
* Test of getSupportedExtensions method, of class ArchiveAnalyzer.

View File

@@ -0,0 +1,80 @@
/*
* Copyright 2015 OWASP.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.owasp.dependencycheck.analyzer;
import java.io.File;
import java.io.FileFilter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeNotNull;
import org.owasp.dependencycheck.BaseTest;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;
/**
*
* @author jeremy
*/
public class ArchiveAnalyzerTest extends BaseTest {
@Before
public void setUp() {
Settings.setString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, "z2, z3");
}
/**
* Test of analyzeFileType method, of class ArchiveAnalyzer.
*/
@Test
public void testZippableExtensions() throws Exception {
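// Skip when ArchiveAnalyzer was already loaded in this JVM; the additional zip extensions set in
// setUp() would presumably not be picked up by a file filter built before the setting existed.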
assumeFalse(isPreviouslyLoaded("org.owasp.dependencycheck.analyzer.ArchiveAnalyzer"));
ArchiveAnalyzer instance = new ArchiveAnalyzer();
assertTrue(instance.getFileFilter().accept(new File("c:/test.zip")));
assertTrue(instance.getFileFilter().accept(new File("c:/test.z2")));
assertTrue(instance.getFileFilter().accept(new File("c:/test.z3")));
assertFalse(instance.getFileFilter().accept(new File("c:/test.z4")));
}
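/**
* Uses reflection to call the protected ClassLoader.findLoadedClass method on the current thread's
* context class loader; returns true only if the named class has already been loaded by that loader.
*/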
private boolean isPreviouslyLoaded(String className) {
try {
Method m = ClassLoader.class.getDeclaredMethod("findLoadedClass", new Class[]{String.class});
m.setAccessible(true);
Object t = m.invoke(Thread.currentThread().getContextClassLoader(), className);
return t != null;
} catch (NoSuchMethodException ex) {
Logger.getLogger(ArchiveAnalyzerTest.class.getName()).log(Level.SEVERE, null, ex);
} catch (SecurityException ex) {
Logger.getLogger(ArchiveAnalyzerTest.class.getName()).log(Level.SEVERE, null, ex);
} catch (IllegalAccessException ex) {
Logger.getLogger(ArchiveAnalyzerTest.class.getName()).log(Level.SEVERE, null, ex);
} catch (IllegalArgumentException ex) {
Logger.getLogger(ArchiveAnalyzerTest.class.getName()).log(Level.SEVERE, null, ex);
} catch (InvocationTargetException ex) {
Logger.getLogger(ArchiveAnalyzerTest.class.getName()).log(Level.SEVERE, null, ex);
}
return false;
}
}

View File

@@ -33,7 +33,7 @@ import java.util.regex.Pattern;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.*;
import org.owasp.dependencycheck.data.nvdcve.BaseDBTestCase;
import org.owasp.dependencycheck.BaseDBTestCase;
/**
* Unit tests for CmakeAnalyzer.

View File

@@ -19,7 +19,7 @@ package org.owasp.dependencycheck.analyzer;
import java.io.File;
import java.io.IOException;
import java.util.HashSet;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import org.apache.lucene.index.CorruptIndexException;
@@ -28,7 +28,7 @@ import org.junit.Assert;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import org.owasp.dependencycheck.BaseTest;
import org.owasp.dependencycheck.data.cpe.AbstractDatabaseTestCase;
import org.owasp.dependencycheck.BaseDBTestCase;
import org.owasp.dependencycheck.data.cpe.IndexEntry;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
@@ -38,7 +38,7 @@ import org.owasp.dependencycheck.dependency.Identifier;
*
* @author Jeremy Long
*/
public class CPEAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
public class CPEAnalyzerIntegrationTest extends BaseDBTestCase {
/**
* Tests of buildSearch of class CPEAnalyzer.
@@ -49,11 +49,9 @@ public class CPEAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
*/
@Test
public void testBuildSearch() throws IOException, CorruptIndexException, ParseException {
Set<String> productWeightings = new HashSet<String>(1);
productWeightings.add("struts2");
Set<String> productWeightings = Collections.singleton("struts2");
Set<String> vendorWeightings = new HashSet<String>(1);
vendorWeightings.add("apache");
Set<String> vendorWeightings = Collections.singleton("apache");
String vendor = "apache software foundation";
String product = "struts 2 core";
@@ -238,11 +236,9 @@ public class CPEAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
CPEAnalyzer instance = new CPEAnalyzer();
instance.open();
Set<String> productWeightings = new HashSet<String>(1);
productWeightings.add("struts2");
Set<String> productWeightings = Collections.singleton("struts2");
Set<String> vendorWeightings = new HashSet<String>(1);
vendorWeightings.add("apache");
Set<String> vendorWeightings = Collections.singleton("apache");
List<IndexEntry> result = instance.searchCPE(vendor, product, productWeightings, vendorWeightings);
instance.close();

View File

@@ -18,13 +18,13 @@
package org.owasp.dependencycheck.analyzer;
import org.junit.Test;
import org.owasp.dependencycheck.data.cpe.AbstractDatabaseTestCase;
import org.owasp.dependencycheck.BaseDBTestCase;
/**
*
* @author Jeremy Long
*/
public class DependencyBundlingAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
public class DependencyBundlingAnalyzerIntegrationTest extends BaseDBTestCase {
/**
* Test of analyze method, of class DependencyBundlingAnalyzer.

View File

@@ -24,6 +24,7 @@ import org.junit.Before;
import org.junit.Test;
import org.owasp.dependencycheck.BaseTest;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.BaseDBTestCase;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Evidence;
@@ -33,12 +34,7 @@ import org.owasp.dependencycheck.utils.Settings;
*
* @author Jeremy Long
*/
public class HintAnalyzerTest extends BaseTest {
@Before
public void setUp() throws Exception {
org.owasp.dependencycheck.data.nvdcve.BaseDBTestCase.ensureDBExists();
}
public class HintAnalyzerTest extends BaseDBTestCase {
/**
* Test of getName method, of class HintAnalyzer.

View File

@@ -21,9 +21,9 @@ import java.io.File;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import org.owasp.dependencycheck.BaseDBTestCase;
import org.owasp.dependencycheck.BaseTest;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.data.cpe.AbstractDatabaseTestCase;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;
@@ -32,7 +32,7 @@ import org.owasp.dependencycheck.utils.Settings;
*
* @author Jeremy Long
*/
public class VulnerabilitySuppressionAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
public class VulnerabilitySuppressionAnalyzerIntegrationTest extends BaseDBTestCase {
/**
* Test of getName method, of class VulnerabilitySuppressionAnalyzer.

View File

@@ -1,37 +0,0 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2012 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.cpe;
import org.junit.Before;
import org.owasp.dependencycheck.BaseTest;
import org.owasp.dependencycheck.data.nvdcve.BaseDBTestCase;
/**
* An abstract database test case that is used to ensure the H2 DB exists prior to performing tests that utilize the
* data contained within.
*
* @author Jeremy Long
*/
public abstract class AbstractDatabaseTestCase extends BaseTest {
@Before
public void setUp() throws Exception {
BaseDBTestCase.ensureDBExists();
}
}

View File

@@ -0,0 +1,47 @@
/*
* Copyright 2015 OWASP.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.owasp.dependencycheck.data.nvdcve;
import java.sql.Connection;
import java.sql.SQLException;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
import org.owasp.dependencycheck.BaseDBTestCase;
/**
*
* @author jeremy
*/
public class ConnectionFactoryTest extends BaseDBTestCase {
/**
* Test of initialize method, of class ConnectionFactory.
*
* @throws org.owasp.dependencycheck.data.nvdcve.DatabaseException
*/
@Test
public void testInitialize() throws DatabaseException, SQLException {
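// Exercise the full lifecycle: initialize the factory, obtain a connection, release it, then clean up.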
ConnectionFactory.initialize();
Connection result = ConnectionFactory.getConnection();
assertNotNull(result);
result.close();
ConnectionFactory.cleanup();
}
}

View File

@@ -17,6 +17,7 @@
*/
package org.owasp.dependencycheck.data.nvdcve;
import org.owasp.dependencycheck.BaseDBTestCase;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

View File

@@ -25,7 +25,9 @@ import static org.junit.Assert.assertTrue;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
import org.owasp.dependencycheck.utils.Settings;
/**
*
@@ -35,10 +37,12 @@ public class CveDBMySQLTest {
@BeforeClass
public static void setUpClass() {
Settings.initialize();
}
@AfterClass
public static void tearDownClass() {
Settings.cleanup();
}
@Before
@@ -93,7 +97,7 @@ public class CveDBMySQLTest {
CveDB instance = new CveDB();
try {
instance.open();
List result = instance.getVulnerabilities(cpeStr);
List<Vulnerability> result = instance.getVulnerabilities(cpeStr);
assertTrue(result.size() > 5);
} catch (Exception ex) {
System.out.println("Unable to access the My SQL database; verify that the db server is running and that the schema has been generated");

View File

@@ -17,6 +17,7 @@
*/
package org.owasp.dependencycheck.data.nvdcve;
import org.owasp.dependencycheck.BaseDBTestCase;
import java.util.Properties;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

View File

@@ -18,7 +18,7 @@
package org.owasp.dependencycheck.data.update;
import org.junit.Test;
import org.owasp.dependencycheck.data.nvdcve.BaseDBTestCase;
import org.owasp.dependencycheck.BaseDBTestCase;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.data.update.exception.UpdateException;

View File

@@ -185,7 +185,6 @@ public class DependencyTest {
@Test
public void testGetIdentifiers() {
Dependency instance = new Dependency();
List expResult = null;
Set<Identifier> result = instance.getIdentifiers();
assertTrue(true); //this is just a getter setter pair.

View File

@@ -40,7 +40,7 @@ public class ReportGeneratorIntegrationTest extends BaseTest {
@Before
public void setUp() throws Exception {
org.owasp.dependencycheck.data.nvdcve.BaseDBTestCase.ensureDBExists();
org.owasp.dependencycheck.BaseDBTestCase.ensureDBExists();
}
/**

View File

@@ -61,7 +61,7 @@ public class SuppressionParserTest {
//File file = new File(this.getClass().getClassLoader().getResource("suppressions.xml").getPath());
File file = BaseTest.getResourceAsFile(this, "suppressions.xml");
SuppressionParser instance = new SuppressionParser();
List result = instance.parseSuppressionRules(file);
List<SuppressionRule> result = instance.parseSuppressionRules(file);
assertTrue(result.size() > 3);
}
}

View File

@@ -306,27 +306,6 @@ public class SuppressionRuleTest {
assertTrue(instance.cpeHasNoVersion(c));
}
/**
* Test of countCharacter method, of class SuppressionRule.
*/
@Test
public void testCountCharacter() {
String str = "cpe:/a:microsoft:.net_framework:4.5";
char c = ':';
SuppressionRule instance = new SuppressionRule();
int expResult = 4;
int result = instance.countCharacter(str, c);
assertEquals(expResult, result);
str = "::";
expResult = 2;
result = instance.countCharacter(str, c);
assertEquals(expResult, result);
str = "these are not the characters you are looking for";
expResult = 0;
result = instance.countCharacter(str, c);
assertEquals(expResult, result);
}
/**
* Test of identifierMatches method, of class SuppressionRule.
*/

View File

@@ -61,11 +61,11 @@ public class DependencyVersionTest {
@Test
public void testIterator() {
DependencyVersion instance = new DependencyVersion("1.2.3");
Iterator result = instance.iterator();
Iterator<String> result = instance.iterator();
assertTrue(result.hasNext());
int count = 1;
while (result.hasNext()) {
String v = (String) result.next();
String v = result.next();
assertTrue(String.valueOf(count++).equals(v));
}
}

View File

@@ -16,11 +16,9 @@ engine.version.url=http://jeremylong.github.io/DependencyCheck/current.txt
# will not be used. The data.directory will be resolved and if the connection string
# below contains a %s then the data.directory will replace the %s.
data.directory=[JAR]/data
# if the filename has a %s it will be replaced with the current expected version. For file
# based databases the below filename will be added to the data directory above and then
# if the connection string has a %s it will be replaced by the directory/filename path.
#if the filename has a %s it will be replaced with the current expected version
data.file_name=dc.h2.db
data.version=2.9
data.version=3.0
data.connection_string=jdbc:h2:file:%s;FILE_LOCK=SERIALIZED;AUTOCOMMIT=ON;
#data.connection_string=jdbc:mysql://localhost:3306/dependencycheck
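Taken together, data.directory, data.file_name, data.version, and data.connection_string describe a two-step substitution as explained in the comments above. A minimal sketch of that resolution, with hypothetical class and method names (only the property semantics come from this file):

import java.io.File;

final class ConnectionStringSketch {
    // Resolves a template such as "jdbc:h2:file:%s;FILE_LOCK=SERIALIZED;AUTOCOMMIT=ON;":
    // the file name (which may itself contain %s for the data version) is appended to the
    // data directory, and the resulting path replaces the %s in the connection string.
    static String resolve(String template, File dataDirectory, String fileName, String version) {
        final String resolvedName = fileName.contains("%s")
                ? String.format(fileName, version) : fileName;
        final File dbFile = new File(dataDirectory, resolvedName);
        return String.format(template, dbFile.getAbsolutePath());
    }
}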
@@ -39,19 +37,15 @@ data.password=DC-Pass1337!
data.driver_name=org.h2.Driver
data.driver_path=
# the path to the cpe xml file
#cpe.url=http://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.2.xml.gz
cpe.url=http://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.3.xml.gz
# the path to the cpe meta data file.
cpe.meta.url=http://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.2.meta
# the number of days that the modified nvd cve data holds data for. We don't need
# to update the other files if we are within this timespan. Per NIST this file
# holds 8 days of updates; we use 7 just to be safe.
cve.url.modified.validfordays=7
# the path to the modified nvd cve xml file.
# the number of hours to wait before checking if updates are available from the NVD.
cve.check.validforhours=0
#first year to pull data from the URLs below
cve.startyear=2014
# the path to the modified nvd cve xml file.
cve.url-1.2.modified=https://nvd.nist.gov/download/nvdcve-Modified.xml.gz
#cve.url-1.2.modified=http://nvd.nist.gov/download/nvdcve-modified.xml
cve.url-2.0.modified=https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-Modified.xml.gz
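The two validity settings above drive a simple decision: skip the check entirely while the last check is newer than cve.check.validforhours, pull only the Modified feed while the local data is newer than cve.url.modified.validfordays, and otherwise re-download the year-based feeds as well. A rough sketch of that logic under those assumptions (class, enum, and parameter names are hypothetical):

import java.util.concurrent.TimeUnit;

final class UpdateDecisionSketch {
    enum Action { SKIP, MODIFIED_FEED_ONLY, FULL_FEEDS }

    // lastChecked and lastUpdated are epoch milliseconds recorded alongside the database.
    static Action decide(long now, long lastChecked, long lastUpdated,
            int validForHours, int validForDays) {
        if (now - lastChecked < TimeUnit.HOURS.toMillis(validForHours)) {
            return Action.SKIP; // checked recently enough
        }
        if (now - lastUpdated <= TimeUnit.DAYS.toMillis(validForDays)) {
            return Action.MODIFIED_FEED_ONLY; // the Modified feed still covers the gap
        }
        return Action.FULL_FEEDS; // too stale; fetch the full year-based feeds too
    }
}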
@@ -62,6 +56,14 @@ cve.url-2.0.base=https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml.gz
#cve.url-2.0.base=http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml
cpe.validfordays=30
cpe.url=http://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.3.xml.gz
# file type analyzer settings:
analyzer.archive.enabled=true
analyzer.jar.enabled=true
analyzer.nuspec.enabled=true
analyzer.assembly.enabled=true
analyzer.composer.lock.enabled=true
# the URL for searching Nexus for SHA-1 hashes and whether it's enabled
analyzer.nexus.enabled=true
@@ -74,5 +76,27 @@ analyzer.nexus.proxy=true
analyzer.central.enabled=true
analyzer.central.url=http://search.maven.org/solrsearch/select
# the number of nested archives that will be searched.
archive.scan.depth=3
# use HEAD (default) or GET as HTTP request method for query timestamp
downloader.quick.query.timestamp=true
analyzer.jar.enabled=true
analyzer.archive.enabled=true
analyzer.node.package.enabled=true
analyzer.composer.lock.enabled=true
analyzer.python.distribution.enabled=true
analyzer.python.package.enabled=true
analyzer.ruby.gemspec.enabled=true
analyzer.autoconf.enabled=true
analyzer.cmake.enabled=true
analyzer.assembly.enabled=true
analyzer.nuspec.enabled=true
analyzer.openssl.enabled=true
analyzer.central.enabled=true
analyzer.nexus.enabled=false
#whether the nexus analyzer uses the proxy
analyzer.nexus.proxy=true