Merge pull request #1006 from jeremylong/issue-993

Improve Node.js and NSP analyzers
Jeremy Long
2017-12-03 06:16:31 -05:00
committed by GitHub
33 changed files with 838 additions and 524 deletions

View File

@@ -20,7 +20,7 @@ Copyright (c) 2017 - Jeremy Long. All Rights Reserved.
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>3.0.3-SNAPSHOT</version>
<version>3.1.0-SNAPSHOT</version>
</parent>
<name>Dependency-Check Build-Reporting</name>
<artifactId>build-reporting</artifactId>

View File

@@ -20,7 +20,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>3.0.3-SNAPSHOT</version>
<version>3.1.0-SNAPSHOT</version>
</parent>
<artifactId>dependency-check-ant</artifactId>

View File

@@ -20,7 +20,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>3.0.3-SNAPSHOT</version>
<version>3.1.0-SNAPSHOT</version>
</parent>
<artifactId>dependency-check-cli</artifactId>

View File

@@ -20,7 +20,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>3.0.3-SNAPSHOT</version>
<version>3.1.0-SNAPSHOT</version>
</parent>
<artifactId>dependency-check-core</artifactId>
@@ -164,6 +164,10 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
</plugins>
</reporting>
<dependencies>
<dependency>
<groupId>com.vdurmont</groupId>
<artifactId>semver4j</artifactId>
</dependency>
<!-- Note, to stay compatible with Jenkins installations only JARs compiled to 1.6 can be used -->
<dependency>
<groupId>joda-time</groupId>

View File

@@ -1038,6 +1038,15 @@ public class Engine implements FileFilter, AutoCloseable {
return settings;
}
/**
* Returns the mode of the engine.
*
* @return the mode of the engine
*/
public Mode getMode() {
return mode;
}
/**
* Adds a file type analyzer. This has been added solely to assist in unit
* testing the Engine.

View File

@@ -85,6 +85,14 @@ public abstract class AbstractAnalyzer implements Analyzer {
@Override
public void initialize(Settings settings) {
this.settings = settings;
final String key = getAnalyzerEnabledSettingKey();
try {
this.setEnabled(settings.getBoolean(key, true));
} catch (InvalidSettingException ex) {
final String msg = String.format("Invalid setting for property '%s'", key);
LOGGER.warn(msg);
LOGGER.debug(msg, ex);
}
}
/**
@@ -95,15 +103,6 @@ public abstract class AbstractAnalyzer implements Analyzer {
*/
@Override
public final void prepare(Engine engine) throws InitializationException {
final String key = getAnalyzerEnabledSettingKey();
try {
this.setEnabled(settings.getBoolean(key, true));
} catch (InvalidSettingException ex) {
final String msg = String.format("Invalid setting for property '%s'", key);
LOGGER.warn(msg);
LOGGER.debug(msg, ex);
}
if (isEnabled()) {
prepareAnalyzer(engine);
} else {

View File

@@ -0,0 +1,291 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2017 Steve Springett. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import javax.annotation.concurrent.ThreadSafe;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.json.JsonString;
import javax.json.JsonValue;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.utils.Checksum;
/**
* An abstract NPM analyzer that contains common methods for concrete
* implementations.
*
* @author Steve Springett
*/
@ThreadSafe
public abstract class AbstractNpmAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractNpmAnalyzer.class);
/**
* A descriptor for the type of dependencies processed or added by this
* analyzer.
*/
public static final String NPM_DEPENDENCY_ECOSYSTEM = "npm";
/**
* The file name to scan.
*/
private static final String PACKAGE_JSON = "package.json";
/**
* Determines if the file can be analyzed by the analyzer.
*
* @param pathname the path to the file
* @return true if the file can be analyzed by the given analyzer; otherwise
* false
*/
@Override
public boolean accept(File pathname) {
boolean accept = super.accept(pathname);
if (accept) {
try {
accept |= shouldProcess(pathname);
} catch (AnalysisException ex) {
throw new RuntimeException(ex.getMessage(), ex.getCause());
}
}
return accept;
}
/**
* Determines if the path contains "/node_modules/" (i.e. it is a child
* module). This analyzer does not scan child modules.
*
* @param pathname the path to test
* @return <code>true</code> if the path does not contain "/node_modules/"
* @throws AnalysisException thrown if the canonical path cannot be obtained
* from the given file
*/
protected boolean shouldProcess(File pathname) throws AnalysisException {
try {
// Do not scan the node_modules directory
if (pathname.getCanonicalPath().contains(File.separator + "node_modules" + File.separator)) {
LOGGER.debug("Skipping analysis of node module: " + pathname.getCanonicalPath());
return false;
}
} catch (IOException ex) {
throw new AnalysisException("Unable to process dependency", ex);
}
return true;
}
/**
* Construct a dependency object.
*
* @param dependency the parent dependency
* @param name the name of the dependency to create
* @param version the version of the dependency to create
* @param scope the scope of the dependency being created
* @return the generated dependency
*/
protected Dependency createDependency(Dependency dependency, String name, String version, String scope) {
final Dependency nodeModule = new Dependency(new File(dependency.getActualFile() + "?" + name), true);
nodeModule.setEcosystem(NPM_DEPENDENCY_ECOSYSTEM);
//this is virtual - the sha1 is purely for the hyperlink in the final html report
nodeModule.setSha1sum(Checksum.getSHA1Checksum(String.format("%s:%s", name, version)));
nodeModule.setMd5sum(Checksum.getMD5Checksum(String.format("%s:%s", name, version)));
nodeModule.addEvidence(EvidenceType.PRODUCT, "package.json", "name", name, Confidence.HIGHEST);
nodeModule.addEvidence(EvidenceType.VENDOR, "package.json", "name", name, Confidence.HIGH);
nodeModule.addEvidence(EvidenceType.VERSION, "package.json", "version", version, Confidence.HIGHEST);
nodeModule.addProjectReference(dependency.getName() + ": " + scope);
nodeModule.setName(name);
nodeModule.setVersion(version);
nodeModule.addIdentifier("npm", String.format("%s:%s", name, version), null, Confidence.HIGHEST);
return nodeModule;
}
/**
* Processes a part of package.json (as defined by JsonArray) and updates the
* specified dependency with relevant info.
*
* @param engine the dependency-check engine
* @param dependency the Dependency to update
* @param jsonArray the jsonArray to parse
* @param depType the dependency type
*/
protected void processPackage(Engine engine, Dependency dependency, JsonArray jsonArray, String depType) {
final JsonObjectBuilder builder = Json.createObjectBuilder();
for (JsonString str : jsonArray.getValuesAs(JsonString.class)) {
builder.add(str.toString(), "");
}
final JsonObject jsonObject = builder.build();
processPackage(engine, dependency, jsonObject, depType);
}
/**
* Processes a part of package.json (as defined by JsonObject) and updates
* the specified dependency with relevant info.
*
* @param engine the dependency-check engine
* @param dependency the Dependency to update
* @param jsonObject the jsonObject to parse
* @param depType the dependency type
*/
protected void processPackage(Engine engine, Dependency dependency, JsonObject jsonObject, String depType) {
for (int i = 0; i < jsonObject.size(); i++) {
for (Map.Entry<String, JsonValue> entry : jsonObject.entrySet()) {
final String name = entry.getKey();
String version = "";
if (entry.getValue() != null && entry.getValue().getValueType() == JsonValue.ValueType.STRING) {
version = ((JsonString) entry.getValue()).getString();
}
final Dependency existing = findDependency(engine, name, version);
if (existing == null) {
final Dependency nodeModule = createDependency(dependency, name, version, depType);
engine.addDependency(nodeModule);
} else {
existing.addProjectReference(dependency.getName() + ": " + depType);
}
}
}
}
/**
* Adds information to an evidence collection from the node json
* configuration.
*
* @param dep the dependency to add the evidence to
* @param t the type of evidence to add
* @param json information from node.js
* @param key the key to obtain the data from the json information
* @return the actual string set into evidence
*/
private static String addToEvidence(Dependency dep, EvidenceType t, JsonObject json, String key) {
String evidenceStr = null;
if (json.containsKey(key)) {
final JsonValue value = json.get(key);
if (value instanceof JsonString) {
evidenceStr = ((JsonString) value).getString();
dep.addEvidence(t, PACKAGE_JSON, key, evidenceStr, Confidence.HIGHEST);
} else if (value instanceof JsonObject) {
final JsonObject jsonObject = (JsonObject) value;
for (final Map.Entry<String, JsonValue> entry : jsonObject.entrySet()) {
final String property = entry.getKey();
final JsonValue subValue = entry.getValue();
if (subValue instanceof JsonString) {
evidenceStr = ((JsonString) subValue).getString();
dep.addEvidence(t, PACKAGE_JSON,
String.format("%s.%s", key, property),
evidenceStr,
Confidence.HIGHEST);
} else {
LOGGER.warn("JSON sub-value not string as expected: {}", subValue);
}
}
} else {
LOGGER.warn("JSON value not string or JSON object as expected: {}", value);
}
}
return evidenceStr;
}
/**
* Locates the dependency from the list of dependencies that have been
* scanned by the engine.
*
* @param engine the dependency-check engine
* @param name the name of the dependency to find
* @param version the version of the dependency to find
* @return the identified dependency; otherwise null
*/
protected Dependency findDependency(Engine engine, String name, String version) {
for (Dependency d : engine.getDependencies()) {
if (NPM_DEPENDENCY_ECOSYSTEM.equals(d.getEcosystem()) && name.equals(d.getName()) && version != null && d.getVersion() != null) {
final String dependencyVersion = d.getVersion();
if (DependencyBundlingAnalyzer.npmVersionsMatch(version, dependencyVersion)) {
return d;
}
}
}
return null;
}
/**
* Collects evidence from the given JSON for the associated dependency.
*
* @param json the JSON that contains the evidence to collect
* @param dependency the dependency to add the evidence to
*/
public void gatherEvidence(final JsonObject json, Dependency dependency) {
if (json.containsKey("name")) {
final Object value = json.get("name");
if (value instanceof JsonString) {
final String valueString = ((JsonString) value).getString();
dependency.setName(valueString);
dependency.setPackagePath(valueString);
dependency.addEvidence(EvidenceType.PRODUCT, PACKAGE_JSON, "name", valueString, Confidence.HIGHEST);
dependency.addEvidence(EvidenceType.VENDOR, PACKAGE_JSON, "name", valueString, Confidence.HIGH);
} else {
LOGGER.warn("JSON value not string as expected: {}", value);
}
}
final String desc = addToEvidence(dependency, EvidenceType.PRODUCT, json, "description");
dependency.setDescription(desc);
addToEvidence(dependency, EvidenceType.VENDOR, json, "author");
final String version = addToEvidence(dependency, EvidenceType.VERSION, json, "version");
if (version != null) {
dependency.setVersion(version);
dependency.addIdentifier("npm", String.format("%s:%s", dependency.getName(), version), null, Confidence.HIGHEST);
}
// Adds the license if defined in package.json
if (json.containsKey("license")) {
final Object value = json.get("license");
if (value instanceof JsonString) {
dependency.setLicense(json.getString("license"));
} else if (value instanceof JsonArray) {
final JsonArray array = (JsonArray) value;
final StringBuilder sb = new StringBuilder();
boolean addComma = false;
for (int x = 0; x < array.size(); x++) {
if (!array.isNull(x)) {
if (addComma) {
sb.append(", ");
} else {
addComma = true;
}
sb.append(array.getString(x));
}
}
dependency.setLicense(sb.toString());
} else {
dependency.setLicense(json.getJsonObject("license").getString("type"));
}
}
}
}
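
As a minimal sketch (hypothetical values, not part of this commit), the snippet below feeds a small javax.json object through gatherEvidence via a concrete NPM analyzer; per the method above, the dependency's name and version are set and an "npm" identifier of the form name:version is added:

// Hypothetical package.json content built with javax.json (the same API the analyzer uses).
final JsonObject pkg = Json.createObjectBuilder()
        .add("name", "dns-sync")
        .add("version", "0.1.0")
        .add("author", "Sanjeev Koranga")
        .add("license", "MIT")
        .build();
// A virtual dependency, mirroring how the NPM analyzers create them.
final Dependency dep = new Dependency(new File("package.json"), true);
new NodePackageAnalyzer().gatherEvidence(pkg, dep);
// dep.getName() -> "dns-sync", dep.getVersion() -> "0.1.0", license "MIT",
// plus PRODUCT/VENDOR/VERSION evidence and an "npm" identifier of "dns-sync:0.1.0".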

View File

@@ -21,6 +21,7 @@ import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Set;
@@ -106,6 +107,12 @@ public class CPEAnalyzer extends AbstractAnalyzer {
* The CVE Database.
*/
private CveDB cve;
/**
* The list of ecosystems to skip during analysis. These are skipped because
* there is generally a more accurate vulnerability analyzer in the
* pipeline.
*/
private List<String> skipEcosystems;
/**
* Returns the name of this analyzer.
@@ -136,6 +143,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
*/
@Override
public void prepareAnalyzer(Engine engine) throws InitializationException {
super.prepareAnalyzer(engine);
try {
this.open(engine.getDatabase());
} catch (IOException ex) {
@@ -145,6 +153,13 @@ public class CPEAnalyzer extends AbstractAnalyzer {
LOGGER.debug("Exception accessing the database", ex);
throw new InitializationException("An exception occurred accessing the database", ex);
}
final String[] tmp = engine.getSettings().getArray(Settings.KEYS.ECOSYSTEM_SKIP_CPEANALYZER);
if (tmp == null) {
skipEcosystems = new ArrayList<>();
} else {
LOGGER.info("Skipping CPE Analysis for {}", tmp);
skipEcosystems = Arrays.asList(tmp);
}
}
/**
@@ -525,6 +540,9 @@ public class CPEAnalyzer extends AbstractAnalyzer {
*/
@Override
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
if (skipEcosystems.contains(dependency.getEcosystem())) {
return;
}
try {
determineCPE(dependency);
} catch (CorruptIndexException ex) {

View File

@@ -17,6 +17,9 @@
*/
package org.owasp.dependencycheck.analyzer;
import com.vdurmont.semver4j.Semver;
import com.vdurmont.semver4j.Semver.SemverType;
import com.vdurmont.semver4j.SemverException;
import java.io.File;
import java.util.Set;
import java.util.regex.Matcher;
@@ -135,6 +138,16 @@ public class DependencyBundlingAnalyzer extends AbstractDependencyComparingAnaly
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
return true; //since we merged into the next dependency - skip forward to the next in mainIterator
}
} else if (ecoSystemIs(AbstractNpmAnalyzer.NPM_DEPENDENCY_ECOSYSTEM, dependency, nextDependency)
&& namesAreEqual(dependency, nextDependency)
&& npmVersionsMatch(dependency.getVersion(), nextDependency.getVersion())) {
if (!dependency.isVirtual()) {
DependencyMergingAnalyzer.mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
DependencyMergingAnalyzer.mergeDependencies(nextDependency, dependency, dependenciesToRemove);
return true;
}
}
return false;
}
@@ -149,7 +162,8 @@ public class DependencyBundlingAnalyzer extends AbstractDependencyComparingAnaly
* removed from the main analysis loop, this function adds to this
* collection
*/
private void mergeDependencies(final Dependency dependency, final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) {
public static void mergeDependencies(final Dependency dependency,
final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) {
dependency.addRelatedDependency(relatedDependency);
for (Dependency d : relatedDependency.getRelatedDependencies()) {
dependency.addRelatedDependency(d);
@@ -158,7 +172,9 @@ public class DependencyBundlingAnalyzer extends AbstractDependencyComparingAnaly
if (dependency.getSha1sum().equals(relatedDependency.getSha1sum())) {
dependency.addAllProjectReferences(relatedDependency.getProjectReferences());
}
dependenciesToRemove.add(relatedDependency);
if (dependenciesToRemove != null) {
dependenciesToRemove.add(relatedDependency);
}
}
/**
@@ -452,4 +468,105 @@ public class DependencyBundlingAnalyzer extends AbstractDependencyComparingAnaly
return filePath != null && filePath.matches(".*\\.(ear|war)[\\\\/].*");
}
/**
* Determine if the dependency ecosystem is equal in the given dependencies.
*
* @param ecoSystem the ecosystem to validate against
* @param dependency a dependency to compare
* @param nextDependency a dependency to compare
* @return true if the ecosystem is equal in both dependencies; otherwise
* false
*/
private boolean ecoSystemIs(String ecoSystem, Dependency dependency, Dependency nextDependency) {
return ecoSystem.equals(dependency.getEcosystem()) && ecoSystem.equals(nextDependency.getEcosystem());
}
/**
* Determine if the dependency name is equal in the given dependencies.
*
* @param dependency a dependency to compare
* @param nextDependency a dependency to compare
* @return true if the name is equal in both dependencies; otherwise false
*/
private boolean namesAreEqual(Dependency dependency, Dependency nextDependency) {
return dependency.getName() != null && dependency.getName().equals(nextDependency.getName());
}
/**
* Determine if the dependency version is equal in the given dependencies.
* This method attempts to evaluate version range checks.
*
* @param current a dependency version to compare
* @param next a dependency version to compare
* @return true if the version is equal in both dependencies; otherwise
* false
*/
public static boolean npmVersionsMatch(String current, String next) {
String left = current;
String right = next;
if (left == null || right == null) {
return false;
}
if (left.equals(right) || "*".equals(left) || "*".equals(right)) {
return true;
}
if (left.contains(" ")) { // we have a version string from package.json
if (right.contains(" ")) { // we can't evaluate this ">=1.5.4 <2.0.0" vs "2 || 3"
return false;
}
if (!right.matches("^\\d.*$")) {
right = stripLeadingNonNumeric(right);
if (right == null) {
return false;
}
}
try {
final Semver v = new Semver(right, SemverType.NPM);
return v.satisfies(left);
} catch (SemverException ex) {
LOGGER.trace("ignore", ex);
}
} else {
if (!left.matches("^\\d.*$")) {
left = stripLeadingNonNumeric(left);
if (left == null) {
return false;
}
}
try {
Semver v = new Semver(left, SemverType.NPM);
if (v.satisfies(right)) {
return true;
}
if (!right.contains(" ")) {
left = current;
right = stripLeadingNonNumeric(right);
if (right != null) {
v = new Semver(right, SemverType.NPM);
return v.satisfies(left);
}
}
} catch (SemverException ex) {
LOGGER.trace("ignore", ex);
}
}
return false;
}
/**
* Strips leading non-numeric values from the start of the string. If no
* numbers are present this will return null.
*
* @param str the string to modify
* @return the string without leading non-numeric characters
*/
private static String stripLeadingNonNumeric(String str) {
for (int x = 0; x < str.length(); x++) {
if (Character.isDigit(str.codePointAt(x))) {
return str.substring(x);
}
}
return null;
}
}
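
A rough illustration (not part of this commit; version strings are hypothetical) of how npmVersionsMatch resolves a package.json range against a concrete version using semver4j:

// The side containing a space is treated as the package.json range expression; the
// other side is parsed as a concrete NPM version and checked against that range.
final Semver v = new Semver("1.6.0", SemverType.NPM);
final boolean ok = v.satisfies(">=1.5.4 <2.0.0"); // true
// npmVersionsMatch(">=1.5.4 <2.0.0", "1.6.0")  -> true
// npmVersionsMatch("*", "2.4.24")              -> true  (wildcard short-circuits)
// npmVersionsMatch(">=1.5.4 <2.0.0", "2 || 3") -> false (two range expressions cannot be compared)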

View File

@@ -120,7 +120,8 @@ public class DependencyMergingAnalyzer extends AbstractDependencyComparingAnalyz
* removed from the main analysis loop, this function adds to this
* collection
*/
private void mergeDependencies(final Dependency dependency, final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) {
public static void mergeDependencies(final Dependency dependency, final Dependency relatedDependency,
final Set<Dependency> dependenciesToRemove) {
LOGGER.debug("Merging '{}' into '{}'", relatedDependency.getFilePath(), dependency.getFilePath());
dependency.addRelatedDependency(relatedDependency);
for (Evidence e : relatedDependency.getEvidence(EvidenceType.VENDOR)) {
@@ -137,10 +138,10 @@ public class DependencyMergingAnalyzer extends AbstractDependencyComparingAnalyz
dependency.addRelatedDependency(d);
relatedDependency.removeRelatedDependencies(d);
}
if (dependency.getSha1sum().equals(relatedDependency.getSha1sum())) {
dependency.addAllProjectReferences(relatedDependency.getProjectReferences());
dependency.addAllProjectReferences(relatedDependency.getProjectReferences());
if (dependenciesToRemove != null) {
dependenciesToRemove.add(relatedDependency);
}
dependenciesToRemove.add(relatedDependency);
}
/**

View File

@@ -20,7 +20,6 @@ package org.owasp.dependencycheck.analyzer;
import org.apache.commons.io.FileUtils;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings;
@@ -30,16 +29,22 @@ import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import javax.annotation.concurrent.ThreadSafe;
import javax.json.Json;
import javax.json.JsonException;
import javax.json.JsonObject;
import javax.json.JsonReader;
import javax.json.JsonString;
import javax.json.JsonValue;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.Engine.Mode;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.Checksum;
import org.owasp.dependencycheck.utils.InvalidSettingException;
/**
* Used to analyze Node Package Manager (npm) package.json files, and collect
@@ -48,8 +53,7 @@ import org.owasp.dependencycheck.dependency.EvidenceType;
* @author Dale Visser
*/
@ThreadSafe
@Retired
public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
public class NodePackageAnalyzer extends AbstractNpmAnalyzer {
/**
* The logger.
@@ -59,7 +63,7 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
* A descriptor for the type of dependencies processed or added by this
* analyzer.
*/
public static final String DEPENDENCY_ECOSYSTEM = "npm";
public static final String DEPENDENCY_ECOSYSTEM = NPM_DEPENDENCY_ECOSYSTEM;
/**
* The name of the analyzer.
*/
@@ -73,10 +77,19 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
*/
public static final String PACKAGE_JSON = "package.json";
/**
* Filter that detects files named "package.json".
* The file name to scan.
*/
public static final String PACKAGE_LOCK_JSON = "package-lock.json";
/**
* The file name to scan.
*/
public static final String SHRINKWRAP_JSON = "npm-shrinkwrap.json";
/**
* Filter that detects files named "package-lock.json" or
* "npm-shrinkwrap.json".
*/
private static final FileFilter PACKAGE_JSON_FILTER = FileFilterBuilder.newInstance()
.addFilenames(PACKAGE_JSON).build();
.addFilenames(PACKAGE_LOCK_JSON, SHRINKWRAP_JSON).build();
/**
* Returns the FileFilter
@@ -88,9 +101,35 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
return PACKAGE_JSON_FILTER;
}
/**
* Performs validation on the configuration to ensure that the correct
* analyzers are in place.
*
* @param engine the dependency-check engine
* @throws InitializationException thrown if there is a configuration error
*/
@Override
protected void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
// NO-OP
if (engine.getMode() != Mode.EVIDENCE_COLLECTION) {
try {
final Settings settings = engine.getSettings();
final String[] tmp = settings.getArray(Settings.KEYS.ECOSYSTEM_SKIP_CPEANALYZER);
if (tmp != null) {
final List<String> skipEcosystems = Arrays.asList(tmp);
if (skipEcosystems.contains(DEPENDENCY_ECOSYSTEM)
&& !settings.getBoolean(Settings.KEYS.ANALYZER_NSP_PACKAGE_ENABLED)) {
LOGGER.debug("NodePackageAnalyzer enabled without a corresponding vulnerability analyzer");
final String msg = "Invalid Configuration: enabling the Node Package Analyzer without "
+ "using the NSP Analyzer is not supported.";
throw new InitializationException(msg);
} else if (!skipEcosystems.contains(DEPENDENCY_ECOSYSTEM)) {
LOGGER.warn("Using the CPE Analyzer with Node.js can result in many false positives.");
}
}
} catch (InvalidSettingException ex) {
throw new InitializationException("Unable to read configuration settings", ex);
}
}
}
/**
@@ -114,10 +153,10 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
}
/**
* Returns the key used in the properties file to reference the analyzer's
* enabled property.
* Returns the key used in the properties file to reference the enabled
* property for the analyzer.
*
* @return the analyzer's enabled property setting key
* @return the enabled property setting key for the analyzer
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
@@ -126,29 +165,31 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
@Override
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
dependency.setEcosystem(DEPENDENCY_ECOSYSTEM);
final File file = dependency.getActualFile();
if (!file.isFile() || file.length() == 0) {
engine.removeDependency(dependency);
final File dependencyFile = dependency.getActualFile();
if (!dependencyFile.isFile() || dependencyFile.length() == 0 || !shouldProcess(dependencyFile)) {
return;
}
try (JsonReader jsonReader = Json.createReader(FileUtils.openInputStream(file))) {
final JsonObject json = jsonReader.readObject();
if (json.containsKey("name")) {
final Object value = json.get("name");
if (value instanceof JsonString) {
final String valueString = ((JsonString) value).getString();
dependency.setName(valueString);
dependency.addEvidence(EvidenceType.PRODUCT, PACKAGE_JSON, "name", valueString, Confidence.HIGHEST);
dependency.addEvidence(EvidenceType.VENDOR, PACKAGE_JSON, "name_project",
String.format("%s_project", valueString), Confidence.LOW);
} else {
LOGGER.warn("JSON value not string as expected: {}", value);
}
final File baseDir = dependencyFile.getParentFile();
if (PACKAGE_LOCK_JSON.equals(dependency.getFileName())) {
final File shrinkwrap = new File(baseDir, SHRINKWRAP_JSON);
if (shrinkwrap.exists()) {
return;
}
addToEvidence(dependency, EvidenceType.PRODUCT, json, "description");
addToEvidence(dependency, EvidenceType.VENDOR, json, "author");
final String version = addToEvidence(dependency, EvidenceType.VERSION, json, "version");
dependency.setVersion(version);
}
final File nodeModules = new File(baseDir, "node_modules");
if (!nodeModules.isDirectory()) {
LOGGER.warn("Analyzing `{}` - however, the node_modules directory does not exist. "
+ "Please run `npm install` prior to running dependency-check", dependencyFile.toString());
return;
}
try (JsonReader jsonReader = Json.createReader(FileUtils.openInputStream(dependencyFile))) {
final JsonObject json = jsonReader.readObject();
final String parentName = json.getString("name");
final String parentVersion = json.getString("version");
final String parentPackage = String.format("%s:%s", parentName, parentVersion);
processDependencies(json, baseDir, dependencyFile, parentPackage, engine);
} catch (JsonException e) {
LOGGER.warn("Failed to parse package.json file.", e);
} catch (IOException e) {
@@ -157,41 +198,74 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
}
/**
* Adds information to an evidence collection from the node json
* configuration.
* Process the dependencies in the lock file by first parsing its
* dependencies and then finding the package.json for the module and adding
* it as a dependency.
*
* @param dep the dependency to add the evidence
* @param t the type of evidence to add
* @param json information from node.js
* @return the actual string set into evidence
* @param key the key to obtain the data from the json information
* @param json the data to process
* @param baseDir the base directory being scanned
* @param rootFile the root package-lock/npm-shrinkwrap being analyzed
* @param parentPackage the parent package name of the current node
* @param engine a reference to the dependency-check engine
* @throws AnalysisException thrown if there is an exception
*/
private String addToEvidence(Dependency dep, EvidenceType t, JsonObject json, String key) {
String evidenceStr = null;
if (json.containsKey(key)) {
final JsonValue value = json.get(key);
if (value instanceof JsonString) {
evidenceStr = ((JsonString) value).getString();
dep.addEvidence(t, PACKAGE_JSON, key, evidenceStr, Confidence.HIGHEST);
} else if (value instanceof JsonObject) {
final JsonObject jsonObject = (JsonObject) value;
for (final Map.Entry<String, JsonValue> entry : jsonObject.entrySet()) {
final String property = entry.getKey();
final JsonValue subValue = entry.getValue();
if (subValue instanceof JsonString) {
evidenceStr = ((JsonString) subValue).getString();
dep.addEvidence(t, PACKAGE_JSON,
String.format("%s.%s", key, property),
evidenceStr,
Confidence.HIGHEST);
} else {
LOGGER.warn("JSON sub-value not string as expected: {}", subValue);
}
private void processDependencies(JsonObject json, File baseDir, File rootFile,
String parentPackage, Engine engine) throws AnalysisException {
if (json.containsKey("dependencies")) {
final JsonObject deps = json.getJsonObject("dependencies");
for (Map.Entry<String, JsonValue> entry : deps.entrySet()) {
final JsonObject jo = (JsonObject) entry.getValue();
final String name = entry.getKey();
final String version = jo.getString("version");
final File base = Paths.get(baseDir.getPath(), "node_modules", name).toFile();
final File f = new File(base, PACKAGE_JSON);
if (jo.containsKey("dependencies")) {
final String subPackageName = String.format("%s/%s:%s", parentPackage, name, version);
processDependencies(jo, base, rootFile, subPackageName, engine);
}
Dependency child;
if (f.exists()) {
//TODO - we should use the integrity value instead of calculating the SHA1/MD5
child = new Dependency(f);
try (JsonReader jr = Json.createReader(FileUtils.openInputStream(f))) {
final JsonObject childJson = jr.readObject();
gatherEvidence(childJson, child);
} catch (JsonException e) {
LOGGER.warn("Failed to parse package.json file from dependency.", e);
} catch (IOException e) {
throw new AnalysisException("Problem occurred while reading dependency file.", e);
}
} else {
LOGGER.warn("Unable to find node module: {}", f.toString());
child = new Dependency(rootFile, true);
//TODO - we should use the integrity value instead of calculating the SHA1/MD5
child.setSha1sum(Checksum.getSHA1Checksum(String.format("%s:%s", name, version)));
child.setMd5sum(Checksum.getMD5Checksum(String.format("%s:%s", name, version)));
child.addEvidence(EvidenceType.VENDOR, rootFile.getName(), "name", name, Confidence.HIGHEST);
child.addEvidence(EvidenceType.PRODUCT, rootFile.getName(), "name", name, Confidence.HIGHEST);
child.addEvidence(EvidenceType.VERSION, rootFile.getName(), "version", version, Confidence.HIGHEST);
}
child.setName(name);
child.setVersion(version);
child.addProjectReference(parentPackage);
child.setEcosystem(DEPENDENCY_ECOSYSTEM);
final Dependency existing = findDependency(engine, name, version);
if (existing != null) {
if (existing.isVirtual()) {
DependencyMergingAnalyzer.mergeDependencies(child, existing, null);
engine.removeDependency(existing);
engine.addDependency(child);
} else {
DependencyBundlingAnalyzer.mergeDependencies(existing, child, null);
}
} else {
engine.addDependency(child);
}
} else {
LOGGER.warn("JSON value not string or JSON object as expected: {}", value);
}
}
return evidenceStr;
}
}
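
To make the lock-file traversal concrete, here is a minimal sketch (hypothetical module names, not part of this commit) of the structure processDependencies reads, built with the same javax.json API:

// Each entry under "dependencies" carries a concrete "version" and may nest its own
// "dependencies"; the module's package.json is resolved at
// node_modules/<name>/package.json relative to the lock file being analyzed.
final JsonObject lock = Json.createObjectBuilder()
        .add("name", "example-app")
        .add("version", "1.0.0")
        .add("dependencies", Json.createObjectBuilder()
                .add("dns-sync", Json.createObjectBuilder()
                        .add("version", "0.1.0")))
        .build();
// For the "dns-sync" entry the analyzer reads node_modules/dns-sync/package.json when it
// exists; otherwise it falls back to a virtual dependency built from the lock entry alone.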

View File

@@ -23,9 +23,7 @@ import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.data.nsp.Advisory;
import org.owasp.dependencycheck.data.nsp.NspSearch;
import org.owasp.dependencycheck.data.nsp.SanitizePackage;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
@@ -39,18 +37,14 @@ import java.net.MalformedURLException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import javax.annotation.concurrent.ThreadSafe;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonException;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.json.JsonReader;
import javax.json.JsonString;
import javax.json.JsonValue;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.URLConnectionFailureException;
/**
@@ -60,7 +54,7 @@ import org.owasp.dependencycheck.utils.URLConnectionFailureException;
* @author Steve Springett
*/
@ThreadSafe
public class NspAnalyzer extends AbstractFileTypeAnalyzer {
public class NspAnalyzer extends AbstractNpmAnalyzer {
/**
* The logger.
@@ -71,7 +65,11 @@ public class NspAnalyzer extends AbstractFileTypeAnalyzer {
* The default URL to the NSP check API.
*/
public static final String DEFAULT_URL = "https://api.nodesecurity.io/check";
/**
* A descriptor for the type of dependencies processed or added by this
* analyzer.
*/
public static final String DEPENDENCY_ECOSYSTEM = NPM_DEPENDENCY_ECOSYSTEM;
/**
* The file name to scan.
*/
@@ -113,6 +111,16 @@ public class NspAnalyzer extends AbstractFileTypeAnalyzer {
setEnabled(false);
throw new InitializationException("The configured URL to Node Security Platform is malformed", ex);
}
try {
final Settings settings = engine.getSettings();
final boolean nodeEnabled = settings.getBoolean(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED);
if (!nodeEnabled) {
LOGGER.warn("The Node Package Analyzer has been disabled; the resulting report will only "
+ " contain the known vulnerable dependency - not a bill of materials for the node project.");
}
} catch (InvalidSettingException ex) {
throw new InitializationException("Unable to read configuration settings", ex);
}
}
/**
@@ -136,10 +144,10 @@ public class NspAnalyzer extends AbstractFileTypeAnalyzer {
}
/**
* Returns the key used in the properties file to reference the analyzer's
* enabled property.x
* Returns the key used in the properties file to determine if the analyzer
* is enabled.
*
* @return the analyzer's enabled property setting key
* @return the enabled property setting key for the analyzer
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
@@ -148,17 +156,13 @@ public class NspAnalyzer extends AbstractFileTypeAnalyzer {
@Override
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
engine.removeDependency(dependency);
final File file = dependency.getActualFile();
if (!file.isFile() || file.length() == 0) {
if (!file.isFile() || file.length() == 0 || !shouldProcess(file)) {
return;
}
try (JsonReader jsonReader = Json.createReader(FileUtils.openInputStream(file))) {
// Do not scan the node_modules directory
if (file.getCanonicalPath().contains(File.separator + "node_modules" + File.separator)) {
LOGGER.debug("Skipping analysis of node module: " + file.getCanonicalPath());
return;
}
try (JsonReader jsonReader = Json.createReader(FileUtils.openInputStream(file))) {
// Retrieves the contents of package.json from the Dependency
final JsonObject packageJson = jsonReader.readObject();
@@ -192,77 +196,22 @@ public class NspAnalyzer extends AbstractFileTypeAnalyzer {
* Create a single vulnerable software object - these do not use CPEs unlike the NVD.
*/
final VulnerableSoftware vs = new VulnerableSoftware();
//vs.setVersion(advisory.getVulnerableVersions());
vs.setUpdate(advisory.getPatchedVersions());
//TODO consider changing this to available versions on the dependency
// - the update is a part of the version, not versions to update to
//vs.setUpdate(advisory.getPatchedVersions());
vs.setName(advisory.getModule() + ":" + advisory.getVulnerableVersions());
vuln.setVulnerableSoftware(new HashSet<>(Arrays.asList(vs)));
// Add the vulnerability to package.json
dependency.addVulnerability(vuln);
}
/*
* Adds evidence about the node package itself, not any of the modules.
*/
if (packageJson.containsKey("name")) {
final Object value = packageJson.get("name");
if (value instanceof JsonString) {
final String valueString = ((JsonString) value).getString();
dependency.addEvidence(EvidenceType.PRODUCT, PACKAGE_JSON, "name", valueString, Confidence.HIGHEST);
dependency.addEvidence(EvidenceType.VENDOR, PACKAGE_JSON, "name_project",
String.format("%s_project", valueString), Confidence.LOW);
final Dependency existing = findDependency(engine, advisory.getModule(), advisory.getVersion());
if (existing == null) {
final Dependency nodeModule = createDependency(dependency, advisory.getModule(), advisory.getVersion(), "transitive");
nodeModule.addVulnerability(vuln);
engine.addDependency(nodeModule);
} else {
LOGGER.warn("JSON value not string as expected: {}", value);
existing.addVulnerability(vuln);
}
}
/*
* Processes the dependencies objects in package.json and adds all the modules as related dependencies
*/
if (packageJson.containsKey("dependencies")) {
final JsonObject dependencies = packageJson.getJsonObject("dependencies");
processPackage(dependency, dependencies, "dependencies");
}
if (packageJson.containsKey("devDependencies")) {
final JsonObject dependencies = packageJson.getJsonObject("devDependencies");
processPackage(dependency, dependencies, "devDependencies");
}
if (packageJson.containsKey("optionalDependencies")) {
final JsonObject dependencies = packageJson.getJsonObject("optionalDependencies");
processPackage(dependency, dependencies, "optionalDependencies");
}
if (packageJson.containsKey("peerDependencies")) {
final JsonObject dependencies = packageJson.getJsonObject("peerDependencies");
processPackage(dependency, dependencies, "peerDependencies");
}
if (packageJson.containsKey("bundleDependencies")) {
final JsonArray dependencies = packageJson.getJsonArray("bundleDependencies");
processPackage(dependency, dependencies, "bundleDependencies");
}
if (packageJson.containsKey("bundledDependencies")) {
final JsonArray dependencies = packageJson.getJsonArray("bundledDependencies");
processPackage(dependency, dependencies, "bundledDependencies");
}
/*
* Adds the license if defined in package.json
*/
if (packageJson.containsKey("license")) {
final Object value = packageJson.get("license");
if (value instanceof JsonString) {
dependency.setLicense(packageJson.getString("license"));
} else {
dependency.setLicense(packageJson.getJsonObject("license").getString("type"));
}
}
/*
* Adds general evidence to about the package.
*/
addToEvidence(dependency, EvidenceType.PRODUCT, packageJson, "description");
addToEvidence(dependency, EvidenceType.VENDOR, packageJson, "author");
addToEvidence(dependency, EvidenceType.VERSION, packageJson, "version");
dependency.setDisplayFileName(String.format("%s/%s", file.getParentFile().getName(), file.getName()));
} catch (URLConnectionFailureException e) {
this.setEnabled(false);
throw new AnalysisException(e.getMessage(), e);
@@ -274,98 +223,4 @@ public class NspAnalyzer extends AbstractFileTypeAnalyzer {
throw new AnalysisException(String.format("Failed to parse %s file.", file.getPath()), e);
}
}
/**
* Processes a part of package.json (as defined by JsonArray) and update the
* specified dependency with relevant info.
*
* @param dependency the Dependency to update
* @param jsonArray the jsonArray to parse
* @param depType the dependency type
*/
private void processPackage(Dependency dependency, JsonArray jsonArray, String depType) {
final JsonObjectBuilder builder = Json.createObjectBuilder();
for (JsonString str : jsonArray.getValuesAs(JsonString.class)) {
builder.add(str.toString(), "");
}
final JsonObject jsonObject = builder.build();
processPackage(dependency, jsonObject, depType);
}
/**
* Processes a part of package.json (as defined by JsonObject) and update
* the specified dependency with relevant info.
*
* @param dependency the Dependency to update
* @param jsonObject the jsonObject to parse
* @param depType the dependency type
*/
private void processPackage(Dependency dependency, JsonObject jsonObject, String depType) {
for (int i = 0; i < jsonObject.size(); i++) {
for (Map.Entry<String, JsonValue> entry : jsonObject.entrySet()) {
/*
* Create identifies that include the npm module and version. Since these are defined,
* assign the highest confidence.
*/
final Identifier moduleName = new Identifier("npm", "Module", null, entry.getKey());
moduleName.setConfidence(Confidence.HIGHEST);
String version = "";
if (entry.getValue() != null && entry.getValue().getValueType() == JsonValue.ValueType.STRING) {
version = ((JsonString) entry.getValue()).getString();
}
final Identifier moduleVersion = new Identifier("npm", "Version", null, version);
moduleVersion.setConfidence(Confidence.HIGHEST);
final Identifier moduleDepType = new Identifier("npm", "Scope", null, depType);
moduleVersion.setConfidence(Confidence.HIGHEST);
/*
* Create related dependencies for each module defined in package.json. The path to the related
* dependency will not actually exist but needs to be unique (due to the use of Set in Dependency).
* The use of related dependencies is a way to specify the actual software BOM in package.json.
*/
//TODO is this actually correct? or should these be transitive dependencies?
final Dependency nodeModule = new Dependency(new File(dependency.getActualFile() + "#" + entry.getKey()), true);
nodeModule.setDisplayFileName(entry.getKey());
nodeModule.addIdentifier(moduleName);
nodeModule.addIdentifier(moduleVersion);
nodeModule.addIdentifier(moduleDepType);
dependency.addRelatedDependency(nodeModule);
}
}
}
/**
* Adds information to an evidence collection from the node json
* configuration.
*
* @param dep the dependency to which the evidence will be added
* @param type the type of evidence to be added
* @param json information from node.js
* @param key the key to obtain the data from the json information
*/
private void addToEvidence(Dependency dep, EvidenceType type, JsonObject json, String key) {
if (json.containsKey(key)) {
final JsonValue value = json.get(key);
if (value instanceof JsonString) {
dep.addEvidence(type, PACKAGE_JSON, key, ((JsonString) value).getString(), Confidence.HIGHEST);
} else if (value instanceof JsonObject) {
final JsonObject jsonObject = (JsonObject) value;
for (final Map.Entry<String, JsonValue> entry : jsonObject.entrySet()) {
final String property = entry.getKey();
final JsonValue subValue = entry.getValue();
if (subValue instanceof JsonString) {
dep.addEvidence(type, PACKAGE_JSON,
String.format("%s.%s", key, property),
((JsonString) subValue).getString(),
Confidence.HIGHEST);
} else {
LOGGER.warn("JSON sub-value not string as expected: {}", subValue);
}
}
} else {
LOGGER.warn("JSON value not string or JSON object as expected: {}", value);
}
}
}
}

View File

@@ -67,7 +67,7 @@ public class SwiftPackageManagerAnalyzer extends AbstractFileTypeAnalyzer {
public static final String SPM_FILE_NAME = "Package.swift";
/**
* Filter that detects files named "package.json".
* Filter that detects files named "Package.swift".
*/
private static final FileFilter SPM_FILE_FILTER = FileFilterBuilder.newInstance().addFilenames(SPM_FILE_NAME).build();

View File

@@ -123,6 +123,7 @@ public class NspSearch {
try (InputStream in = new BufferedInputStream(conn.getInputStream());
JsonReader jsonReader = Json.createReader(in)) {
final JsonArray array = jsonReader.readArray();
if (array != null) {
for (int i = 0; i < array.size(); i++) {
final JsonObject object = array.getJsonObject(i);

View File

@@ -123,9 +123,13 @@ public class EscapeTool {
*/
public String csv(String text) {
if (text == null || text.isEmpty()) {
return text;
return "\"\"";
}
return StringEscapeUtils.escapeCsv(text.trim().replace("\n", " "));
final String str = text.trim().replace("\n", " ");
if (str.length() == 0) {
return "\"\"";
}
return StringEscapeUtils.escapeCsv(str);
}
/**
@@ -137,7 +141,7 @@ public class EscapeTool {
*/
public String csvIdentifiers(Set<Identifier> ids) {
if (ids == null || ids.isEmpty()) {
return "";
return "\"\"";
}
boolean addComma = false;
final StringBuilder sb = new StringBuilder();
@@ -151,6 +155,9 @@ public class EscapeTool {
sb.append(id.getValue());
}
}
if (sb.length() == 0) {
return "\"\"";
}
return StringEscapeUtils.escapeCsv(sb.toString());
}
@@ -163,7 +170,7 @@ public class EscapeTool {
*/
public String csvCpe(Set<Identifier> ids) {
if (ids == null || ids.isEmpty()) {
return "";
return "\"\"";
}
boolean addComma = false;
final StringBuilder sb = new StringBuilder();
@@ -177,6 +184,9 @@ public class EscapeTool {
sb.append(id.getValue());
}
}
if (sb.length() == 0) {
return "\"\"";
}
return StringEscapeUtils.escapeCsv(sb.toString());
}
@@ -189,7 +199,7 @@ public class EscapeTool {
*/
public String csvCpeConfidence(Set<Identifier> ids) {
if (ids == null || ids.isEmpty()) {
return "";
return "\"\"";
}
boolean addComma = false;
final StringBuilder sb = new StringBuilder();
@@ -203,6 +213,9 @@ public class EscapeTool {
sb.append(id.getConfidence());
}
}
if (sb.length() == 0) {
return "\"\"";
}
return StringEscapeUtils.escapeCsv(sb.toString());
}
@@ -215,12 +228,12 @@ public class EscapeTool {
*/
public String csvGav(Set<Identifier> ids) {
if (ids == null || ids.isEmpty()) {
return "";
return "\"\"";
}
boolean addComma = false;
final StringBuilder sb = new StringBuilder();
for (Identifier id : ids) {
if ("maven".equals(id.getType())) {
if ("maven".equals(id.getType()) || "npm".equals(id.getType())) {
if (addComma) {
sb.append(", ");
} else {
@@ -229,7 +242,9 @@ public class EscapeTool {
sb.append(id.getValue());
}
}
if (sb.length() == 0) {
return "\"\"";
}
return StringEscapeUtils.escapeCsv(sb.toString());
}
}
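
As a quick illustration of the empty-value handling added above (hedged example calls, not part of this commit), the csv helper now behaves as follows:

final EscapeTool tool = new EscapeTool();
tool.csv(null);         // returns "" (two double-quote characters) rather than null
tool.csv("   ");        // trimmed to empty, also returns the quoted empty field
tool.csv("plain text"); // unchanged: plain text
tool.csv("a, b\nc");    // newline replaced by a space, then CSV-escaped: "a, b c"

The same quoted-empty fallback applies in csvIdentifiers, csvCpe, csvCpeConfidence, and csvGav when no matching identifiers are present, and csvGav now includes "npm" identifiers alongside "maven" ones.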

View File

@@ -126,4 +126,6 @@ analyzer.nvdcve.enabled=true
analyzer.vulnerabilitysuppression.enabled=true
updater.nvdcve.enabled=true
updater.versioncheck.enabled=true
analyzer.versionfilter.enabled=true
analyzer.versionfilter.enabled=true
ecosystem.skip.cpeanalyzer=npm

View File

@@ -17,7 +17,7 @@ Copyright (c) 2017 Jeremy Long. All Rights Reserved.
@author Jeremy Long <jeremy.long@owasp.org>
@version 1 *###
"Project","ScanDate","DependencyName","DependencyPath","Description","License","Md5","Sha1","Identifiers","CPE","CVE","CWE","Vulnerability","Source","Severity","CVSSv2","GAV","CPE Confidence","Evidence Count"
"Project","ScanDate","DependencyName","DependencyPath","Description","License","Md5","Sha1","Identifiers","CPE","CVE","CWE","Vulnerability","Source","Severity","CVSSv2","Build Coordinates","CPE Confidence","Evidence Count"
#macro(writeSev $score)#if($score<4.0)"Low"#elseif($score>=7.0)"High"#else"Medium"#end#end
#foreach($dependency in $dependencies)#if($dependency.getVulnerabilities().size()>0)
#foreach($vuln in $dependency.getVulnerabilities(true))

View File

@@ -623,7 +623,7 @@ Getting Help: <a href="https://groups.google.com/forum/#!forum/dependency-check"
<thead><tr style="text-align:left">
<th class="sortable" data-sort="string" title="The name of the dependency">Dependency</th>
<th class="sortable" data-sort="string" title="The Common Platform Enumeration">CPE</th>
<th class="sortable" data-sort="string" title="The Maven GAV Coordinates">GAV</th>
<th class="sortable" data-sort="string" title="The Build Coordinates">Coordinates</th>
<th class="sortable" data-sort="int" title="The highest CVE Severity">Highest Severity</th>
<th class="sortable" data-sort="int" title="The number of Common Vulnerability and Exposure (CVE) entries">CVE Count</th>
<th class="sortable" data-sort="string" title="The confidence rating dependency-check has for the identified CPE">CPE Confidence</th>
@@ -638,7 +638,7 @@ Getting Help: <a href="https://groups.google.com/forum/#!forum/dependency-check"
#set($cpeIdConf="")
#set($sortValue="")
#foreach($id in $dependency.getIdentifiers())
#if ($id.type!="maven")
#if ($id.type!="maven" && $id.type!="npm")
#set($sortValue=$sortValue+$id.value)
#end
#end
@@ -646,7 +646,7 @@ Getting Help: <a href="https://groups.google.com/forum/#!forum/dependency-check"
#set($sortValue="")
#set($cpeSort=0)
#foreach($id in $dependency.getIdentifiers())
#if ($id.type=="maven")
#if ($id.type=="maven" || $id.type=="npm")
#if ($mavenlink=="" || !$mavenlink.url)
#set($mavenlink=$id)
#end
@@ -778,7 +778,7 @@ Getting Help: <a href="https://groups.google.com/forum/#!forum/dependency-check"
</li>
#end
#if ($id.type=="npm")
<li>$enc.html($id.value): $enc.html($id.description)</li>
<li>$enc.html($id.value)</li>
#end
#end
</ul>

View File

@@ -41,22 +41,15 @@
"identifiers": [
#set($loopCount=0)
#foreach($id in $related.getIdentifiers())
#if ($id.type=="maven")
#if ($id.type=="maven" || $id.type=="npm")
#set($loopCount=$loopCount+1)
#if($loopCount>1),#end
{
"type": "$enc.json($id.type)",
"name": "$id.value"
"id": "$id.value"
#if ($id.url),"url": "$enc.json($id.url)"#end
#if ($id.notes),"notes": "$enc.json($id.notes)"#end
}
#end
#if ($id.type=="npm")
#set($loopCount=$loopCount+1)
#if($loopCount>1),#end
{
"id":"$enc.json($id.value)"
,"description":"$enc.json($id.description)"
#if ($id.description),"description":"$enc.json($id.description)"#end
}
#end
#end

View File

@@ -76,16 +76,12 @@ public class AnalyzerServiceTest extends BaseDBTestCase {
AnalyzerService instance = new AnalyzerService(Thread.currentThread().getContextClassLoader(), getSettings());
List<Analyzer> result = instance.getAnalyzers();
String experimental = "CMake Analyzer";
String retired = "Node.js Package Analyzer";
boolean found = false;
boolean retiredFound = false;
for (Analyzer a : result) {
if (experimental.equals(a.getName())) {
found = true;
}
if (retired.equals(a.getName())) {
retiredFound = true;
}
}
assertFalse("Experimental analyzer loaded when set to false", found);
assertFalse("Retired analyzer loaded when set to false", retiredFound);
@@ -99,13 +95,10 @@ public class AnalyzerServiceTest extends BaseDBTestCase {
if (experimental.equals(a.getName())) {
found = true;
}
if (retired.equals(a.getName())) {
retiredFound = true;
}
}
assertTrue("Experimental analyzer not loaded when set to true", found);
assertFalse("Retired analyzer loaded when set to false", retiredFound);
getSettings().setBoolean(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, false);
getSettings().setBoolean(Settings.KEYS.ANALYZER_RETIRED_ENABLED, true);
instance = new AnalyzerService(Thread.currentThread().getContextClassLoader(), getSettings());
@@ -116,11 +109,8 @@ public class AnalyzerServiceTest extends BaseDBTestCase {
if (experimental.equals(a.getName())) {
found = true;
}
if (retired.equals(a.getName())) {
retiredFound = true;
}
}
assertFalse("Experimental analyzer loaded when set to false", found);
assertTrue("Retired analyzer not loaded when set to true", retiredFound);
//assertTrue("Retired analyzer not loaded when set to true", retiredFound);
}
}

View File

@@ -29,6 +29,7 @@ import java.io.File;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.*;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.dependency.EvidenceType;
/**
@@ -42,6 +43,10 @@ public class NodePackageAnalyzerTest extends BaseTest {
* The analyzer to test.
*/
private NodePackageAnalyzer analyzer;
/**
* A reference to the engine.
*/
private Engine engine;
/**
* Correctly setup the analyzer for testing.
@@ -52,14 +57,15 @@ public class NodePackageAnalyzerTest extends BaseTest {
@Override
public void setUp() throws Exception {
super.setUp();
engine = new Engine(this.getSettings());
analyzer = new NodePackageAnalyzer();
analyzer.setFilesMatched(true);
analyzer.initialize(getSettings());
analyzer.prepare(null);
analyzer.prepare(engine);
}
/**
* Cleanup the analyzer's temp files, etc.
* Cleanup temp files, close resources, etc.
*
* @throws Exception thrown if there is a problem
*/
@@ -67,6 +73,7 @@ public class NodePackageAnalyzerTest extends BaseTest {
@Override
public void tearDown() throws Exception {
analyzer.close();
engine.close();
super.tearDown();
}
@@ -83,7 +90,8 @@ public class NodePackageAnalyzerTest extends BaseTest {
*/
@Test
public void testSupportsFiles() {
assertThat(analyzer.accept(new File("package.json")), is(true));
assertThat(analyzer.accept(new File("package-lock.json")), is(true));
assertThat(analyzer.accept(new File("npm-shrinkwrap.json")), is(true));
}
/**
@@ -92,17 +100,39 @@ public class NodePackageAnalyzerTest extends BaseTest {
* @throws AnalysisException is thrown when an exception occurs.
*/
@Test
public void testAnalyzePackageJson() throws AnalysisException {
final Dependency result = new Dependency(BaseTest.getResourceAsFile(this,
"nodejs/node_modules/dns-sync/package.json"));
analyzer.analyze(result, null);
public void testAnalyzeShrinkwrapJson() throws AnalysisException {
final Dependency toScan = new Dependency(BaseTest.getResourceAsFile(this,
"nodejs/npm-shrinkwrap.json"));
analyzer.analyze(toScan, engine);
assertEquals("Expected 1 dependency", engine.getDependencies().length, 1);
final Dependency result = engine.getDependencies()[0];
final String vendorString = result.getEvidence(EvidenceType.VENDOR).toString();
assertThat(vendorString, containsString("Sanjeev Koranga"));
assertThat(vendorString, containsString("dns-sync_project"));
assertThat(vendorString, containsString("dns-sync"));
assertThat(result.getEvidence(EvidenceType.PRODUCT).toString(), containsString("dns-sync"));
assertThat(result.getEvidence(EvidenceType.VERSION).toString(), containsString("0.1.0"));
assertEquals(NodePackageAnalyzer.DEPENDENCY_ECOSYSTEM, result.getEcosystem());
assertEquals("dns-sync", result.getName());
assertEquals("0.1.0", result.getVersion());
}
/**
* Test of the analyze method, of class NodePackageAnalyzer.
*
* @throws AnalysisException is thrown when an exception occurs.
*/
@Test
public void testAnalyzePackageJsonWithShrinkwrap() throws AnalysisException {
final Dependency packageLock = new Dependency(BaseTest.getResourceAsFile(this,
"nodejs/package-lock.json"));
final Dependency shrinkwrap = new Dependency(BaseTest.getResourceAsFile(this,
"nodejs/npm-shrinkwrap.json"));
engine.addDependency(packageLock);
engine.addDependency(shrinkwrap);
assertEquals(2, engine.getDependencies().length);
analyzer.analyze(packageLock, engine);
assertEquals(1, engine.getDependencies().length); //package-lock was removed without analysis
analyzer.analyze(shrinkwrap, engine);
assertEquals(1, engine.getDependencies().length); //shrinkwrap was removed with analysis adding 1 dependency
}
}

View File

@@ -1,7 +1,5 @@
package org.owasp.dependencycheck.analyzer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.owasp.dependencycheck.BaseTest;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
@@ -11,91 +9,91 @@ import java.io.File;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.*;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.exception.InitializationException;
public class NspAnalyzerTest extends BaseTest {
private NspAnalyzer analyzer;
@Before
@Override
public void setUp() throws Exception {
super.setUp();
analyzer = new NspAnalyzer();
analyzer.setFilesMatched(true);
analyzer.initialize(getSettings());
analyzer.prepare(null);
}
@After
@Override
public void tearDown() throws Exception {
analyzer.close();
super.tearDown();
}
@Test
public void testGetName() {
NspAnalyzer analyzer = new NspAnalyzer();
assertThat(analyzer.getName(), is("Node Security Platform Analyzer"));
}
@Test
public void testSupportsFiles() {
NspAnalyzer analyzer = new NspAnalyzer();
assertThat(analyzer.accept(new File("package.json")), is(true));
}
@Test
public void testAnalyzePackage() throws AnalysisException {
final Dependency result = new Dependency(BaseTest.getResourceAsFile(this, "nsp/package.json"));
analyzer.analyze(result, null);
assertTrue(result.getEvidence(EvidenceType.VENDOR).toString().contains("owasp-nodejs-goat_project"));
assertTrue(result.getEvidence(EvidenceType.PRODUCT).toString().contains("A tool to learn OWASP Top 10 for node.js developers"));
assertTrue(result.getEvidence(EvidenceType.VERSION).toString().contains("1.3.0"));
public void testAnalyzePackage() throws AnalysisException, InitializationException {
try (Engine engine = new Engine(getSettings())) {
NspAnalyzer analyzer = new NspAnalyzer();
analyzer.setFilesMatched(true);
analyzer.initialize(getSettings());
analyzer.prepare(engine);
final Dependency toScan = new Dependency(BaseTest.getResourceAsFile(this, "nsp/package.json"));
analyzer.analyze(toScan, engine);
boolean found = false;
assertEquals("4 dependencies should be identified", 4, engine.getDependencies().length);
for (Dependency result : engine.getDependencies()) {
if ("package.json?uglify-js".equals(result.getFileName())) {
found = true;
assertTrue(result.getEvidence(EvidenceType.VENDOR).toString().contains("uglify-js"));
assertTrue(result.getEvidence(EvidenceType.PRODUCT).toString().contains("uglify-js"));
assertTrue(result.getEvidence(EvidenceType.VERSION).toString().contains("2.4.24"));
assertTrue(result.isVirtual());
}
}
assertTrue("Uglify was not found", found);
}
}
@Test
public void testAnalyzeEmpty() throws AnalysisException {
final Dependency result = new Dependency(BaseTest.getResourceAsFile(this, "nsp/empty.json"));
analyzer.analyze(result, null);
public void testAnalyzeEmpty() throws AnalysisException, InitializationException {
try (Engine engine = new Engine(getSettings())) {
NspAnalyzer analyzer = new NspAnalyzer();
analyzer.setFilesMatched(true);
analyzer.initialize(getSettings());
analyzer.prepare(engine);
final Dependency result = new Dependency(BaseTest.getResourceAsFile(this, "nsp/empty.json"));
analyzer.analyze(result, engine);
assertEquals(0, result.getEvidence(EvidenceType.VENDOR).size());
assertEquals(0, result.getEvidence(EvidenceType.PRODUCT).size());
assertEquals(0, result.getEvidence(EvidenceType.VERSION).size());
assertEquals(0, result.getEvidence(EvidenceType.VENDOR).size());
assertEquals(0, result.getEvidence(EvidenceType.PRODUCT).size());
assertEquals(0, result.getEvidence(EvidenceType.VERSION).size());
}
}
@Test
public void testAnalyzePackageJsonWithBundledDeps() throws AnalysisException {
final Dependency result = new Dependency(BaseTest.getResourceAsFile(this, "nsp/bundled.deps.package.json"));
analyzer.analyze(result, null);
assertTrue(result.getEvidence(EvidenceType.VENDOR).toString().contains("Philipp Dunkel <pip@pipobscure.com>"));
assertTrue(result.getEvidence(EvidenceType.PRODUCT).toString().contains("Native Access to Mac OS-X FSEvents"));
assertTrue(result.getEvidence(EvidenceType.VERSION).toString().contains("1.1.1"));
public void testAnalyzePackageJsonInNodeModulesDirectory() throws AnalysisException, InitializationException {
try (Engine engine = new Engine(getSettings())) {
NspAnalyzer analyzer = new NspAnalyzer();
analyzer.setFilesMatched(true);
analyzer.initialize(getSettings());
analyzer.prepare(engine);
final Dependency toScan = new Dependency(BaseTest.getResourceAsFile(this, "nodejs/node_modules/dns-sync/package.json"));
engine.addDependency(toScan);
analyzer.analyze(toScan, engine);
assertEquals("No dependencies should exist", 0, engine.getDependencies().length);
}
}
@Test
public void testAnalyzePackageJsonWithLicenseObject() throws AnalysisException {
final Dependency result = new Dependency(BaseTest.getResourceAsFile(this, "nsp/license.obj.package.json"));
analyzer.analyze(result, null);
assertTrue(result.getEvidence(EvidenceType.VENDOR).toString().contains("Twitter, Inc."));
assertTrue(result.getEvidence(EvidenceType.PRODUCT).toString().contains("The most popular front-end framework for developing responsive, mobile first projects on the web"));
assertTrue(result.getEvidence(EvidenceType.VERSION).toString().contains("3.2.0"));
}
@Test
public void testAnalyzePackageJsonInNodeModulesDirectory() throws AnalysisException {
final Dependency result = new Dependency(BaseTest.getResourceAsFile(this, "nodejs/node_modules/dns-sync/package.json"));
analyzer.analyze(result, null);
// node modules are not scanned - no evidence is collected
assertTrue(result.size() == 0);
}
@Test
public void testAnalyzeInvalidPackageMissingName() throws AnalysisException {
final Dependency result = new Dependency(BaseTest.getResourceAsFile(this, "nsp/minimal-invalid.json"));
analyzer.analyze(result, null);
// Upon analysis, not throwing an exception in this case is all that's required to pass this test
public void testAnalyzeInvalidPackageMissingName() throws AnalysisException, InitializationException {
try (Engine engine = new Engine(getSettings())) {
NspAnalyzer analyzer = new NspAnalyzer();
analyzer.setFilesMatched(true);
analyzer.initialize(getSettings());
analyzer.prepare(engine);
final Dependency result = new Dependency(BaseTest.getResourceAsFile(this, "nsp/minimal-invalid.json"));
analyzer.analyze(result, engine);
// Upon analysis, not throwing an exception in this case is all that's required to pass this test
} catch (Throwable ex) {
fail("This test should not throw an exception");
throw ex;
}
}
}

View File

@@ -126,12 +126,12 @@ public class EscapeToolTest {
public void testCsv() {
String text = null;
EscapeTool instance = new EscapeTool();
String expResult = null;
String expResult = "\"\"";
String result = instance.csv(text);
assertEquals(expResult, result);
text = "";
expResult = "";
expResult = "\"\"";
result = instance.csv(text);
assertEquals(expResult, result);
@@ -148,38 +148,38 @@ public class EscapeToolTest {
public void testCsvIdentifiers() {
EscapeTool instance = new EscapeTool();
Set<Identifier> ids = null;
String expResult = "";
String expResult = "\"\"";
String result = instance.csvIdentifiers(ids);
assertEquals(expResult, result);
ids = new HashSet<>();
expResult = "";
expResult = "\"\"";
result = instance.csvIdentifiers(ids);
assertEquals(expResult, result);
ids = new HashSet<>();
ids.add(new Identifier("cpe", "cpe:/a:somegroup:something:1.0", ""));
expResult = "";
expResult = "\"\"";
result = instance.csvIdentifiers(ids);
assertEquals(expResult, result);
ids = new HashSet<>();
ids.add(new Identifier("gav", "somegroup:something:1.0", ""));
ids.add(new Identifier("maven", "somegroup:something:1.0", ""));
expResult = "somegroup:something:1.0";
result = instance.csvIdentifiers(ids);
assertEquals(expResult, result);
ids = new HashSet<>();
ids.add(new Identifier("cpe", "cpe:/a:somegroup:something:1.0", ""));
ids.add(new Identifier("gav", "somegroup:something:1.0", ""));
ids.add(new Identifier("maven", "somegroup:something:1.0", ""));
expResult = "somegroup:something:1.0";
result = instance.csvIdentifiers(ids);
assertEquals(expResult, result);
ids = new HashSet<>();
ids.add(new Identifier("cpe", "cpe:/a:somegroup:something:1.0", ""));
ids.add(new Identifier("gav", "somegroup:something:1.0", ""));
ids.add(new Identifier("gav", "somegroup2:something:1.2", ""));
ids.add(new Identifier("maven", "somegroup:something:1.0", ""));
ids.add(new Identifier("maven", "somegroup2:something:1.2", ""));
expResult = "\"somegroup:something:1.0, somegroup2:something:1.2\"";
String expResult2 = "\"somegroup2:something:1.2, somegroup:something:1.0\"";
result = instance.csvIdentifiers(ids);
@@ -193,18 +193,18 @@ public class EscapeToolTest {
public void testCsvCpe() {
EscapeTool instance = new EscapeTool();
Set<Identifier> ids = null;
String expResult = "";
String expResult = "\"\"";
String result = instance.csvCpe(ids);
assertEquals(expResult, result);
ids = new HashSet<>();
expResult = "";
expResult = "\"\"";
result = instance.csvCpe(ids);
assertEquals(expResult, result);
ids = new HashSet<>();
ids.add(new Identifier("gav", "somegroup:something:1.0", ""));
expResult = "";
ids.add(new Identifier("maven", "somegroup:something:1.0", ""));
expResult = "\"\"";
result = instance.csvCpe(ids);
assertEquals(expResult, result);
@@ -216,14 +216,14 @@ public class EscapeToolTest {
ids = new HashSet<>();
ids.add(new Identifier("cpe", "cpe:/a:somegroup:something:1.0", ""));
ids.add(new Identifier("gav", "somegroup:something:1.0", ""));
ids.add(new Identifier("maven", "somegroup:something:1.0", ""));
expResult = "cpe:/a:somegroup:something:1.0";
result = instance.csvCpe(ids);
assertEquals(expResult, result);
ids = new HashSet<>();
ids.add(new Identifier("cpe", "cpe:/a:somegroup:something:1.0", ""));
ids.add(new Identifier("gav", "somegroup:something:1.0", ""));
ids.add(new Identifier("maven", "somegroup:something:1.0", ""));
ids.add(new Identifier("cpe", "cpe:/a:somegroup2:something:1.2", ""));
expResult = "\"cpe:/a:somegroup:something:1.0, cpe:/a:somegroup2:something:1.2\"";
String expResult2 = "\"cpe:/a:somegroup2:something:1.2, cpe:/a:somegroup:something:1.0\"";
@@ -238,18 +238,18 @@ public class EscapeToolTest {
public void testCsvCpeConfidence() {
EscapeTool instance = new EscapeTool();
Set<Identifier> ids = null;
String expResult = "";
String expResult = "\"\"";
String result = instance.csvCpeConfidence(ids);
assertEquals(expResult, result);
ids = new HashSet<>();
expResult = "";
expResult = "\"\"";
result = instance.csvCpeConfidence(ids);
assertEquals(expResult, result);
ids = new HashSet<>();
ids.add(new Identifier("gav", "somegroup:something:1.0", ""));
expResult = "";
ids.add(new Identifier("maven", "somegroup:something:1.0", ""));
expResult = "\"\"";
result = instance.csvCpeConfidence(ids);
assertEquals(expResult, result);
@@ -268,7 +268,7 @@ public class EscapeToolTest {
Identifier i2 = new Identifier("cpe", "cpe:/a:somegroup:something2:1.0", "");
i2.setConfidence(Confidence.MEDIUM);
ids.add(i2);
Identifier i3 = new Identifier("gav", "somegroup:something:1.0", "");
Identifier i3 = new Identifier("maven", "somegroup:something:1.0", "");
i3.setConfidence(Confidence.LOW);
ids.add(i3);
@@ -285,18 +285,18 @@ public class EscapeToolTest {
public void testCsvGav() {
EscapeTool instance = new EscapeTool();
Set<Identifier> ids = null;
String expResult = "";
String expResult = "\"\"";
String result = instance.csvGav(ids);
assertEquals(expResult, result);
ids = new HashSet<>();
expResult = "";
expResult = "\"\"";
result = instance.csvGav(ids);
assertEquals(expResult, result);
ids = new HashSet<>();
ids.add(new Identifier("cpe", "somegroup:something:1.0", ""));
expResult = "";
expResult = "\"\"";
result = instance.csvGav(ids);
assertEquals(expResult, result);
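The revised expectations in this test reflect that EscapeTool's CSV output now always wraps fields in double quotes, so a null or empty value renders as "" (a quoted empty field) and multi-valued fields are quoted as a whole. A minimal sketch of that quoting rule under standard CSV escaping (doubling embedded quotes); the class and method below are illustrative, not EscapeTool's actual API.

final class CsvQuoting {
    // Always quote the field; escape embedded double quotes by doubling them,
    // so null/empty input becomes a quoted empty field and values containing
    // commas remain a single CSV cell.
    static String quote(String value) {
        final String body = value == null ? "" : value.replace("\"", "\"\"");
        return "\"" + body + "\"";
    }
}
// quote(null) and quote("") both return the two-character string "" (a quoted empty field);
// quote("somegroup:something:1.0, somegroup2:something:1.2") returns the same text wrapped in quotes.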

View File

@@ -123,3 +123,5 @@ analyzer.nvdcve.enabled=true
analyzer.vulnerabilitysuppression.enabled=true
updater.nvdcve.enabled=true
updater.versioncheck.enabled=true
ecosystem.skip.cpeanalyzer=npm
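The new ecosystem.skip.cpeanalyzer=npm default lets whole ecosystems bypass CPE matching, presumably because npm packages are now identified through the NSP analyzer instead. A rough sketch of how an analyzer could consult such a setting, assuming the value may be a comma-separated list; the class and method names are illustrative and not the actual CPEAnalyzer code.

import java.util.Arrays;
import java.util.List;
import org.owasp.dependencycheck.utils.Settings;

final class EcosystemSkipExample {
    // Reads the configured list of ecosystems that should skip CPE analysis and
    // reports whether the given dependency ecosystem is among them.
    static boolean skipsCpeAnalysis(Settings settings, String ecosystem) {
        final String configured = settings.getString("ecosystem.skip.cpeanalyzer", "");
        final List<String> skipped = Arrays.asList(configured.split("\\s*,\\s*"));
        return ecosystem != null && skipped.contains(ecosystem);
    }
}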

View File

@@ -0,0 +1,18 @@
{
"name": "test",
"version": "0.0.1",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"dns-sync": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/dns-sync/-/dns-sync-0.1.0.tgz",
"integrity": "sha1-gPcpFC513UtfSx0+Upcx7jEplHI=",
"requires": {
"debug": "2.6.9",
"shelljs": "0.5.3"
}
}
}
}
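This new test resource mirrors what the analyzer has to read: a lockfileVersion 1 file whose "dependencies" object maps module names to pinned versions. A small, self-contained sketch of pulling name/version pairs out of such a file with the JSON-P (javax.json) API, which requires a JSON-P implementation on the classpath; the class name and the printing are illustrative only.

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonReader;

final class LockFileReaderExample {
    // Prints each top-level entry in the lock file's "dependencies" object
    // together with its pinned "version" field.
    public static void main(String[] args) throws IOException {
        try (InputStream in = new FileInputStream("package-lock.json");
                JsonReader reader = Json.createReader(in)) {
            final JsonObject root = reader.readObject();
            final JsonObject dependencies = root.getJsonObject("dependencies");
            if (dependencies != null) {
                for (String name : dependencies.keySet()) {
                    final JsonObject entry = dependencies.getJsonObject(name);
                    System.out.println(name + " -> " + entry.getString("version", "unknown"));
                }
            }
        }
    }
}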

View File

@@ -0,0 +1,17 @@
{
"name": "test",
"version": "0.0.1",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"dns-sync": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/dns-sync/-/dns-sync-0.1.0.tgz",
"integrity": "sha1-gPcpFC513UtfSx0+Upcx7jEplHI=",
"requires": {
"debug": "2.6.9",
"shelljs": "0.5.3"
}
}
}
}

View File

@@ -1,48 +0,0 @@
{
"name": "fsevents",
"version": "1.1.1",
"description": "Native Access to Mac OS-X FSEvents",
"main": "fsevents.js",
"dependencies": {
"nan": "^2.3.0",
"node-pre-gyp": "^0.6.29"
},
"os": [
"darwin"
],
"engines": {
"node": ">=0.8.0"
},
"scripts": {
"install": "node install",
"prepublish": "if [ $(npm -v | head -c 1) -lt 3 ]; then exit 1; fi && npm dedupe",
"test": "tap ./test"
},
"binary": {
"module_name": "fse",
"module_path": "./lib/binding/{configuration}/{node_abi}-{platform}-{arch}/",
"remote_path": "./v{version}/",
"package_name": "{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz",
"host": "https://fsevents-binaries.s3-us-west-2.amazonaws.com"
},
"repository": {
"type": "git",
"url": "https://github.com/strongloop/fsevents.git"
},
"keywords": [
"fsevents",
"mac"
],
"author": "Philipp Dunkel <pip@pipobscure.com>",
"license": "MIT",
"bugs": {
"url": "https://github.com/strongloop/fsevents/issues"
},
"bundledDependencies": [
"node-pre-gyp"
],
"homepage": "https://github.com/strongloop/fsevents",
"devDependencies": {
"tap": "~0.4.8"
}
}

View File

@@ -1,81 +0,0 @@
{
"name": "bootstrap",
"description": "The most popular front-end framework for developing responsive, mobile first projects on the web.",
"version": "3.2.0",
"keywords": [
"css",
"less",
"mobile-first",
"responsive",
"front-end",
"framework",
"web"
],
"homepage": "http://getbootstrap.com",
"author": "Twitter, Inc.",
"scripts": {
"test": "grunt test"
},
"style": "dist/css/bootstrap.css",
"less": "less/bootstrap.less",
"repository": {
"type": "git",
"url": "https://github.com/twbs/bootstrap.git"
},
"bugs": {
"url": "https://github.com/twbs/bootstrap/issues"
},
"license": {
"type": "MIT",
"url": "https://github.com/twbs/bootstrap/blob/master/LICENSE"
},
"devDependencies": {
"btoa": "~1.1.2",
"glob": "~4.0.2",
"grunt": "~0.4.5",
"grunt-autoprefixer": "~0.7.6",
"grunt-banner": "~0.2.3",
"grunt-contrib-clean": "~0.5.0",
"grunt-contrib-concat": "~0.4.0",
"grunt-contrib-connect": "~0.8.0",
"grunt-contrib-copy": "~0.5.0",
"grunt-contrib-csslint": "~0.2.0",
"grunt-contrib-cssmin": "~0.10.0",
"grunt-contrib-jade": "~0.12.0",
"grunt-contrib-jshint": "~0.10.0",
"grunt-contrib-less": "~0.11.3",
"grunt-contrib-qunit": "~0.5.1",
"grunt-contrib-uglify": "~0.5.0",
"grunt-contrib-watch": "~0.6.1",
"grunt-csscomb": "~2.0.1",
"grunt-exec": "~0.4.5",
"grunt-html-validation": "~0.1.18",
"grunt-jekyll": "~0.4.2",
"grunt-jscs-checker": "~0.6.0",
"grunt-saucelabs": "~8.1.0",
"grunt-sed": "~0.1.1",
"load-grunt-tasks": "~0.6.0",
"markdown": "~0.5.0",
"npm-shrinkwrap": "~3.1.6",
"time-grunt": "~0.3.2"
},
"engines": {
"node": "~0.10.1"
},
"jspm": {
"main": "js/bootstrap",
"directories": {
"example": "examples",
"lib": "dist"
},
"shim": {
"js/bootstrap": {
"imports": "jquery",
"exports": "$"
}
},
"buildConfig": {
"uglify": true
}
}
}

View File

@@ -20,7 +20,7 @@ Copyright (c) 2013 Jeremy Long. All Rights Reserved.
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>3.0.3-SNAPSHOT</version>
<version>3.1.0-SNAPSHOT</version>
</parent>
<artifactId>dependency-check-maven</artifactId>
<packaging>maven-plugin</packaging>

View File

@@ -21,7 +21,7 @@ Copyright (c) 2017 Jeremy Long. All Rights Reserved.
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>3.0.3-SNAPSHOT</version>
<version>3.1.0-SNAPSHOT</version>
</parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-plugin</artifactId>

View File

@@ -20,7 +20,7 @@ Copyright (c) 2014 - Jeremy Long. All Rights Reserved.
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>3.0.3-SNAPSHOT</version>
<version>3.1.0-SNAPSHOT</version>
</parent>
<artifactId>dependency-check-utils</artifactId>

View File

@@ -442,6 +442,10 @@ public final class Settings {
* new version available.
*/
public static final String UPDATE_VERSION_CHECK_ENABLED = "updater.versioncheck.enabled";
/**
* The key to determine which ecosystems should skip the CPE analysis.
*/
public static final String ECOSYSTEM_SKIP_CPEANALYZER = "ecosystem.skip.cpeanalyzer";
/**
* private constructor because this is a "utility" class containing

View File

@@ -20,7 +20,7 @@ Copyright (c) 2012 - Jeremy Long
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>3.0.3-SNAPSHOT</version>
<version>3.1.0-SNAPSHOT</version>
<packaging>pom</packaging>
<modules>
@@ -625,6 +625,11 @@ Copyright (c) 2012 - Jeremy Long
</reporting>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.vdurmont</groupId>
<artifactId>semver4j</artifactId>
<version>2.1.0</version>
</dependency>
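Aside: semver4j is pinned here (and added to dependency-check-core earlier in this change), presumably so npm-style version ranges from advisories can be evaluated against installed versions. A minimal example of the library's range check in NPM mode; whether the analyzers use exactly this call is an assumption.

import com.vdurmont.semver4j.Semver;

final class SemverExample {
    public static void main(String[] args) {
        // NPM mode understands npm range syntax such as ^, ~ and ||.
        final Semver installed = new Semver("2.4.24", Semver.SemverType.NPM);
        System.out.println(installed.satisfies("<2.4.24 || >=2.5.0")); // false: 2.4.24 falls outside both parts
        System.out.println(installed.satisfies("^2.4.0"));             // true: within the 2.x caret range
    }
}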
<!-- analysis core (used by Jenkins) uses 1.6-->
<dependency>
<groupId>joda-time</groupId>