overhaul node package and nsp analyzer
@@ -21,6 +21,7 @@ import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Set;
@@ -106,6 +107,12 @@ public class CPEAnalyzer extends AbstractAnalyzer {
* The CVE Database.
*/
private CveDB cve;
/**
* The list of ecosystems to skip during analysis. These are skipped because
* there is generally a more accurate vulnerability analyzer in the
* pipeline.
*/
private List<String> skipEcosystems;

/**
* Returns the name of this analyzer.
@@ -136,6 +143,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
*/
@Override
public void prepareAnalyzer(Engine engine) throws InitializationException {
super.prepareAnalyzer(engine);
try {
this.open(engine.getDatabase());
} catch (IOException ex) {
@@ -145,6 +153,13 @@ public class CPEAnalyzer extends AbstractAnalyzer {
LOGGER.debug("Exception accessing the database", ex);
throw new InitializationException("An exception occurred accessing the database", ex);
}
final String[] tmp = engine.getSettings().getArray(Settings.KEYS.ECOSYSTEM_SKIP_CPEANALYZER);
if (tmp == null) {
skipEcosystems = new ArrayList<>();
} else {
LOGGER.info("Skipping CPE Analysis for {}", tmp);
skipEcosystems = Arrays.asList(tmp);
}
}

/**
@@ -525,6 +540,9 @@ public class CPEAnalyzer extends AbstractAnalyzer {
*/
@Override
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
if (skipEcosystems.contains(dependency.getEcosystem())) {
return;
}
try {
determineCPE(dependency);
} catch (CorruptIndexException ex) {

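Editor's note: the hunks above give CPEAnalyzer an ecosystem skip list. prepareAnalyzer reads Settings.KEYS.ECOSYSTEM_SKIP_CPEANALYZER and analyzeDependency returns early for those ecosystems, so a more specific analyzer (here, the NSP analyzer for npm) can own them. A minimal, self-contained sketch of that guard follows; the "npm" value, the class name, and the shouldSkip helper are illustrative and not part of the diff.

import java.util.Arrays;
import java.util.List;

// Editor's sketch only: mirrors the early-return guard added to
// CPEAnalyzer.analyzeDependency above. Names and values are illustrative.
public class SkipEcosystemSketch {

    // e.g. what the ECOSYSTEM_SKIP_CPEANALYZER setting might resolve to
    private static final List<String> SKIP_ECOSYSTEMS = Arrays.asList("npm");

    static boolean shouldSkip(String dependencyEcosystem) {
        return dependencyEcosystem != null && SKIP_ECOSYSTEMS.contains(dependencyEcosystem);
    }

    public static void main(String[] args) {
        System.out.println(shouldSkip("npm"));  // true  -> CPE matching skipped
        System.out.println(shouldSkip("java")); // false -> CPE matching proceeds
    }
}
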
@@ -139,8 +139,9 @@ public class DependencyMergingAnalyzer extends AbstractDependencyComparingAnalyz
relatedDependency.removeRelatedDependencies(d);
}
dependency.addAllProjectReferences(relatedDependency.getProjectReferences());

dependenciesToRemove.add(relatedDependency);
if (dependenciesToRemove != null) {
dependenciesToRemove.add(relatedDependency);
}
}

/**

@@ -20,7 +20,6 @@ package org.owasp.dependencycheck.analyzer;
import org.apache.commons.io.FileUtils;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings;
@@ -30,6 +29,7 @@ import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
@@ -38,11 +38,12 @@ import javax.json.Json;
import javax.json.JsonException;
import javax.json.JsonObject;
import javax.json.JsonReader;
import javax.json.JsonString;
import javax.json.JsonValue;
import org.owasp.dependencycheck.Engine.Mode;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.Checksum;
import org.owasp.dependencycheck.utils.InvalidSettingException;

/**
@@ -52,7 +53,7 @@ import org.owasp.dependencycheck.utils.InvalidSettingException;
* @author Dale Visser
*/
@ThreadSafe
public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
public class NodePackageAnalyzer extends AbstractNpmAnalyzer {

/**
* The logger.
@@ -62,7 +63,7 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
* A descriptor for the type of dependencies processed or added by this
* analyzer.
*/
public static final String DEPENDENCY_ECOSYSTEM = "npm";
public static final String DEPENDENCY_ECOSYSTEM = NPM_DEPENDENCY_ECOSYSTEM;
/**
* The name of the analyzer.
*/
@@ -76,10 +77,18 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
*/
public static final String PACKAGE_JSON = "package.json";
/**
* Filter that detects files named "package.json".
* The file name to scan.
*/
public static final String PACKAGE_LOCK_JSON = "package-lock.json";
/**
* The file name to scan.
*/
public static final String SHRINKWRAP_JSON = "shrinkwrap.json";
/**
* Filter that detects files named "package-lock.json" or "shrinkwrap.json".
*/
private static final FileFilter PACKAGE_JSON_FILTER = FileFilterBuilder.newInstance()
.addFilenames(PACKAGE_JSON).build();
.addFilenames(PACKAGE_LOCK_JSON, SHRINKWRAP_JSON).build();

/**
* Returns the FileFilter
@@ -103,7 +112,7 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
if (engine.getMode() != Mode.EVIDENCE_COLLECTION) {
try {
final Settings settings = engine.getSettings();
final String[] tmp = settings.getArray(Settings.KEYS.ECOSYSTEM_SKIP_NVDCVE);
final String[] tmp = settings.getArray(Settings.KEYS.ECOSYSTEM_SKIP_CPEANALYZER);
if (tmp != null) {
final List<String> skipEcosystems = Arrays.asList(tmp);
if (skipEcosystems.contains(DEPENDENCY_ECOSYSTEM)
@@ -113,7 +122,7 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
+ "using the NSP Analyzer is not supported.";
throw new InitializationException(msg);
} else if (!skipEcosystems.contains(DEPENDENCY_ECOSYSTEM)) {
LOGGER.warn("Using the NVD CVE Analyzer with Node.js can result in many false positives.");
LOGGER.warn("Using the CPE Analyzer with Node.js can result in many false positives.");
}
}
} catch (InvalidSettingException ex) {
@@ -143,10 +152,10 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
}

/**
* Returns the key used in the properties file to reference the analyzer's
* enabled property.
* Returns the key used in the properties file to reference the enabled
* property for the analyzer.
*
* @return the analyzer's enabled property setting key
* @return the enabled property setting key for the analyzer
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
@@ -155,16 +164,34 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {

@Override
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
dependency.setEcosystem(DEPENDENCY_ECOSYSTEM);
final File file = dependency.getActualFile();
engine.removeDependency(dependency);
File file = dependency.getActualFile();
if (!file.isFile() || file.length() == 0) {
return;
}
try {
// Do not scan the node_modules directory
if (file.getCanonicalPath().contains(File.separator + "node_modules" + File.separator)) {
LOGGER.debug("Skipping analysis of node module: " + file.getCanonicalPath());
return;
}
} catch (IOException ex) {
throw new RuntimeException(ex);
}
File baseDir = file.getParentFile();
if (PACKAGE_LOCK_JSON.equals(dependency.getFileName())) {
File shrinkwrap = new File(baseDir, SHRINKWRAP_JSON);
if (shrinkwrap.exists()) {
return;
}
}

try (JsonReader jsonReader = Json.createReader(FileUtils.openInputStream(file))) {
final JsonObject json = jsonReader.readObject();

gatherEvidence(json, dependency);

final String parentName = json.getString("name");
final String parentVersion = json.getString("version");
final String parentPackage = String.format("%s:%s", parentName, parentVersion);
processDependencies(json, baseDir, file, parentPackage, engine);
} catch (JsonException e) {
LOGGER.warn("Failed to parse package.json file.", e);
} catch (IOException e) {
@@ -172,80 +199,94 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
}
}

/**
* Collects evidence from the given JSON for the associated dependency.
*
* @param json the JSON that contains the evidence to collect
* @param dependency the dependency to add the evidence to
*/
public static void gatherEvidence(final JsonObject json, Dependency dependency) {
if (json.containsKey("name")) {
final Object value = json.get("name");
if (value instanceof JsonString) {
final String valueString = ((JsonString) value).getString();
dependency.setName(valueString);
dependency.setPackagePath(valueString);
dependency.addEvidence(EvidenceType.PRODUCT, PACKAGE_JSON, "name", valueString, Confidence.HIGHEST);
dependency.addEvidence(EvidenceType.VENDOR, PACKAGE_JSON, "name", valueString, Confidence.HIGH);
} else {
LOGGER.warn("JSON value not string as expected: {}", value);
}
}
addToEvidence(dependency, EvidenceType.PRODUCT, json, "description");
addToEvidence(dependency, EvidenceType.VENDOR, json, "author");
final String version = addToEvidence(dependency, EvidenceType.VERSION, json, "version");
if (version != null) {
dependency.setVersion(version);
dependency.addIdentifier("npm", String.format("%s:%s", dependency.getName(), version), null, Confidence.HIGHEST);
}
private void processDependencies(final JsonObject json, File baseDir, File rootFile, final String parentPackage, Engine engine) throws AnalysisException {
if (json.containsKey("dependencies")) {
JsonObject deps = json.getJsonObject("dependencies");
for (Map.Entry<String, JsonValue> entry : deps.entrySet()) {
JsonObject jo = (JsonObject) entry.getValue();
final String name = entry.getKey();
final String version = jo.getString("version");
File base = Paths.get(baseDir.getPath(), "node_modules", name).toFile();
File f = new File(base, PACKAGE_JSON);

// Adds the license if defined in package.json
if (json.containsKey("license")) {
final Object value = json.get("license");
if (value instanceof JsonString) {
dependency.setLicense(json.getString("license"));
} else {
dependency.setLicense(json.getJsonObject("license").getString("type"));
}
}
}

/**
* Adds information to an evidence collection from the node json
* configuration.
*
* @param dep the dependency to add the evidence
* @param t the type of evidence to add
* @param json information from node.js
* @return the actual string set into evidence
* @param key the key to obtain the data from the json information
*/
private static String addToEvidence(Dependency dep, EvidenceType t, JsonObject json, String key) {
String evidenceStr = null;
if (json.containsKey(key)) {
final JsonValue value = json.get(key);
if (value instanceof JsonString) {
evidenceStr = ((JsonString) value).getString();
dep.addEvidence(t, PACKAGE_JSON, key, evidenceStr, Confidence.HIGHEST);
} else if (value instanceof JsonObject) {
final JsonObject jsonObject = (JsonObject) value;
for (final Map.Entry<String, JsonValue> entry : jsonObject.entrySet()) {
final String property = entry.getKey();
final JsonValue subValue = entry.getValue();
if (subValue instanceof JsonString) {
evidenceStr = ((JsonString) subValue).getString();
dep.addEvidence(t, PACKAGE_JSON,
String.format("%s.%s", key, property),
evidenceStr,
Confidence.HIGHEST);
} else {
LOGGER.warn("JSON sub-value not string as expected: {}", subValue);
}
if (jo.containsKey("dependencies")) {
final String subPackageName = String.format("%s/%s:%s", parentPackage, name, version);
processDependencies(jo, base, rootFile, subPackageName, engine);
}

Dependency child;
if (f.exists()) {
//TODO - we should use the integrity value instead of calculating the SHA1/MD5
child = new Dependency(f);
try (JsonReader jr = Json.createReader(FileUtils.openInputStream(f))) {
JsonObject childJson = jr.readObject();
gatherEvidence(childJson, child);

} catch (JsonException e) {
LOGGER.warn("Failed to parse package.json file from dependency.", e);
} catch (IOException e) {
throw new AnalysisException("Problem occurred while reading dependency file.", e);
}
} else {
LOGGER.error("Unable to find child file {}", f.toString());
child = new Dependency(rootFile, true);
//TODO - we should use the integrity value instead of calculating the SHA1/MD5
child.setSha1sum(Checksum.getSHA1Checksum(String.format("%s:%s", name, version)));
child.setMd5sum(Checksum.getMD5Checksum(String.format("%s:%s", name, version)));
child.addEvidence(EvidenceType.VENDOR, rootFile.getName(), "name", name, Confidence.HIGHEST);
child.addEvidence(EvidenceType.PRODUCT, rootFile.getName(), "name", name, Confidence.HIGHEST);
child.addEvidence(EvidenceType.VERSION, rootFile.getName(), "version", version, Confidence.HIGHEST);
}
child.setName(name);
child.setVersion(version);
child.addProjectReference(parentPackage);
child.setEcosystem(DEPENDENCY_ECOSYSTEM);

Dependency existing = findDependency(engine, name, version);
if (existing != null) {
if (existing.isVirtual()) {
DependencyMergingAnalyzer.mergeDependencies(child, existing, null);
engine.removeDependency(existing);
engine.addDependency(child);
} else {
DependencyBundlingAnalyzer.mergeDependencies(existing, child, null);
}
} else {
engine.addDependency(child);
}
} else {
LOGGER.warn("JSON value not string or JSON object as expected: {}", value);
}
}
return evidenceStr;

// gatherEvidence(json, dependency);
//
// // only run this if we are in evidence collection or the NSP analyzer has been disabled
// if (engine.getMode() == Mode.EVIDENCE_COLLECTION
// || !engine.getSettings().getBoolean(Settings.KEYS.ANALYZER_NSP_PACKAGE_ENABLED)) {
// //Processes the dependencies objects in package.json and adds all the modules as dependencies
// if (json.containsKey("dependencies")) {
// final JsonObject dependencies = json.getJsonObject("dependencies");
// processPackage(engine, dependency, dependencies, "dependencies");
// }
// if (json.containsKey("devDependencies")) {
// final JsonObject dependencies = json.getJsonObject("devDependencies");
// processPackage(engine, dependency, dependencies, "devDependencies");
// }
// if (json.containsKey("optionalDependencies")) {
// final JsonObject dependencies = json.getJsonObject("optionalDependencies");
// processPackage(engine, dependency, dependencies, "optionalDependencies");
// }
// if (json.containsKey("peerDependencies")) {
// final JsonObject dependencies = json.getJsonObject("peerDependencies");
// processPackage(engine, dependency, dependencies, "peerDependencies");
// }
// if (json.containsKey("bundleDependencies")) {
// final JsonArray dependencies = json.getJsonArray("bundleDependencies");
// processPackage(engine, dependency, dependencies, "bundleDependencies");
// }
// if (json.containsKey("bundledDependencies")) {
// final JsonArray dependencies = json.getJsonArray("bundledDependencies");
// processPackage(engine, dependency, dependencies, "bundledDependencies");
// }
// }
}
}

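Editor's note: NodePackageAnalyzer now scans package-lock.json and shrinkwrap.json rather than package.json, and processDependencies walks the lock file's nested "dependencies" objects, resolving each entry against node_modules/<name>/package.json and falling back to a virtual dependency when that file is missing. A rough sketch of just the recursion follows, using the same javax.json API the diff relies on; the class name, sample lock data, and printed output are illustrative, and a javax.json implementation is assumed to be on the classpath.

import java.io.StringReader;
import java.util.Map;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonValue;

// Editor's sketch only: walks a lock-file "dependencies" tree the way
// processDependencies above does. The real code also creates and merges
// Dependency objects; here we only print the parent -> child relationships.
public class LockFileWalkSketch {

    static void walk(JsonObject node, String parentPackage) {
        if (!node.containsKey("dependencies")) {
            return;
        }
        JsonObject deps = node.getJsonObject("dependencies");
        for (Map.Entry<String, JsonValue> entry : deps.entrySet()) {
            JsonObject jo = (JsonObject) entry.getValue();
            String name = entry.getKey();
            String version = jo.getString("version");
            System.out.println(parentPackage + " -> " + name + ":" + version);
            // nested dependencies use the parentPackage/name:version convention from the diff
            walk(jo, parentPackage + "/" + name + ":" + version);
        }
    }

    public static void main(String[] args) {
        // illustrative lock-file fragment
        String lock = "{\"name\":\"app\",\"version\":\"1.0.0\","
                + "\"dependencies\":{\"left-pad\":{\"version\":\"1.3.0\","
                + "\"dependencies\":{\"wordwrap\":{\"version\":\"0.0.3\"}}}}}";
        JsonObject json = Json.createReader(new StringReader(lock)).readObject();
        walk(json, json.getString("name") + ":" + json.getString("version"));
    }
}
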
@@ -23,7 +23,6 @@ import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.data.nsp.Advisory;
import org.owasp.dependencycheck.data.nsp.NspSearch;
import org.owasp.dependencycheck.data.nsp.SanitizePackage;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
@@ -38,19 +37,13 @@ import java.net.MalformedURLException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import javax.annotation.concurrent.ThreadSafe;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonException;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.json.JsonReader;
import javax.json.JsonString;
import javax.json.JsonValue;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.Checksum;
import org.owasp.dependencycheck.utils.URLConnectionFailureException;

/**
@@ -60,7 +53,7 @@ import org.owasp.dependencycheck.utils.URLConnectionFailureException;
* @author Steve Springett
*/
@ThreadSafe
public class NspAnalyzer extends AbstractFileTypeAnalyzer {
public class NspAnalyzer extends AbstractNpmAnalyzer {

/**
* The logger.
@@ -75,7 +68,7 @@ public class NspAnalyzer extends AbstractFileTypeAnalyzer {
* A descriptor for the type of dependencies processed or added by this
* analyzer.
*/
public static final String DEPENDENCY_ECOSYSTEM = NodePackageAnalyzer.DEPENDENCY_ECOSYSTEM;
public static final String DEPENDENCY_ECOSYSTEM = NPM_DEPENDENCY_ECOSYSTEM;
/**
* The file name to scan.
*/
@@ -152,53 +145,27 @@ public class NspAnalyzer extends AbstractFileTypeAnalyzer {

@Override
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
engine.removeDependency(dependency);
final File file = dependency.getActualFile();
if (!file.isFile() || file.length() == 0) {
return;
}

try {
// Do not scan the node_modules directory
if (file.getCanonicalPath().contains(File.separator + "node_modules" + File.separator)) {
LOGGER.debug("Skipping analysis of node module: " + file.getCanonicalPath());
return;
}
} catch (IOException ex) {
throw new RuntimeException(ex);
}

try (JsonReader jsonReader = Json.createReader(FileUtils.openInputStream(file))) {

// Retrieves the contents of package.json from the Dependency
final JsonObject packageJson = jsonReader.readObject();

if (dependency.getEcosystem() == null || dependency.getName() == null) {
NodePackageAnalyzer.gatherEvidence(packageJson, dependency);
dependency.setEcosystem(DEPENDENCY_ECOSYSTEM);
}

// Do not scan the node_modules directory
if (file.getCanonicalPath().contains(File.separator + "node_modules" + File.separator)) {
LOGGER.debug("Skipping analysis of node module: " + file.getCanonicalPath());
return;
}

//Processes the dependencies objects in package.json and adds all the modules as dependencies
if (packageJson.containsKey("dependencies")) {
final JsonObject dependencies = packageJson.getJsonObject("dependencies");
processPackage(engine, dependency, dependencies, "dependencies");
}
if (packageJson.containsKey("devDependencies")) {
final JsonObject dependencies = packageJson.getJsonObject("devDependencies");
processPackage(engine, dependency, dependencies, "devDependencies");
}
if (packageJson.containsKey("optionalDependencies")) {
final JsonObject dependencies = packageJson.getJsonObject("optionalDependencies");
processPackage(engine, dependency, dependencies, "optionalDependencies");
}
if (packageJson.containsKey("peerDependencies")) {
final JsonObject dependencies = packageJson.getJsonObject("peerDependencies");
processPackage(engine, dependency, dependencies, "peerDependencies");
}
if (packageJson.containsKey("bundleDependencies")) {
final JsonArray dependencies = packageJson.getJsonArray("bundleDependencies");
processPackage(engine, dependency, dependencies, "bundleDependencies");
}
if (packageJson.containsKey("bundledDependencies")) {
final JsonArray dependencies = packageJson.getJsonArray("bundledDependencies");
processPackage(engine, dependency, dependencies, "bundledDependencies");
}

// Create a sanitized version of the package.json
final JsonObject sanitizedJson = SanitizePackage.sanitize(packageJson);

@@ -228,7 +195,8 @@ public class NspAnalyzer extends AbstractFileTypeAnalyzer {
* Create a single vulnerable software object - these do not use CPEs unlike the NVD.
*/
final VulnerableSoftware vs = new VulnerableSoftware();
//TODO consider changing this to available versions on the dependency
//TODO consider changing this to available versions on the dependency
// - the update is a part of the version, not versions to update to
//vs.setUpdate(advisory.getPatchedVersions());

vs.setName(advisory.getModule() + ":" + advisory.getVulnerableVersions());
@@ -254,160 +222,4 @@ public class NspAnalyzer extends AbstractFileTypeAnalyzer {
throw new AnalysisException(String.format("Failed to parse %s file.", file.getPath()), e);
}
}

/**
* Construct a dependency object.
*
* @param dependency the parent dependency
* @param name the name of the dependency to create
* @param version the version of the dependency to create
* @param scope the scope of the dependency being created
* @return the generated dependency
*/
private Dependency createDependency(Dependency dependency, String name, String version, String scope) {
final Dependency nodeModule = new Dependency(new File(dependency.getActualFile() + "?" + name), true);
nodeModule.setEcosystem(DEPENDENCY_ECOSYSTEM);
//this is virtual - the sha1 is purely for the hyperlink in the final html report
nodeModule.setSha1sum(Checksum.getSHA1Checksum(String.format("%s:%s", name, version)));
nodeModule.setMd5sum(Checksum.getMD5Checksum(String.format("%s:%s", name, version)));
nodeModule.addEvidence(EvidenceType.PRODUCT, "package.json", "name", name, Confidence.HIGHEST);
nodeModule.addEvidence(EvidenceType.VENDOR, "package.json", "name", name, Confidence.HIGH);
nodeModule.addEvidence(EvidenceType.VERSION, "package.json", "version", version, Confidence.HIGHEST);
nodeModule.addProjectReference(dependency.getName() + ": " + scope);
nodeModule.setName(name);
nodeModule.setVersion(version);
nodeModule.addIdentifier("npm", String.format("%s:%s", name, version), null, Confidence.HIGHEST);
return nodeModule;
}

/**
* Processes a part of package.json (as defined by JsonArray) and updates the
* specified dependency with relevant info.
*
* @param engine the dependency-check engine
* @param dependency the Dependency to update
* @param jsonArray the jsonArray to parse
* @param depType the dependency type
*/
private void processPackage(Engine engine, Dependency dependency, JsonArray jsonArray, String depType) {
final JsonObjectBuilder builder = Json.createObjectBuilder();
for (JsonString str : jsonArray.getValuesAs(JsonString.class)) {
builder.add(str.toString(), "");
}
final JsonObject jsonObject = builder.build();
processPackage(engine, dependency, jsonObject, depType);
}

/**
* Processes a part of package.json (as defined by JsonObject) and updates
* the specified dependency with relevant info.
*
* @param engine the dependency-check engine
* @param dependency the Dependency to update
* @param jsonObject the jsonObject to parse
* @param depType the dependency type
*/
private void processPackage(Engine engine, Dependency dependency, JsonObject jsonObject, String depType) {
for (int i = 0; i < jsonObject.size(); i++) {
for (Map.Entry<String, JsonValue> entry : jsonObject.entrySet()) {

final String name = entry.getKey();
String version = "";
if (entry.getValue() != null && entry.getValue().getValueType() == JsonValue.ValueType.STRING) {
version = ((JsonString) entry.getValue()).getString();
}
final Dependency existing = findDependency(engine, name, version);
if (existing == null) {
final Dependency nodeModule = createDependency(dependency, name, version, depType);
engine.addDependency(nodeModule);
} else {
existing.addProjectReference(dependency.getName() + ": " + depType);
}
}
}
}

/**
* Adds information to an evidence collection from the node json
* configuration.
*
* @param dep the dependency to which the evidence will be added
* @param type the type of evidence to be added
* @param json information from node.js
* @param key the key to obtain the data from the json information
*/
private void addToEvidence(Dependency dep, EvidenceType type, JsonObject json, String key) {
if (json.containsKey(key)) {
final JsonValue value = json.get(key);
if (value instanceof JsonString) {
dep.addEvidence(type, PACKAGE_JSON, key, ((JsonString) value).getString(), Confidence.HIGHEST);
} else if (value instanceof JsonObject) {
final JsonObject jsonObject = (JsonObject) value;
for (final Map.Entry<String, JsonValue> entry : jsonObject.entrySet()) {
final String property = entry.getKey();
final JsonValue subValue = entry.getValue();
if (subValue instanceof JsonString) {
dep.addEvidence(type, PACKAGE_JSON,
String.format("%s.%s", key, property),
((JsonString) subValue).getString(),
Confidence.HIGHEST);
} else {
LOGGER.warn("JSON sub-value not string as expected: {}", subValue);
}
}
} else {
LOGGER.warn("JSON value not string or JSON object as expected: {}", value);
}
}
}

/**
* Locates the dependency from the list of dependencies that have been
* scanned by the engine.
*
* @param engine the dependency-check engine
* @param name the name of the dependency to find
* @param version the version of the dependency to find
* @return the identified dependency; otherwise null
*/
private Dependency findDependency(Engine engine, String name, String version) {
for (Dependency d : engine.getDependencies()) {
if (DEPENDENCY_ECOSYSTEM.equals(d.getEcosystem()) && name.equals(d.getName()) && version != null && d.getVersion() != null) {
String dependencyVersion = d.getVersion();
if (dependencyVersion.startsWith("^") || dependencyVersion.startsWith("~")) {
dependencyVersion = dependencyVersion.substring(1);
}

if (version.equals(dependencyVersion)) {
return d;
}
if (version.startsWith("^") || version.startsWith("~") || version.contains("*")) {
String type;
String tmp;
if (version.startsWith("^") || version.startsWith("~")) {
type = version.substring(0, 1);
tmp = version.substring(1);
} else {
type = "*";
tmp = version;
}
final String[] v = tmp.split(" ")[0].split("\\.");
final String[] depVersion = dependencyVersion.split("\\.");

if ("^".equals(type) && v[0].equals(depVersion[0])) {
return d;
} else if ("~".equals(type) && v.length >= 2 && depVersion.length >= 2
&& v[0].equals(depVersion[0]) && v[1].equals(depVersion[1])) {
return d;
} else if (v[0].equals("*")
|| (v.length >= 2 && v[0].equals(depVersion[0]) && v[1].equals("*"))
|| (v.length >= 3 && depVersion.length >= 2 && v[0].equals(depVersion[0])
&& v[1].equals(depVersion[1]) && v[2].equals("*"))) {
return d;
}
}
}
}
return null;
}
}

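Editor's note: findDependency above matches a declared npm range against an already-scanned dependency by comparing version segments after stripping a leading ^ or ~ and by honoring * wildcards; it is deliberately rough rather than full semver. A small sketch of just that comparison follows, under the assumption that it mirrors the branches in the diff; the class name and example values are illustrative.

// Editor's sketch only: prefix-style range matching similar to findDependency.
// It covers only the ^, ~ and * cases handled in the diff, not full semver ranges.
public class RangeMatchSketch {

    static boolean matches(String declared, String installed) {
        if (declared.equals(installed)) {
            return true;
        }
        String[] d = declared.replaceFirst("^[\\^~]", "").split(" ")[0].split("\\.");
        String[] i = installed.split("\\.");
        if (declared.startsWith("^")) {
            return d[0].equals(i[0]);                          // same major
        }
        if (declared.startsWith("~")) {
            return d.length >= 2 && i.length >= 2
                    && d[0].equals(i[0]) && d[1].equals(i[1]); // same major.minor
        }
        if (declared.contains("*")) {
            return d[0].equals("*")
                    || (d.length >= 2 && d[0].equals(i[0]) && d[1].equals("*"))
                    || (d.length >= 3 && i.length >= 2 && d[0].equals(i[0])
                        && d[1].equals(i[1]) && d[2].equals("*"));
        }
        return false;
    }

    public static void main(String[] args) {
        System.out.println(matches("^1.2.0", "1.9.3")); // true  (same major)
        System.out.println(matches("~1.2.0", "1.3.0")); // false (minor differs)
        System.out.println(matches("1.2.*", "1.2.7"));  // true  (wildcard patch)
    }
}
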
@@ -17,8 +17,6 @@
*/
package org.owasp.dependencycheck.analyzer;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.Engine;
@@ -29,7 +27,6 @@ import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.LoggerFactory;

/**
* NvdCveAnalyzer is a utility class that takes a project dependency and
@@ -44,31 +41,7 @@ public class NvdCveAnalyzer extends AbstractAnalyzer {
/**
* The Logger for use throughout the class
*/
private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(NvdCveAnalyzer.class);
/**
* The list of ecosystems to skip during analysis. These are skipped because
* there is generally a more accurate vulnerability analyzer in the
* pipeline.
*/
private List<String> skipEcosystems;

/**
* Initializes the analyzer with the configured settings.
*
* @param settings the configured settings to use
*/
@Override
public void initialize(Settings settings) {
super.initialize(settings);
final String[] tmp = settings.getArray(Settings.KEYS.ECOSYSTEM_SKIP_NVDCVE);
if (tmp == null) {
skipEcosystems = new ArrayList<>();
} else {
LOGGER.info("Skipping NVD CVE Analysis for {}", tmp);
skipEcosystems = Arrays.asList(tmp);
}
}

//private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(NvdCveAnalyzer.class);
/**
* Analyzes a dependency and attempts to determine if there are any CPE
* identifiers for this dependency.
@@ -80,10 +53,6 @@ public class NvdCveAnalyzer extends AbstractAnalyzer {
*/
@Override
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
if (skipEcosystems.contains(dependency.getEcosystem())) {
return;
}

final CveDB cveDB = engine.getDatabase();
for (Identifier id : dependency.getIdentifiers()) {
if ("cpe".equals(id.getType())) {
@@ -139,4 +108,4 @@ public class NvdCveAnalyzer extends AbstractAnalyzer {
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_NVD_CVE_ENABLED;
}
}
}