Mirror of https://github.com/ysoftdevs/DependencyCheck.git (synced 2026-01-14 15:53:36 +01:00)
Merge branch 'master' of github.com:jeremylong/DependencyCheck
@@ -14,9 +14,16 @@ script executable:

     $ chmod +777 dependency-check.sh

 To scan a folder on the system you can run:

 #set( $H = '#' )

+$H$H$H Homebrew
+
+    $ brew install dependency-check
+
+This puts an executable `dependency-check` script in the `/bin` directory of
+your homebrew installation.
+
+To scan a folder on the system you can run:

 $H$H$H Windows

     dependency-check.bat --app "My App Name" --scan "c:\java\application\lib"
@@ -29,4 +36,4 @@ $H$H$H Windows

     dependency-check.bat --help

 $H$H$H *nix

     dependency-check.sh --help
@@ -17,22 +17,6 @@
  */
 package org.owasp.dependencycheck.analyzer;

-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.FileFilter;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Enumeration;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
 import org.apache.commons.compress.archivers.ArchiveEntry;
 import org.apache.commons.compress.archivers.ArchiveInputStream;
 import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
@@ -40,6 +24,8 @@ import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
 import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
 import org.apache.commons.compress.archivers.zip.ZipFile;
 import org.apache.commons.compress.compressors.CompressorInputStream;
+import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
+import org.apache.commons.compress.compressors.bzip2.BZip2Utils;
 import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
 import org.apache.commons.compress.compressors.gzip.GzipUtils;
 import org.owasp.dependencycheck.Engine;
@@ -52,6 +38,9 @@ import org.owasp.dependencycheck.utils.Settings;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import java.io.*;
+import java.util.*;
+
 /**
  * <p>
  * An analyzer that extracts files from archives and ensures any supported files contained within the archive are added to the
@@ -100,20 +89,21 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
      */
     private static final Set<String> ZIPPABLES = newHashSet("zip", "ear", "war", "jar", "sar", "apk", "nupkg");
     /**
-     * The set of file extensions supported by this analyzer. Note for developers, any additions to this list will need to be
-     * explicitly handled in extractFiles().
+     * The set of file extensions supported by this analyzer. Note for developers, any additions to this list will need
+     * to be explicitly handled in {@link #extractFiles(File, File, Engine)}.
      */
-    private static final Set<String> EXTENSIONS = newHashSet("tar", "gz", "tgz");
+    private static final Set<String> EXTENSIONS = newHashSet("tar", "gz", "tgz", "bz2", "tbz2");

     /**
      * Detects files with extensions to remove from the engine's collection of dependencies.
      */
-    private static final FileFilter REMOVE_FROM_ANALYSIS = FileFilterBuilder.newInstance().addExtensions("zip", "tar", "gz", "tgz").build();
+    private static final FileFilter REMOVE_FROM_ANALYSIS =
+            FileFilterBuilder.newInstance().addExtensions("zip", "tar", "gz", "tgz", "bz2", "tbz2").build();

     static {
         final String additionalZipExt = Settings.getString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS);
         if (additionalZipExt != null) {
-            final Set<String> ext = new HashSet<String>(Arrays.asList(additionalZipExt));
+            final Set<String> ext = new HashSet<String>(Collections.singletonList(additionalZipExt));
             ZIPPABLES.addAll(ext);
         }
         EXTENSIONS.addAll(ZIPPABLES);
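A note on the one-line change above: `Arrays.asList` on a single `String` does not split it; it yields a one-element list, exactly like `Collections.singletonList`. So the change is behavior-preserving and just states the intent explicitly. A minimal standalone sketch (the `"rpm"` value is a placeholder, not from the commit):

```java
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

public class SingletonListDemo {
    public static void main(String[] args) {
        // Arrays.asList on a single String does NOT split it; one element results.
        Set<String> viaArrays = new HashSet<String>(Arrays.asList("rpm"));
        // Collections.singletonList makes the single-element intent explicit.
        Set<String> viaSingleton = new HashSet<String>(Collections.singletonList("rpm"));
        System.out.println(viaArrays.equals(viaSingleton)); // prints: true
    }
}
```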
@@ -205,7 +195,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
      * and added to the list of dependencies within the engine.
      *
      * @param dependency the dependency to analyze
-     * @param engine the engine scanning
+     * @param engine     the engine scanning
      * @throws AnalysisException thrown if there is an analysis exception
      */
     @Override
@@ -215,15 +205,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
             extractFiles(f, tmpDir, engine);

-            //make a copy
-            List<Dependency> dependencies = new ArrayList<Dependency>(engine.getDependencies());
-            engine.scan(tmpDir);
-            List<Dependency> newDependencies = engine.getDependencies();
-            if (dependencies.size() != newDependencies.size()) {
-                //get the new dependencies
-                final Set<Dependency> dependencySet = new HashSet<Dependency>();
-                dependencySet.addAll(newDependencies);
-                dependencySet.removeAll(dependencies);
-
+            final Set<Dependency> dependencySet = findMoreDependencies(engine, tmpDir);
+            if (!dependencySet.isEmpty()) {
                 for (Dependency d : dependencySet) {
                     //fix the dependency's display name and path
                     final String displayPath = String.format("%s%s",
@@ -245,41 +228,66 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
                 }
             }
             if (REMOVE_FROM_ANALYSIS.accept(dependency.getActualFile())) {
-                if (ZIP_FILTER.accept(dependency.getActualFile()) && isZipFileActuallyJarFile(dependency)) {
-                    final File tdir = getNextTempDirectory();
-                    final String fileName = dependency.getFileName();
-
-                    LOGGER.info(String.format("The zip file '%s' appears to be a JAR file, making a copy and analyzing it as a JAR.", fileName));
-
-                    final File tmpLoc = new File(tdir, fileName.substring(0, fileName.length() - 3) + "jar");
-                    try {
-                        org.apache.commons.io.FileUtils.copyFile(tdir, tmpLoc);
-                        dependencies = new ArrayList<Dependency>(engine.getDependencies());
-                        engine.scan(tmpLoc);
-                        newDependencies = engine.getDependencies();
-                        if (dependencies.size() != newDependencies.size()) {
-                            //get the new dependencies
-                            final Set<Dependency> dependencySet = new HashSet<Dependency>();
-                            dependencySet.addAll(newDependencies);
-                            dependencySet.removeAll(dependencies);
-                            if (dependencySet.size() != 1) {
-                                LOGGER.info("Deep copy of ZIP to JAR file resulted in more then one dependency?");
-                            }
-                            for (Dependency d : dependencySet) {
-                                //fix the dependency's display name and path
-                                d.setFilePath(dependency.getFilePath());
-                                d.setDisplayFileName(dependency.getFileName());
-                            }
-                        }
-                    } catch (IOException ex) {
-                        LOGGER.debug("Unable to perform deep copy on '{}'", dependency.getActualFile().getPath(), ex);
-                    }
-                }
+                addDisguisedJarsToDependencies(dependency, engine);
                 engine.getDependencies().remove(dependency);
             }
             Collections.sort(engine.getDependencies());
         }

+    private void addDisguisedJarsToDependencies(Dependency dependency, Engine engine) throws AnalysisException {
+        if (ZIP_FILTER.accept(dependency.getActualFile()) && isZipFileActuallyJarFile(dependency)) {
+            final File tdir = getNextTempDirectory();
+            final String fileName = dependency.getFileName();
+
+            LOGGER.info(String.format("The zip file '%s' appears to be a JAR file, making a copy and analyzing it as a JAR.", fileName));
+
+            final File tmpLoc = new File(tdir, fileName.substring(0, fileName.length() - 3) + "jar");
+            try {
+                org.apache.commons.io.FileUtils.copyFile(tdir, tmpLoc);
+                final Set<Dependency> dependencySet = findMoreDependencies(engine, tmpLoc);
+                if (!dependencySet.isEmpty()) {
+                    if (dependencySet.size() != 1) {
+                        LOGGER.info("Deep copy of ZIP to JAR file resulted in more than one dependency?");
+                    }
+                    for (Dependency d : dependencySet) {
+                        //fix the dependency's display name and path
+                        d.setFilePath(dependency.getFilePath());
+                        d.setDisplayFileName(dependency.getFileName());
+                    }
+                }
+            } catch (IOException ex) {
+                LOGGER.debug("Unable to perform deep copy on '{}'", dependency.getActualFile().getPath(), ex);
+            }
+        }
+    }
+
+    private static final Set<Dependency> EMPTY_DEPENDENCY_SET = Collections.emptySet();
+
+    /**
+     * Scan the given file/folder, and return any new dependencies found.
+     *
+     * @param engine used to scan
+     * @param file target of scanning
+     * @return any dependencies that weren't known to the engine before
+     */
+    private static Set<Dependency> findMoreDependencies(Engine engine, File file) {
+        List<Dependency> before = new ArrayList<Dependency>(engine.getDependencies());
+        engine.scan(file);
+        List<Dependency> after = engine.getDependencies();
+        final boolean sizeChanged = before.size() != after.size();
+        final Set<Dependency> newDependencies;
+        if (sizeChanged) {
+            //get the new dependencies
+            newDependencies = new HashSet<Dependency>();
+            newDependencies.addAll(after);
+            newDependencies.removeAll(before);
+        } else {
+            newDependencies = EMPTY_DEPENDENCY_SET;
+        }
+        return newDependencies;
+    }

     /**
      * Retrieves the next temporary directory to extract an archive too.
      *
@@ -303,47 +311,47 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
     /**
      * Extracts the contents of an archive into the specified directory.
      *
-     * @param archive an archive file such as a WAR or EAR
+     * @param archive     an archive file such as a WAR or EAR
      * @param destination a directory to extract the contents to
-     * @param engine the scanning engine
+     * @param engine      the scanning engine
      * @throws AnalysisException thrown if the archive is not found
      */
     private void extractFiles(File archive, File destination, Engine engine) throws AnalysisException {
-        if (archive != null && destination != null) {
-            FileInputStream fis = null;
-            try {
-                fis = new FileInputStream(archive);
-            } catch (FileNotFoundException ex) {
-                LOGGER.debug("", ex);
-                throw new AnalysisException("Archive file was not found.", ex);
-            }
-            final String archiveExt = FileUtils.getFileExtension(archive.getName()).toLowerCase();
-            try {
-                if (ZIPPABLES.contains(archiveExt)) {
-                    extractArchive(new ZipArchiveInputStream(new BufferedInputStream(fis)), destination, engine);
-                } else if ("tar".equals(archiveExt)) {
-                    extractArchive(new TarArchiveInputStream(new BufferedInputStream(fis)), destination, engine);
-                } else if ("gz".equals(archiveExt) || "tgz".equals(archiveExt)) {
-                    final String uncompressedName = GzipUtils.getUncompressedFilename(archive.getName());
-                    final File f = new File(destination, uncompressedName);
-                    if (engine.accept(f)) {
-                        decompressFile(new GzipCompressorInputStream(new BufferedInputStream(fis)), f);
-                    }
-                }
-            } catch (ArchiveExtractionException ex) {
-                LOGGER.warn("Exception extracting archive '{}'.", archive.getName());
-                LOGGER.debug("", ex);
-            } catch (IOException ex) {
-                LOGGER.warn("Exception reading archive '{}'.", archive.getName());
-                LOGGER.debug("", ex);
-            } finally {
-                if (fis != null) {
-                    try {
-                        fis.close();
-                    } catch (IOException ex) {
-                        LOGGER.trace("", ex);
-                    }
-                }
-            }
-        }
+        if (archive == null || destination == null) {
+            return;
+        }
+
+        FileInputStream fis;
+        try {
+            fis = new FileInputStream(archive);
+        } catch (FileNotFoundException ex) {
+            LOGGER.debug("", ex);
+            throw new AnalysisException("Archive file was not found.", ex);
+        }
+        final String archiveExt = FileUtils.getFileExtension(archive.getName()).toLowerCase();
+        try {
+            if (ZIPPABLES.contains(archiveExt)) {
+                extractArchive(new ZipArchiveInputStream(new BufferedInputStream(fis)), destination, engine);
+            } else if ("tar".equals(archiveExt)) {
+                extractArchive(new TarArchiveInputStream(new BufferedInputStream(fis)), destination, engine);
+            } else if ("gz".equals(archiveExt) || "tgz".equals(archiveExt)) {
+                final String uncompressedName = GzipUtils.getUncompressedFilename(archive.getName());
+                final File f = new File(destination, uncompressedName);
+                if (engine.accept(f)) {
+                    decompressFile(new GzipCompressorInputStream(new BufferedInputStream(fis)), f);
+                }
+            } else if ("bz2".equals(archiveExt) || "tbz2".equals(archiveExt)) {
+                final String uncompressedName = BZip2Utils.getUncompressedFilename(archive.getName());
+                final File f = new File(destination, uncompressedName);
+                if (engine.accept(f)) {
+                    decompressFile(new BZip2CompressorInputStream(new BufferedInputStream(fis)), f);
+                }
+            }
+        } catch (ArchiveExtractionException ex) {
+            LOGGER.warn("Exception extracting archive '{}'.", archive.getName());
+            LOGGER.debug("", ex);
+        } catch (IOException ex) {
+            LOGGER.warn("Exception reading archive '{}'.", archive.getName());
+            LOGGER.debug("", ex);
+        } finally {
+            close(fis);
+        }
     }
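For readers unfamiliar with the commons-compress classes the new `bz2`/`tbz2` branch relies on, here is a minimal, self-contained sketch of the same idea: stream-decompress a `.bz2` file to a sibling path. The file name is a placeholder; only `BZip2CompressorInputStream` and `BZip2Utils` come from the commit itself.

```java
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
import org.apache.commons.compress.compressors.bzip2.BZip2Utils;

public class Bzip2Demo {
    public static void main(String[] args) throws IOException {
        final String source = "file.tar.bz2"; // placeholder input path
        // "file.tar.bz2" -> "file.tar", mirroring how the analyzer names its output
        final String target = BZip2Utils.getUncompressedFilename(source);
        BZip2CompressorInputStream in = null;
        FileOutputStream out = null;
        try {
            in = new BZip2CompressorInputStream(new BufferedInputStream(new FileInputStream(source)));
            out = new FileOutputStream(target);
            final byte[] buffer = new byte[4096];
            int n;
            while ((n = in.read(buffer)) != -1) {
                out.write(buffer, 0, n);
            }
        } finally {
            if (in != null) {
                in.close();
            }
            if (out != null) {
                out.close();
            }
        }
    }
}
```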
@@ -351,84 +359,63 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
     /**
      * Extracts files from an archive.
      *
-     * @param input the archive to extract files from
+     * @param input       the archive to extract files from
      * @param destination the location to write the files too
-     * @param engine the dependency-check engine
+     * @param engine      the dependency-check engine
      * @throws ArchiveExtractionException thrown if there is an exception extracting files from the archive
      */
     private void extractArchive(ArchiveInputStream input, File destination, Engine engine) throws ArchiveExtractionException {
         ArchiveEntry entry;
         try {
             while ((entry = input.getNextEntry()) != null) {
+                final File file = new File(destination, entry.getName());
                 if (entry.isDirectory()) {
-                    final File d = new File(destination, entry.getName());
-                    if (!d.exists()) {
-                        if (!d.mkdirs()) {
-                            final String msg = String.format("Unable to create directory '%s'.", d.getAbsolutePath());
-                            throw new AnalysisException(msg);
-                        }
-                    }
-                } else {
-                    final File file = new File(destination, entry.getName());
-                    if (engine.accept(file)) {
-                        LOGGER.debug("Extracting '{}'", file.getPath());
-                        BufferedOutputStream bos = null;
-                        FileOutputStream fos = null;
-                        try {
-                            final File parent = file.getParentFile();
-                            if (!parent.isDirectory()) {
-                                if (!parent.mkdirs()) {
-                                    final String msg = String.format("Unable to build directory '%s'.", parent.getAbsolutePath());
-                                    throw new AnalysisException(msg);
-                                }
-                            }
-                            fos = new FileOutputStream(file);
-                            bos = new BufferedOutputStream(fos, BUFFER_SIZE);
-                            int count;
-                            final byte[] data = new byte[BUFFER_SIZE];
-                            while ((count = input.read(data, 0, BUFFER_SIZE)) != -1) {
-                                bos.write(data, 0, count);
-                            }
-                            bos.flush();
-                        } catch (FileNotFoundException ex) {
-                            LOGGER.debug("", ex);
-                            final String msg = String.format("Unable to find file '%s'.", file.getName());
-                            throw new AnalysisException(msg, ex);
-                        } catch (IOException ex) {
-                            LOGGER.debug("", ex);
-                            final String msg = String.format("IO Exception while parsing file '%s'.", file.getName());
-                            throw new AnalysisException(msg, ex);
-                        } finally {
-                            if (bos != null) {
-                                try {
-                                    bos.close();
-                                } catch (IOException ex) {
-                                    LOGGER.trace("", ex);
-                                }
-                            }
-                            if (fos != null) {
-                                try {
-                                    fos.close();
-                                } catch (IOException ex) {
-                                    LOGGER.trace("", ex);
-                                }
-                            }
-                        }
-                    }
+                    if (!file.exists() && !file.mkdirs()) {
+                        final String msg = String.format("Unable to create directory '%s'.", file.getAbsolutePath());
+                        throw new AnalysisException(msg);
+                    }
+                } else if (engine.accept(file)) {
+                    extractAcceptedFile(input, file);
                 }
             }
         } catch (IOException ex) {
             throw new ArchiveExtractionException(ex);
         } catch (Throwable ex) {
             throw new ArchiveExtractionException(ex);
         } finally {
-            if (input != null) {
-                try {
-                    input.close();
-                } catch (IOException ex) {
-                    LOGGER.trace("", ex);
-                }
-            }
+            close(input);
         }
     }

+    private static void extractAcceptedFile(ArchiveInputStream input, File file) throws AnalysisException {
+        LOGGER.debug("Extracting '{}'", file.getPath());
+        BufferedOutputStream bos = null;
+        FileOutputStream fos = null;
+        try {
+            final File parent = file.getParentFile();
+            if (!parent.isDirectory()) {
+                if (!parent.mkdirs()) {
+                    final String msg = String.format("Unable to build directory '%s'.", parent.getAbsolutePath());
+                    throw new AnalysisException(msg);
+                }
+            }
+            fos = new FileOutputStream(file);
+            bos = new BufferedOutputStream(fos, BUFFER_SIZE);
+            int count;
+            final byte[] data = new byte[BUFFER_SIZE];
+            while ((count = input.read(data, 0, BUFFER_SIZE)) != -1) {
+                bos.write(data, 0, count);
+            }
+            bos.flush();
+        } catch (FileNotFoundException ex) {
+            LOGGER.debug("", ex);
+            final String msg = String.format("Unable to find file '%s'.", file.getName());
+            throw new AnalysisException(msg, ex);
+        } catch (IOException ex) {
+            LOGGER.debug("", ex);
+            final String msg = String.format("IO Exception while parsing file '%s'.", file.getName());
+            throw new AnalysisException(msg, ex);
+        } finally {
+            close(bos);
+            close(fos);
+        }
+    }
@@ -436,7 +423,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
      * Decompresses a file.
      *
      * @param inputStream the compressed file
-     * @param outputFile the location to write the decompressed file
+     * @param outputFile  the location to write the decompressed file
      * @throws ArchiveExtractionException thrown if there is an exception decompressing the file
      */
     private void decompressFile(CompressorInputStream inputStream, File outputFile) throws ArchiveExtractionException {
@@ -445,7 +432,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
         try {
             out = new FileOutputStream(outputFile);
             final byte[] buffer = new byte[BUFFER_SIZE];
-            int n = 0;
+            int n;
             while (-1 != (n = inputStream.read(buffer))) {
                 out.write(buffer, 0, n);
             }
@@ -456,12 +443,21 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
             LOGGER.debug("", ex);
             throw new ArchiveExtractionException(ex);
         } finally {
-            if (out != null) {
-                try {
-                    out.close();
-                } catch (IOException ex) {
-                    LOGGER.trace("", ex);
-                }
-            }
+            close(out);
         }
     }

+    /**
+     * Close the given {@link Closeable} instance, ignoring nulls, and logging any thrown {@link IOException}.
+     *
+     * @param closeable to be closed
+     */
+    private static void close(Closeable closeable) {
+        if (null != closeable) {
+            try {
+                closeable.close();
+            } catch (IOException ex) {
+                LOGGER.trace("", ex);
+            }
+        }
+    }
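The new `close` helper consolidates four copies of the same null-checked, exception-swallowing close block. As a side note on the design: on Java 7 and later the same cleanup falls out of try-with-resources, which this codebase (apparently targeting an older Java level, given the explicit generics and manual closes) could not use. A hypothetical sketch of the modern equivalent, not part of this commit:

```java
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class TryWithResourcesDemo {
    public static void main(String[] args) throws IOException {
        // Java 7+ closes the stream automatically, even when an exception
        // is thrown, making helpers like close(Closeable) unnecessary.
        // "archive.zip" is a placeholder path.
        try (BufferedInputStream in = new BufferedInputStream(new FileInputStream("archive.zip"))) {
            System.out.println("first byte: " + in.read());
        }
    }
}
```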
@@ -51,6 +51,8 @@ public class ArchiveAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
         expResult.add("tar");
         expResult.add("gz");
         expResult.add("tgz");
+        expResult.add("bz2");
+        expResult.add("tbz2");
         for (String ext : expResult) {
             assertTrue(ext, instance.accept(new File("test." + ext)));
         }
@@ -197,28 +199,31 @@ public class ArchiveAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
         }
     }

-//    /**
-//     * Test of analyze method, of class ArchiveAnalyzer.
-//     */
-//    @Test
-//    public void testNestedZipFolder() throws Exception {
-//        ArchiveAnalyzer instance = new ArchiveAnalyzer();
-//        try {
-//            instance.initialize();
-//
-//            File file = new File(this.getClass().getClassLoader().getResource("nested.zip").getPath());
-//            Settings.setBoolean(Settings.KEYS.AUTO_UPDATE, false);
-//            Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, false);
-//            Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, false);
-//            Engine engine = new Engine();
-//
-//            engine.scan(file);
-//            engine.analyzeDependencies();
-//
-//        } finally {
-//            instance.close();
-//        }
-//    }
+    /**
+     * Test of analyze method, of class ArchiveAnalyzer.
+     */
+    @Test
+    public void testAnalyzeTarBz2() throws Exception {
+        ArchiveAnalyzer instance = new ArchiveAnalyzer();
+        instance.accept(new File("zip")); //ensure analyzer is "enabled"
+        try {
+            instance.initialize();
+            File file = BaseTest.getResourceAsFile(this, "file.tar.bz2");
+            Settings.setBoolean(Settings.KEYS.AUTO_UPDATE, false);
+            Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, false);
+            Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, false);
+            Engine engine = new Engine();
+            int initial_size = engine.getDependencies().size();
+            engine.scan(file);
+            engine.analyzeDependencies();
+            int ending_size = engine.getDependencies().size();
+            engine.cleanup();
+            assertTrue(initial_size < ending_size);
+        } finally {
+            instance.close();
+        }
+    }

     /**
      * Test of analyze method, of class ArchiveAnalyzer.
      */
@@ -248,6 +253,31 @@ public class ArchiveAnalyzerIntegrationTest extends AbstractDatabaseTestCase {
         }
     }

+    /**
+     * Test of analyze method, of class ArchiveAnalyzer.
+     */
+    @Test
+    public void testAnalyzeTbz2() throws Exception {
+        ArchiveAnalyzer instance = new ArchiveAnalyzer();
+        instance.accept(new File("zip")); //ensure analyzer is "enabled"
+        try {
+            instance.initialize();
+            File file = BaseTest.getResourceAsFile(this, "file.tbz2");
+            Settings.setBoolean(Settings.KEYS.AUTO_UPDATE, false);
+            Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, false);
+            Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, false);
+            Engine engine = new Engine();
+            int initial_size = engine.getDependencies().size();
+            engine.scan(file);
+            engine.analyzeDependencies();
+            int ending_size = engine.getDependencies().size();
+            engine.cleanup();
+            assertTrue(initial_size < ending_size);
+        } finally {
+            instance.close();
+        }
+    }
+
     /**
      * Test of analyze method, of class ArchiveAnalyzer.
      */
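The two new test resources referenced below (`file.tar.bz2`, `file.tbz2`) are binary and not shown. If a similar fixture ever needs to be regenerated, a sketch along these lines should work, using the same commons-compress classes the analyzer depends on; the entry name and payload here are made up:

```java
import java.io.ByteArrayInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream;
import org.apache.commons.compress.utils.IOUtils;

public class MakeTarBz2Fixture {
    public static void main(String[] args) throws IOException {
        final byte[] content = "hello".getBytes("UTF-8"); // made-up payload
        TarArchiveOutputStream tar = null;
        try {
            // Wrap the tar stream in a bzip2 compressor to produce file.tar.bz2
            tar = new TarArchiveOutputStream(
                    new BZip2CompressorOutputStream(new FileOutputStream("file.tar.bz2")));
            final TarArchiveEntry entry = new TarArchiveEntry("sample.txt"); // made-up entry name
            entry.setSize(content.length);
            tar.putArchiveEntry(entry);
            IOUtils.copy(new ByteArrayInputStream(content), tar);
            tar.closeArchiveEntry();
        } finally {
            if (tar != null) {
                tar.close();
            }
        }
    }
}
```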
BIN  dependency-check-core/src/test/resources/file.tar.bz2 (new file; binary file not shown)
BIN  dependency-check-core/src/test/resources/file.tbz2 (new file; binary file not shown)
1  dependency-check-gradle/.gitignore (vendored)

@@ -1,5 +1,6 @@
 .idea/
 .gradle
+gradle/

 *.iml
 *.ipr
@@ -7,6 +7,8 @@ This is a DependencyCheck gradle plugin designed for projects which use Gradle as

 Dependency-Check is a utility that attempts to detect publicly disclosed vulnerabilities contained within project dependencies. It does this by determining if there is a Common Platform Enumeration (CPE) identifier for a given dependency. If found, it will generate a report linking to the associated CVE entries.

+The current latest version is `0.0.6`.
+
 =========

 ## Usage
@@ -15,7 +17,7 @@ Dependency-Check is a utility that attempts to detect publicly disclosed vulnera

 Please refer to either one of the solutions

-#### Solution 1, Install from Maven Central
+#### Solution 1, Install from Maven Central (Recommended)

 ```groovy
 buildscript {
@@ -23,7 +25,7 @@ buildscript {
     mavenCentral()
   }
   dependencies {
-    classpath 'com.thoughtworks.tools:dependency-check:0.0.5'
+    classpath 'com.thoughtworks.tools:dependency-check:0.0.6'
   }
 }
 ```
@@ -38,7 +40,7 @@ apply plugin: 'dependency.check'

 ```groovy
 plugins {
-  id "dependency.check" version "0.0.5"
+  id "dependency.check" version "0.0.6"
 }
 ```

@@ -52,11 +54,11 @@ buildscript {
     }
   }
   dependencies {
-    classpath "gradle.plugin.com.tools.security:dependency-check:0.0.5"
+    classpath "gradle.plugin.com.tools.security:dependency-check:0.0.6"
   }
 }

-apply plugin: "dependency.check"
+apply plugin: "dependency-check"
 ```

 #### Solution 3, Install from Bintray
@@ -73,7 +75,7 @@ buildscript {
   }
   dependencies {
     classpath(
-        'com.tools.security:dependency-check:0.0.5'
+        'com.tools.security:dependency-check:0.0.6'
     )
   }
 }
@@ -115,6 +117,19 @@ dependencyCheck {
 }
 ```

+In addition, if the proxy only allows the HTTP `GET` or `POST` methods, the update process will always fail.
+The root cause is that every time you run the `dependencyCheck` task, it queries the latest timestamp to determine whether an update is needed,
+and for performance reasons the HTTP method it uses by default is `HEAD`, which such a proxy may reject or not support. To avoid this problem, you can switch the HTTP method with the configuration below:
+
+```groovy
+dependencyCheck {
+    proxyServer = "127.0.0.1"    // required, the server name or IP address of the proxy
+    proxyPort = 3128             // required, the port number of the proxy
+
+    quickQueryTimestamp = false  // when false, use the HTTP GET method to query the timestamp (default is true, which uses HEAD)
+}
+```
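Under the hood the plugin simply forwards this flag to the core `Settings` before the engine runs (see the `overrideDownloaderSetting()` change later in this commit). A minimal sketch of the equivalent direct call against the dependency-check-utils API, assuming only the calls visible in this diff:

```java
import org.owasp.dependencycheck.utils.Settings;

public class QuickQueryDemo {
    public static void main(String[] args) {
        Settings.initialize();
        // false == probe the NVD timestamp with GET instead of HEAD
        Settings.setBoolean(Settings.KEYS.DOWNLOADER_QUICK_QUERY_TIMESTAMP, false);
    }
}
```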
### What if my project includes multiple sub-projects? How can I use this plugin for each of them, including the root project?

Put `apply plugin: "dependency-check"` inside the `allprojects` block, or inside `subprojects` if you'd like to check only the sub-projects, as shown below:
@@ -127,7 +142,7 @@ buildscript {
     mavenCentral()
   }
   dependencies {
-    classpath "gradle.plugin.com.tools.security:dependency-check:0.0.5"
+    classpath "gradle.plugin.com.tools.security:dependency-check:0.0.6"
   }
 }

@@ -144,7 +159,7 @@ buildscript {
     mavenCentral()
   }
   dependencies {
-    classpath "gradle.plugin.com.tools.security:dependency-check:0.0.5"
+    classpath "gradle.plugin.com.tools.security:dependency-check:0.0.6"
   }
 }

@@ -46,23 +46,15 @@ dependencies {
     compile(
         localGroovy(),
         gradleApi(),
-        'org.owasp:dependency-check-core:1.2.11',
-        'org.owasp:dependency-check-utils:1.2.11'
+        'org.owasp:dependency-check-core:1.3.0',
+        'org.owasp:dependency-check-utils:1.3.0'
     )

-    testCompile ('com.netflix.nebula:nebula-test:2.2.+'){
+    testCompile ('com.netflix.nebula:nebula-test:2.2.2'){
         exclude group: 'org.codehaus.groovy'
     }
 }

-group = 'com.thoughtworks.tools'
-version = '0.0.5'
-
-apply from: 'conf/publish/local.gradle'
-//apply from: 'conf/publish/maven.gradle'
-apply from: 'conf/publish/gradlePluginsPortal.gradle'
-//apply from: 'conf/publish/bintray.gradle' // according to the documentation of plugindev, this line has to be placed at the very end of the build file
-
 sourceSets {
     integTest {
         groovy.srcDir file('src/integTest/groovy')
@@ -78,4 +70,12 @@ task integTest(type: Test) {
         classpath = sourceSets.integTest.runtimeClasspath
         reports.html.destination = file("$buildDir/reports/integ")
         jvmArgs '-XX:MaxPermSize=256m'
     }
 }

+group = 'com.thoughtworks.tools'
+version = '0.0.6'
+
+apply from: 'conf/publish/local.gradle'
+//apply from: 'conf/publish/maven.gradle'
+apply from: 'conf/publish/gradlePluginsPortal.gradle'
+//apply from: 'conf/publish/bintray.gradle' // according to the documentation of plugindev, this line has to be placed at the very end of the build file
@@ -66,6 +66,11 @@ task javadocJar(type: Jar) {
     from javadoc
 }

+task sourcesJar(type: Jar, dependsOn: classes) {
+    classifier = 'sources'
+    from sourceSets.main.allSource
+}
+
 artifacts {
     archives javadocJar, sourcesJar
 }
@@ -31,4 +31,6 @@ class DependencyCheckConfigurationExtension {
     String cveUrl20Base = "https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml.gz"

     String outputDirectory = "./reports"
+
+    Boolean quickQueryTimestamp = true;
 }
@@ -49,6 +49,7 @@ class DependencyCheckGradlePlugin implements Plugin<Project> {
             conventionMapping.cveUrl12Base = { extension.cveUrl12Base }
             conventionMapping.cveUrl20Base = { extension.cveUrl20Base }
             conventionMapping.outputDirectory = { extension.outputDirectory }
+            conventionMapping.quickQueryTimestamp = { extension.quickQueryTimestamp }
         }
     }
 }
@@ -28,6 +28,7 @@ import org.owasp.dependencycheck.dependency.Dependency
 import org.owasp.dependencycheck.reporting.ReportGenerator
 import org.owasp.dependencycheck.utils.Settings

+import static org.owasp.dependencycheck.utils.Settings.setBoolean
 import static org.owasp.dependencycheck.utils.Settings.setString

 class DependencyCheckTask extends DefaultTask {
@@ -47,6 +48,8 @@ class DependencyCheckTask extends DefaultTask {

     String outputDirectory = "./reports"

+    Boolean quickQueryTimestamp = true;
+
     DependencyCheckTask() {
         group = 'Dependency Check'
         description = 'Produce dependency security report.'
@@ -73,6 +76,7 @@ class DependencyCheckTask extends DefaultTask {
         Settings.initialize()
         overrideProxySetting()
         overrideCveUrlSetting()
+        overrideDownloaderSetting()
     }

     def cleanup(engine) {
@@ -140,4 +144,8 @@ class DependencyCheckTask extends DefaultTask {
         setString(Settings.KEYS.CVE_SCHEMA_2_0, getCveUrl20Base())
         setString(Settings.KEYS.CVE_SCHEMA_1_2, getCveUrl12Base())
     }
+
+    def overrideDownloaderSetting() {
+        setBoolean(Settings.KEYS.DOWNLOADER_QUICK_QUERY_TIMESTAMP, getQuickQueryTimestamp())
+    }
 }
@@ -0,0 +1,19 @@
#
# This file is part of dependency-check-gradle.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright (c) 2015 Wei Ma. All Rights Reserved.
#

implementation-class=com.tools.security.plugin.DependencyCheckGradlePlugin
@@ -58,6 +58,7 @@ class DependencyCheckGradlePluginSpec extends PluginProjectSpec {
         task.cveUrl12Base == 'https://nvd.nist.gov/download/nvdcve-%d.xml.gz'
         task.cveUrl20Base == 'https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml.gz'
         task.outputDirectory == './reports'
+        task.quickQueryTimestamp == true
     }

     def 'tasks use correct values when extension is used'() {
@@ -73,6 +74,7 @@ class DependencyCheckGradlePluginSpec extends PluginProjectSpec {
             cveUrl12Base = 'cveUrl12Base'
             cveUrl20Base = 'cveUrl20Base'
             outputDirectory = 'outputDirectory'
+            quickQueryTimestamp = false
         }

         then:
@@ -87,5 +89,6 @@ class DependencyCheckGradlePluginSpec extends PluginProjectSpec {
         task.cveUrl12Base == 'cveUrl12Base'
         task.cveUrl20Base == 'cveUrl20Base'
         task.outputDirectory == 'outputDirectory'
+        task.quickQueryTimestamp == false
     }
 }
93  src/site/markdown/data/cachenvd.md (new file)

@@ -0,0 +1,93 @@
Snapshotting the NVD
====================

The [Mirroring the NVD from NIST](./mirrornvd.html) topic describes briefly
how to use the [Nist-Data-Mirror](https://github.com/stevespringett/nist-data-mirror/)
project to cache the NVD locally and run Dependency Check (D-C) against the
local cache.

This topic goes into a bit more depth with the [cli](../dependency-check-cli/index.html)
client, focusing on the following use case:

1. You wish to have daily local snapshots of the NVD, so that
2. you can compare later runs of D-C with earlier runs, "apples with apples".

In other words: it is sometimes desirable to run a comparison D-C analysis
against the same NVD snapshot that an earlier D-C report used.

In the steps below, concrete examples are given assuming an Ubuntu Linux
system. Hopefully, enough explanation is provided that the steps can easily be
translated to other systems.

Build Nist-Data-Mirror
----------------------

1. Perform a "git clone" of [Nist-Data-Mirror](https://github.com/stevespringett/nist-data-mirror/).
2. Install gradle, if necessary. See [here](http://gradle.org/gradle-download/)
   or your Linux distribution's package management system (e.g.,
   `sudo apt-get install gradle`).
3. Follow the [build instructions](https://github.com/stevespringett/nist-data-mirror/blob/master/README.md#user-content-build).
   You will be left with a build artifact called `nist-data-mirror-1.0.0.jar`.

Set Up a Daily NVD Download Job
-------------------------------

On Linux, the way to do this is with the [cron daemon](http://linux.die.net/man/8/cron).
"Cron jobs" are configured by invoking [crontab](http://linux.die.net/man/5/crontab).
For example, invoke `crontab -e` to add a line like the following to your
crontab file:

    4 5 * * * ~/.local/bin/nvd_download.sh ~/NVD ~/.local/jars

This would run a job on your system at 4:05 AM daily to run the
[nvd_download.sh](general/nvd_download.sh) shell script with the two given
arguments. The script is simple:

```sh
#!/bin/sh
NVD_ROOT=$1/`date -I`
JAR_PATH=$2/nist-data-mirror-1.0.0.jar
java -jar $JAR_PATH $NVD_ROOT
rm $NVD_ROOT/*.xml # D-C works directly with .gz files anyway.
```

Nist-Data-Mirror will automatically create the directory, download the
.xml.gz files, and extract the .xml files alongside them. Given the parameters
in the cron example above, the new directory will be `~/NVD/2015-08-03` if
executed on August 3<sup>rd</sup>, 2015. The download for 2015-08-03 pulled 47
MiB, and took up a total of 668 MiB after extracting from the compressed
archive format. It turns out that D-C works directly with the .xml.gz files,
so the above script preserves disk space by deleting the .xml files.

Invoke the Command-Line Using a Specific Daily Snapshot
-------------------------------------------------------

An example script named [dep-check-date.sh](general/dep-check-date.sh) is
shown below, which facilitates a D-C scan against an arbitrary NVD snapshot:

```sh
#!/bin/sh
CLI_LOCATION=~/.local/dependency-check-1.2.11
CLI_SCRIPT=$CLI_LOCATION/bin/dependency-check.sh
NVD_PATH=$1/`date -I -d $2`
NVD=file://$NVD_PATH
shift 2 # We've used the first two params. The rest go to CLI_SCRIPT.
$CLI_SCRIPT --cveUrl20Base $NVD/nvdcve-2.0-%d.xml.gz \
    --cveUrl12Base $NVD/nvdcve-%d.xml.gz \
    --cveUrl20Modified $NVD/nvdcve-2.0-Modified.xml.gz \
    --cveUrl12Modified $NVD/nvdcve-Modified.xml.gz \
    --data $NVD_PATH $@
```

The script takes advantage of the `date` command's ability to parse a variety
of date formats. The following invocation would successfully point to the
`~/NVD/2015-08-03` folder:

    $ ./dep-check-date.sh ~/NVD "08/03/2015" -app Foo -scan /path/to/Foo --out ~/DCreports/FooFollowup/

If today happened to be August 4th, 2015, `"yesterday"` also would have
worked. Also notice the usage of the `--data` parameter. This places the H2
database file directly in the folder alongside the .xml.gz files. This is
critical, so that D-C doesn't run against another version of the database,
like the usual default in `$CLI_LOCATION/data`.
11  src/site/resources/general/dep-check-date.sh (new executable file)

@@ -0,0 +1,11 @@
#!/bin/sh
CLI_LOCATION=~/.local/dependency-check-1.2.11
CLI_SCRIPT=$CLI_LOCATION/bin/dependency-check.sh
NVD_PATH=$1/`date -I -d $2`
NVD=file://$NVD_PATH
shift 2 # We've used the first two params. The rest go to CLI_SCRIPT.
$CLI_SCRIPT --cveUrl20Base $NVD/nvdcve-2.0-%d.xml.gz \
    --cveUrl12Base $NVD/nvdcve-%d.xml.gz \
    --cveUrl20Modified $NVD/nvdcve-2.0-Modified.xml.gz \
    --cveUrl12Modified $NVD/nvdcve-Modified.xml.gz \
    --data $NVD_PATH $@
5  src/site/resources/general/nvd_download.sh (new executable file)

@@ -0,0 +1,5 @@
#!/bin/sh
NVD_ROOT=$1/`date -I`
JAR_PATH=$2/nist-data-mirror-1.0.0.jar
java -jar $JAR_PATH $NVD_ROOT
rm $NVD_ROOT/*.xml # D-C works directly with .gz files anyway.
@@ -87,6 +87,7 @@ Copyright (c) 2013 Jeremy Long. All Rights Reserved.
             <item collapse="true" name="Internet Access Required" href="./data/index.html">
                 <item name="Proxy" href="./data/proxy.html" />
                 <item name="Mirroring NVD" href="./data/mirrornvd.html" />
+                <item name="Snapshotting the NVD" href="./data/cachenvd.html" />
                 <item name="Central DB" href="./data/database.html" />
             </item>
             <item name="Related Work" href="./related.html">