Mirror of https://github.com/ysoftdevs/DependencyCheck.git (synced 2026-01-14 07:43:40 +01:00)
changes to resolve issues with multiple connections to the embedded H2 database
19 .travis.yml
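Background for the diff below: rather than letting several processes open the embedded H2 database at the same time, the commit serializes access through an exclusive lock file that is held while the database is initialized, updated, or copied. The following is a minimal, illustrative sketch of that file-lock pattern only; the class name, lock-file path, and retry interval are placeholders and not part of the commit (the actual implementation is the H2DBLock class shown in the diff).

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileLock;

// Minimal sketch of the file-lock pattern the commit introduces (see H2DBLock below):
// create a lock file next to the H2 database, take an exclusive OS-level lock on it,
// perform the update while the lock is held, then release the lock and remove the file.
public final class DatabaseLockSketch {

    public static void main(String[] args) throws IOException, InterruptedException {
        final File lockFile = new File("odc.update.lock"); // placeholder location
        final RandomAccessFile raf = new RandomAccessFile(lockFile, "rw");
        FileLock lock = null;
        try {
            while (lock == null) {
                lock = raf.getChannel().tryLock(); // null when another process holds the lock
                if (lock == null) {
                    Thread.sleep(10_000);          // back off and retry
                }
            }
            // ... update or copy the H2 database while holding the exclusive lock ...
        } finally {
            if (lock != null && lock.isValid()) {
                lock.release();
            }
            raf.close();
            lockFile.delete(); // best-effort clean-up of the lock file
        }
    }
}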
@@ -29,12 +29,12 @@ matrix:
env:
- JDK="JDK7"
script:
- if [ ! -z "$TRAVIS_TAG" ]; then travis_wait 25 mvn install site site:stage -DreleaseTesting; else travis_wait 25 mvn install -DreleaseTesting; fi
- if [ ! -z "$TRAVIS_TAG" ]; then travis_wait 35 mvn install site site:stage -DreleaseTesting; else travis_wait 35 mvn install -DreleaseTesting; fi
- jdk: oraclejdk8
env:
- JDK="JDK8"
script:
- travis_wait 25 mvn install -DreleaseTesting
- travis_wait 35 mvn install -DreleaseTesting

after_success:
- if [ "$JDK" == "JDK8" ]; then
@@ -43,6 +43,21 @@ after_success:
./coverity_scan.sh;
fi;

after_failure:
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/617-hierarchical-cross-deps/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/618-aggregator-purge/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/618-aggregator-update-only/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/629-jackson-dataformat/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/690-threadsafety/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/710-pom-parse-error/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/729-system-scope-resolved/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/729-system-scope-skipped/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/730-multiple-suppression-files/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/730-multiple-suppression-files-configs/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/815-broken-suppression-aggregate/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/846-site-plugin/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/false-positives/build.log

deploy:
- provider: script
script: mvn --settings .travis.settings.xml source:jar javadoc:jar package deploy -DskipTests=true -X

@@ -1001,8 +1001,6 @@ public class Check extends Update {
throw new BuildException(msg, ex);
}
log(msg, ex, Project.MSG_ERR);
} finally {
getSettings().cleanup(true);
}
}

@@ -400,8 +400,6 @@ public class Update extends Purge {
throw new BuildException(msg, ex);
}
log(msg, Project.MSG_ERR);
} finally {
getSettings().cleanup(true);
}
}

@@ -42,6 +42,9 @@ import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.nio.file.CopyOption;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -60,6 +63,9 @@ import java.util.concurrent.TimeUnit;
import javax.annotation.concurrent.NotThreadSafe;

import static org.owasp.dependencycheck.analyzer.AnalysisPhase.*;
import org.owasp.dependencycheck.exception.H2DBLockException;
import org.owasp.dependencycheck.utils.FileUtils;
import org.owasp.dependencycheck.utils.H2DBLock;

/**
* Scans files, directories, etc. for Dependencies. Analyzers are loaded and
@@ -710,7 +716,7 @@ public class Engine implements FileFilter, AutoCloseable {
}
if (autoUpdate) {
try {
doUpdates();
doUpdates(true);
} catch (UpdateException ex) {
exceptions.add(ex);
LOGGER.warn("Unable to update Cached Web DataSource, using local "
@@ -724,7 +730,7 @@ public class Engine implements FileFilter, AutoCloseable {
if (ConnectionFactory.isH2Connection(settings) && !ConnectionFactory.h2DataFileExists(settings)) {
throw new ExceptionCollection(new NoDataException("Autoupdate is disabled and the database does not exist"), true);
} else {
openDatabase();
openDatabase(true, true);
}
} catch (IOException ex) {
throw new ExceptionCollection(new DatabaseException("Autoupdate is disabled and unable to connect to the database"), true);
@@ -854,28 +860,114 @@ public class Engine implements FileFilter, AutoCloseable {
* @throws UpdateException thrown if the operation fails
*/
public void doUpdates() throws UpdateException {
doUpdates(false);
}

/**
* Cycles through the cached web data sources and calls update on all of
* them.
*
* @param remainOpen whether or not the database connection should remain
* open
* @throws UpdateException thrown if the operation fails
*/
public void doUpdates(boolean remainOpen) throws UpdateException {
if (mode.isDatabseRequired()) {
openDatabase();
LOGGER.info("Checking for updates");
final long updateStart = System.currentTimeMillis();
final UpdateService service = new UpdateService(serviceClassLoader);
final Iterator<CachedWebDataSource> iterator = service.getDataSources();
while (iterator.hasNext()) {
final CachedWebDataSource source = iterator.next();
source.update(this);
H2DBLock dblock = null;
try {
if (ConnectionFactory.isH2Connection(settings)) {
dblock = new H2DBLock(settings);
LOGGER.debug("locking for update");
dblock.lock();
}
openDatabase(false, false);
LOGGER.info("Checking for updates");
final long updateStart = System.currentTimeMillis();
final UpdateService service = new UpdateService(serviceClassLoader);
final Iterator<CachedWebDataSource> iterator = service.getDataSources();
while (iterator.hasNext()) {
final CachedWebDataSource source = iterator.next();
source.update(this);
}
database.close();
database = null;
LOGGER.info("Check for updates complete ({} ms)", System.currentTimeMillis() - updateStart);
if (remainOpen) {
openDatabase(true, false);
}
} catch (H2DBLockException ex) {
throw new UpdateException("Unable to obtain an exclusive lock on the H2 database to perform updates", ex);
} finally {
if (dblock != null) {
dblock.release();
}
}
LOGGER.info("Check for updates complete ({} ms)", System.currentTimeMillis() - updateStart);
} else {
LOGGER.info("Skipping update check in evidence collection mode.");
}
}

/**
* Opens the database connection.
* <p>
* This method is only public for unit/integration testing. This method
* should not be called by any integration that uses
* dependency-check-core.</p>
* <p>
* Opens the database connection.</p>
*/
public void openDatabase() {
openDatabase(false, true);
}

/**
* <p>
* This method is only public for unit/integration testing. This method
* should not be called by any integration that uses
* dependency-check-core.</p>
* <p>
* Opens the database connection; if readOnly is true a copy of the database
* will be made.</p>
*
* @param readOnly whether or not the database connection should be readonly
* @param lockRequired whether or not a lock needs to be acquired when
* opening the database
*/
public void openDatabase(boolean readOnly, boolean lockRequired) {
if (mode.isDatabseRequired() && database == null) {
//needed to update schema any required schema changes
database = new CveDB(settings);
if (readOnly
&& ConnectionFactory.isH2Connection(settings)
&& settings.getString(Settings.KEYS.DB_CONNECTION_STRING).contains("file:%s")) {
H2DBLock lock = null;
try {
File db = ConnectionFactory.getH2DataFile(settings);
if (db.isFile()) {
database.close();
if (lockRequired) {
lock = new H2DBLock(settings);
lock.lock();
}
LOGGER.debug("copying database");
File temp = settings.getTempDirectory();
File tempDB = new File(temp, db.getName());
Files.copy(db.toPath(), tempDB.toPath());
LOGGER.debug("copying complete '{}'", temp.toPath());
settings.setString(Settings.KEYS.DATA_DIRECTORY, temp.getPath());
String connStr = settings.getString(Settings.KEYS.DB_CONNECTION_STRING);
settings.setString(Settings.KEYS.DB_CONNECTION_STRING, connStr + "ACCESS_MODE_DATA=r");
database = new CveDB(settings);
}
} catch (IOException ex) {
LOGGER.debug("Unable to open db in read only mode", ex);
} catch (H2DBLockException ex) {
LOGGER.debug("Failed to obtain lock - unable to open db in read only mode", ex);
} finally {
if (lock != null) {
lock.release();
}
}
}
}
}

@@ -105,8 +105,8 @@ public class DependencyCheckScanAgent {
*/
private boolean autoUpdate = true;
/**
* Sets whether the data directory should be updated without performing a scan.
* Default is false.
* Sets whether the data directory should be updated without performing a
* scan. Default is false.
*/
private boolean updateOnly = false;
/**
@@ -221,8 +221,8 @@ public class DependencyCheckScanAgent {
*/
private Settings settings;
/**
* The path to optional dependency-check properties file. This will be
* used to side-load additional user-defined properties.
* The path to optional dependency-check properties file. This will be used
* to side-load additional user-defined properties.
* {@link Settings#mergeProperties(String)}
*/
private String propertiesFilePath;
@@ -937,7 +937,7 @@ public class DependencyCheckScanAgent {
LOGGER.error("Continuing execution");
}
}

settings.setBoolean(Settings.KEYS.AUTO_UPDATE, autoUpdate);
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
@@ -1028,7 +1028,7 @@ public class DependencyCheckScanAgent {
final String msg = String.format("%n%nDependency-Check Failure:%n"
+ "One or more dependencies were identified with vulnerabilities that have a CVSS score greater than '%.1f': %s%n"
+ "See the dependency-check report for more details.%n%n", failBuildOnCVSS, ids.toString());

throw new ScanAgentException(msg);
}
}

@@ -29,13 +29,10 @@ import java.sql.SQLException;
import java.sql.Statement;
import javax.annotation.concurrent.ThreadSafe;
import org.apache.commons.io.IOUtils;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.exception.H2DBLockException;
import org.owasp.dependencycheck.utils.DBUtils;
import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
import org.owasp.dependencycheck.utils.FileUtils;
import org.owasp.dependencycheck.utils.H2DBLock;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -115,26 +112,15 @@ public final class ConnectionFactory {
return;
}
Connection conn = null;
H2DBLock dblock = null;
try {
if (isH2Connection()) {
dblock = new H2DBLock(settings);
LOGGER.debug("locking for init");
dblock.lock();
}

//load the driver if necessary
final String driverName = settings.getString(Settings.KEYS.DB_DRIVER_NAME, "");
if (!driverName.isEmpty()) {
LOGGER.debug("Loading driver: {}", driverName);
final String driverPath = settings.getString(Settings.KEYS.DB_DRIVER_PATH, "");
final String driverPath = settings.getString(Settings.KEYS.DB_DRIVER_PATH, "");
if (!driverPath.isEmpty()) {
LOGGER.debug("Loading driver '{}' from '{}'", driverName, driverPath);
try {
if (!driverPath.isEmpty()) {
LOGGER.debug("Loading driver from: {}", driverPath);
driver = DriverLoader.load(driverName, driverPath);
} else {
driver = DriverLoader.load(driverName);
}
LOGGER.debug("Loading driver from: {}", driverPath);
driver = DriverLoader.load(driverName, driverPath);
} catch (DriverLoadException ex) {
LOGGER.debug("Unable to load database driver", ex);
throw new DatabaseException("Unable to load database driver", ex);
@@ -198,8 +184,6 @@ public final class ConnectionFactory {
LOGGER.debug("", dex);
throw new DatabaseException("Database schema does not match this version of dependency-check", dex);
}
} catch (H2DBLockException ex) {
throw new DatabaseException("Unable to obtain an exclusive lock on the H2 database to perform initializataion", ex);
} finally {
if (conn != null) {
try {
@@ -208,9 +192,6 @@ public final class ConnectionFactory {
LOGGER.debug("An error occurred closing the connection", ex);
}
}
if (dblock != null) {
dblock.release();
}
}
}

@@ -272,10 +253,22 @@ public final class ConnectionFactory {
* cannot be created
*/
public static boolean h2DataFileExists(Settings configuration) throws IOException {
File file = getH2DataFile(configuration);
return file.exists();
}

/**
* Returns a reference to the H2 database file.
*
* @param configuration the configured settings
* @return the path to the H2 database file
* @throws IOException thrown if there is an error
*/
public static File getH2DataFile(Settings configuration) throws IOException {
final File dir = configuration.getDataDirectory();
final String fileName = configuration.getString(Settings.KEYS.DB_FILE_NAME);
final File file = new File(dir, fileName);
return file.exists();
return file;
}

/**

@@ -33,7 +33,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.data.nvdcve.ConnectionFactory;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
@@ -44,11 +43,9 @@ import org.owasp.dependencycheck.data.update.nvd.DownloadTask;
import org.owasp.dependencycheck.data.update.nvd.NvdCveInfo;
import org.owasp.dependencycheck.data.update.nvd.ProcessTask;
import org.owasp.dependencycheck.data.update.nvd.UpdateableNvdCve;
import org.owasp.dependencycheck.exception.H2DBLockException;
import org.owasp.dependencycheck.utils.DateUtil;
import org.owasp.dependencycheck.utils.Downloader;
import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.H2DBLock;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
@@ -113,17 +110,10 @@ public class NvdCveUpdater implements CachedWebDataSource {
if (isUpdateConfiguredFalse()) {
return;
}
H2DBLock dblock = null;
try {
if (ConnectionFactory.isH2Connection(settings)) {
dblock = new H2DBLock(settings);
LOGGER.debug("locking for update");
dblock.lock();
}
initializeExecutorServices();
dbProperties = cveDb.getDatabaseProperties();

if (checkUpdate()) {
initializeExecutorServices();
final UpdateableNvdCve updateable = getUpdatesNeeded();
if (updateable.isUpdateNeeded()) {
performUpdate(updateable);
@@ -140,12 +130,7 @@ public class NvdCveUpdater implements CachedWebDataSource {
throw new UpdateException("Unable to download the NVD CVE data.", ex);
} catch (DatabaseException ex) {
throw new UpdateException("Database Exception, unable to update the data to use the most current data.", ex);
} catch (H2DBLockException ex) {
throw new UpdateException("Unable to obtain an exclusive lock on the H2 database to perform updates", ex);
} finally {
if (dblock != null) {
dblock.release();
}
shutdownExecutorServices();
}
}

@@ -18,9 +18,13 @@
package org.owasp.dependencycheck.utils;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileLock;
import java.security.SecureRandom;
import java.sql.Timestamp;
import java.util.Date;
import javax.annotation.concurrent.NotThreadSafe;
import org.owasp.dependencycheck.exception.H2DBLockException;
@@ -38,6 +42,14 @@ public class H2DBLock {
* The logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(H2DBLock.class);
/**
* How long to sleep waiting for the lock.
*/
public static final int SLEEP_DURATION = 10000;
/**
* Max attempts to obtain a lock.
*/
public static final int MAX_SLEEP_COUNT = 120;
/**
* The file lock.
*/
@@ -54,6 +66,10 @@ public class H2DBLock {
* The configured settings.
*/
private final Settings settings;
/**
* A random string used to validate the lock.
*/
private final String magic;

/**
* Constructs a new H2DB Lock object with the configured settings.
@@ -62,6 +78,10 @@ public class H2DBLock {
*/
public H2DBLock(Settings settings) {
this.settings = settings;
byte[] random = new byte[16];
SecureRandom gen = new SecureRandom();
gen.nextBytes(random);
magic = Checksum.getHex(random);
}

/**
@@ -85,9 +105,12 @@ public class H2DBLock {
if (!lockFile.getParentFile().isDirectory() && !lockFile.mkdir()) {
throw new H2DBLockException("Unable to create path to data directory.");
}
if (lockFile.isFile() && getFileAge(lockFile) > 5 && !lockFile.delete()) {
LOGGER.warn("An old db update lock file was found but the system was unable to delete "
+ "the file. Consider manually deleting {}", lockFile.getAbsolutePath());
if (lockFile.isFile() && getFileAge(lockFile) > 30) {
LOGGER.debug("An old db update lock file was found: {}", lockFile.getAbsolutePath());
if (!lockFile.delete()) {
LOGGER.warn("An old db update lock file was found but the system was unable to delete "
+ "the file. Consider manually deleting {}", lockFile.getAbsolutePath());
}
}
int ctr = 0;
do {
@@ -95,37 +118,50 @@ public class H2DBLock {
if (!lockFile.exists() && lockFile.createNewFile()) {
file = new RandomAccessFile(lockFile, "rw");
lock = file.getChannel().lock();
LOGGER.debug("Lock file created ({})", Thread.currentThread().getName());
file.writeBytes(magic);
file.getChannel().force(true);
Thread.sleep(20);
file.seek(0);
String current = file.readLine();
if (current != null && !current.equals(magic)) {
lock.close();
lock = null;
LOGGER.debug("Another process obtained a lock first ({})", Thread.currentThread().getName());
} else {
Timestamp timestamp = new Timestamp(System.currentTimeMillis());
LOGGER.debug("Lock file created ({}) {} @ {}", Thread.currentThread().getName(), magic, timestamp.toString());
}
}
} catch (IOException ex) {
} catch (IOException | InterruptedException ex) {
LOGGER.trace("Expected error as another thread has likely locked the file", ex);
} finally {
if (lock == null && file != null) {
try {
file.close();
file = null;
} catch (IOException ex) {
LOGGER.trace("Unable to close the ulFile", ex);
LOGGER.trace("Unable to close the lock file", ex);
}
}
}
if (lock == null || !lock.isValid()) {
try {
LOGGER.debug("Sleeping thread {} for 5 seconds because an exclusive lock on the database could not be obtained",
Thread.currentThread().getName());
Thread.sleep(5000);
Timestamp timestamp = new Timestamp(System.currentTimeMillis());
LOGGER.debug("Sleeping thread {} ({}) for 10 seconds because an exclusive lock on the database could not be obtained ({})",
Thread.currentThread().getName(), magic, timestamp.toString());
Thread.sleep(SLEEP_DURATION);
} catch (InterruptedException ex) {
LOGGER.trace("ignorable error, sleep was interrupted.", ex);
LOGGER.debug("sleep was interrupted.", ex);
Thread.currentThread().interrupt();
}
}
} while (++ctr < 60 && (lock == null || !lock.isValid()));
} while (++ctr < MAX_SLEEP_COUNT && (lock == null || !lock.isValid()));
if (lock == null || !lock.isValid()) {
throw new H2DBLockException("Unable to obtain the update lock, skipping the database update. Skippinig the database update.");
}
} catch (IOException ex) {
throw new H2DBLockException(ex.getMessage(), ex);
}

}

/**
@@ -137,7 +173,7 @@ public class H2DBLock {
lock.release();
lock = null;
} catch (IOException ex) {
LOGGER.trace("Ignorable exception", ex);
LOGGER.debug("Failed to release lock", ex);
}
}
if (file != null) {
@@ -145,15 +181,24 @@ public class H2DBLock {
file.close();
file = null;
} catch (IOException ex) {
LOGGER.trace("Ignorable exception", ex);
LOGGER.debug("Unable to delete lock file", ex);
}
}
if (lockFile != null && lockFile.isFile() && !lockFile.delete()) {
LOGGER.error("Lock file '{}' was unable to be deleted. Please manually delete this file.", lockFile.toString());
lockFile.deleteOnExit();
if (lockFile != null && lockFile.isFile()) {
try (RandomAccessFile f = new RandomAccessFile(lockFile, "rw")) {
String m = f.readLine();
f.close();
if (m != null && m.equals(magic) && !lockFile.delete()) {
LOGGER.error("Lock file '{}' was unable to be deleted. Please manually delete this file.", lockFile.toString());
lockFile.deleteOnExit();
}
} catch (IOException ex) {
LOGGER.debug("Error deleting lock file", ex);
}
}
lockFile = null;
LOGGER.debug("Lock released ({})", Thread.currentThread().getName());
Timestamp timestamp = new Timestamp(System.currentTimeMillis());
LOGGER.debug("Lock released ({}) {} @ {}", Thread.currentThread().getName(), magic, timestamp.toString());
}

/**
@@ -162,9 +207,11 @@ public class H2DBLock {
* @param file the file to calculate the age
* @return the age of the file
*/
private long getFileAge(File file) {
private double getFileAge(File file) {
final Date d = new Date();
final long modified = file.lastModified();
return (d.getTime() - modified) / 1000 / 60;
final double time = (d.getTime() - modified) / 1000 / 60;
LOGGER.debug("Lock file age is {} minutes", time);
return time;
}
}

@@ -23,7 +23,7 @@ data.file_name=dc.h2.db
### the gradle PurgeDataExtension.
data.version=3.0

data.connection_string=jdbc:h2:file:%s;MV_STORE=FALSE;AUTOCOMMIT=ON;LOCK_MODE=0;FILE_LOCK=NO
data.connection_string=jdbc:h2:file:%s;MV_STORE=FALSE;AUTOCOMMIT=ON;
#data.connection_string=jdbc:mysql://localhost:3306/dependencycheck

# user name and password for the database connection. The inherent case is to use H2.

@@ -48,35 +48,34 @@ public class EngineIT extends BaseDBTestCase {
|
||||
@Test
|
||||
public void testEngine() throws IOException, InvalidSettingException, DatabaseException, ReportException, ExceptionCollection {
|
||||
String testClasses = "target/test-classes";
|
||||
boolean autoUpdate = getSettings().getBoolean(Settings.KEYS.AUTO_UPDATE);
|
||||
getSettings().setBoolean(Settings.KEYS.AUTO_UPDATE, false);
|
||||
Engine instance = new Engine(getSettings());
|
||||
getSettings().setBoolean(Settings.KEYS.AUTO_UPDATE, autoUpdate);
|
||||
instance.scan(testClasses);
|
||||
assertTrue(instance.getDependencies().length > 0);
|
||||
try {
|
||||
instance.analyzeDependencies();
|
||||
} catch (ExceptionCollection ex) {
|
||||
Set<String> allowedMessages = new HashSet<>();
|
||||
allowedMessages.add("bundle-audit");
|
||||
allowedMessages.add("AssemblyAnalyzer");
|
||||
//allowedMessages.add("Unable to connect to");
|
||||
for (Throwable t : ex.getExceptions()) {
|
||||
boolean isOk = false;
|
||||
if (t.getMessage() != null) {
|
||||
for (String msg : allowedMessages) {
|
||||
if (t.getMessage().contains(msg)) {
|
||||
isOk = true;
|
||||
break;
|
||||
try (Engine instance = new Engine(getSettings())) {
|
||||
instance.scan(testClasses);
|
||||
assertTrue(instance.getDependencies().length > 0);
|
||||
try {
|
||||
instance.analyzeDependencies();
|
||||
} catch (ExceptionCollection ex) {
|
||||
Set<String> allowedMessages = new HashSet<>();
|
||||
allowedMessages.add("bundle-audit");
|
||||
allowedMessages.add("AssemblyAnalyzer");
|
||||
//allowedMessages.add("Unable to connect to");
|
||||
for (Throwable t : ex.getExceptions()) {
|
||||
boolean isOk = false;
|
||||
if (t.getMessage() != null) {
|
||||
for (String msg : allowedMessages) {
|
||||
if (t.getMessage().contains(msg)) {
|
||||
isOk = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!isOk) {
|
||||
throw ex;
|
||||
if (!isOk) {
|
||||
throw ex;
|
||||
}
|
||||
}
|
||||
}
|
||||
instance.writeReports("dependency-check sample", new File("./target/"), "ALL");
|
||||
instance.close();
|
||||
}
|
||||
instance.writeReports("dependency-check sample", new File("./target/"), "ALL");
|
||||
instance.close();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -54,46 +54,49 @@ public class EngineTest extends BaseDBTestCase {
|
||||
*/
|
||||
@Test
|
||||
public void testScanFile() throws DatabaseException {
|
||||
Engine instance = new Engine(getSettings());
|
||||
instance.addFileTypeAnalyzer(new JarAnalyzer());
|
||||
File file = BaseTest.getResourceAsFile(this, "dwr.jar");
|
||||
Dependency dwr = instance.scanFile(file);
|
||||
file = BaseTest.getResourceAsFile(this, "org.mortbay.jmx.jar");
|
||||
instance.scanFile(file);
|
||||
assertEquals(2, instance.getDependencies().length);
|
||||
try (Engine instance = new Engine(getSettings())) {
|
||||
instance.addFileTypeAnalyzer(new JarAnalyzer());
|
||||
File file = BaseTest.getResourceAsFile(this, "dwr.jar");
|
||||
Dependency dwr = instance.scanFile(file);
|
||||
file = BaseTest.getResourceAsFile(this, "org.mortbay.jmx.jar");
|
||||
instance.scanFile(file);
|
||||
assertEquals(2, instance.getDependencies().length);
|
||||
|
||||
file = BaseTest.getResourceAsFile(this, "dwr.jar");
|
||||
Dependency secondDwr = instance.scanFile(file);
|
||||
file = BaseTest.getResourceAsFile(this, "dwr.jar");
|
||||
Dependency secondDwr = instance.scanFile(file);
|
||||
|
||||
assertEquals(2, instance.getDependencies().length);
|
||||
assertEquals(dwr, secondDwr);
|
||||
assertEquals(2, instance.getDependencies().length);
|
||||
assertEquals(dwr, secondDwr);
|
||||
}
|
||||
}
|
||||
|
||||
@Test(expected = ExceptionCollection.class)
|
||||
public void exceptionDuringAnalysisTaskExecutionIsFatal() throws DatabaseException, ExceptionCollection {
|
||||
final ExecutorService executorService = Executors.newFixedThreadPool(3);
|
||||
final Engine instance = new Engine(getSettings());
|
||||
final List<Throwable> exceptions = new ArrayList<>();
|
||||
|
||||
new Expectations() {
|
||||
{
|
||||
analysisTask.call();
|
||||
result = new IllegalStateException("Analysis task execution threw an exception");
|
||||
}
|
||||
};
|
||||
try (Engine instance = new Engine(getSettings())) {
|
||||
final ExecutorService executorService = Executors.newFixedThreadPool(3);
|
||||
final List<Throwable> exceptions = new ArrayList<>();
|
||||
|
||||
final List<AnalysisTask> failingAnalysisTask = new ArrayList<>();
|
||||
failingAnalysisTask.add(analysisTask);
|
||||
new Expectations() {
|
||||
{
|
||||
analysisTask.call();
|
||||
result = new IllegalStateException("Analysis task execution threw an exception");
|
||||
}
|
||||
};
|
||||
|
||||
new Expectations(instance) {
|
||||
{
|
||||
instance.getExecutorService(analyzer);
|
||||
result = executorService;
|
||||
instance.getAnalysisTasks(analyzer, exceptions);
|
||||
result = failingAnalysisTask;
|
||||
}
|
||||
};
|
||||
instance.executeAnalysisTasks(analyzer, exceptions);
|
||||
assertTrue(executorService.isShutdown());
|
||||
final List<AnalysisTask> failingAnalysisTask = new ArrayList<>();
|
||||
failingAnalysisTask.add(analysisTask);
|
||||
|
||||
new Expectations(instance) {
|
||||
{
|
||||
instance.getExecutorService(analyzer);
|
||||
result = executorService;
|
||||
instance.getAnalysisTasks(analyzer, exceptions);
|
||||
result = failingAnalysisTask;
|
||||
}
|
||||
};
|
||||
instance.executeAnalysisTasks(analyzer, exceptions);
|
||||
assertTrue(executorService.isShutdown());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -128,12 +128,11 @@ public class ArchiveAnalyzerIT extends BaseDBTestCase {
|
||||
instance.initialize(getSettings());
|
||||
//trick the analyzer into thinking it is active.
|
||||
instance.accept(new File("test.ear"));
|
||||
try {
|
||||
try (Engine engine = new Engine(getSettings())) {
|
||||
getSettings().setBoolean(Settings.KEYS.AUTO_UPDATE, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, false);
|
||||
Engine engine = new Engine(getSettings());
|
||||
|
||||
|
||||
instance.prepare(engine);
|
||||
File file = BaseTest.getResourceAsFile(this, "daytrader-ear-2.1.7.ear");
|
||||
Dependency dependency = new Dependency(file);
|
||||
@@ -141,11 +140,7 @@ public class ArchiveAnalyzerIT extends BaseDBTestCase {
|
||||
int initial_size = engine.getDependencies().length;
|
||||
instance.analyze(dependency, engine);
|
||||
int ending_size = engine.getDependencies().length;
|
||||
|
||||
engine.close();
|
||||
|
||||
assertTrue(initial_size < ending_size);
|
||||
|
||||
} finally {
|
||||
instance.close();
|
||||
}
|
||||
@@ -160,21 +155,17 @@ public class ArchiveAnalyzerIT extends BaseDBTestCase {
|
||||
instance.initialize(getSettings());
|
||||
//trick the analyzer into thinking it is active.
|
||||
instance.accept(new File("test.ear"));
|
||||
try {
|
||||
try (Engine engine = new Engine(getSettings())) {
|
||||
instance.prepare(null);
|
||||
File file = BaseTest.getResourceAsFile(this, "bootable-0.1.0.jar");
|
||||
Dependency dependency = new Dependency(file);
|
||||
getSettings().setBoolean(Settings.KEYS.AUTO_UPDATE, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, false);
|
||||
Engine engine = new Engine(getSettings());
|
||||
|
||||
|
||||
int initial_size = engine.getDependencies().length;
|
||||
instance.analyze(dependency, engine);
|
||||
int ending_size = engine.getDependencies().length;
|
||||
|
||||
engine.close();
|
||||
|
||||
assertTrue(initial_size < ending_size);
|
||||
|
||||
} finally {
|
||||
@@ -191,7 +182,7 @@ public class ArchiveAnalyzerIT extends BaseDBTestCase {
|
||||
instance.initialize(getSettings());
|
||||
//trick the analyzer into thinking it is active so that it will prepare
|
||||
instance.accept(new File("test.tar"));
|
||||
try {
|
||||
try (Engine engine = new Engine(getSettings())) {
|
||||
instance.prepare(null);
|
||||
|
||||
//File file = new File(this.getClass().getClassLoader().getResource("file.tar").getPath());
|
||||
@@ -201,15 +192,11 @@ public class ArchiveAnalyzerIT extends BaseDBTestCase {
|
||||
getSettings().setBoolean(Settings.KEYS.AUTO_UPDATE, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, false);
|
||||
Engine engine = new Engine(getSettings());
|
||||
|
||||
|
||||
int initial_size = engine.getDependencies().length;
|
||||
instance.analyze(dependency, engine);
|
||||
int ending_size = engine.getDependencies().length;
|
||||
engine.close();
|
||||
|
||||
assertTrue(initial_size < ending_size);
|
||||
|
||||
} finally {
|
||||
instance.close();
|
||||
}
|
||||
@@ -223,7 +210,7 @@ public class ArchiveAnalyzerIT extends BaseDBTestCase {
|
||||
ArchiveAnalyzer instance = new ArchiveAnalyzer();
|
||||
instance.initialize(getSettings());
|
||||
instance.accept(new File("zip")); //ensure analyzer is "enabled"
|
||||
try {
|
||||
try (Engine engine = new Engine(getSettings())) {
|
||||
instance.prepare(null);
|
||||
|
||||
//File file = new File(this.getClass().getClassLoader().getResource("file.tar.gz").getPath());
|
||||
@@ -232,14 +219,12 @@ public class ArchiveAnalyzerIT extends BaseDBTestCase {
|
||||
getSettings().setBoolean(Settings.KEYS.AUTO_UPDATE, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, false);
|
||||
Engine engine = new Engine(getSettings());
|
||||
|
||||
|
||||
int initial_size = engine.getDependencies().length;
|
||||
//instance.analyze(dependency, engine);
|
||||
engine.scan(file);
|
||||
engine.analyzeDependencies();
|
||||
int ending_size = engine.getDependencies().length;
|
||||
engine.close();
|
||||
assertTrue(initial_size < ending_size);
|
||||
|
||||
} finally {
|
||||
@@ -255,18 +240,16 @@ public class ArchiveAnalyzerIT extends BaseDBTestCase {
|
||||
ArchiveAnalyzer instance = new ArchiveAnalyzer();
|
||||
instance.initialize(getSettings());
|
||||
instance.accept(new File("zip")); //ensure analyzer is "enabled"
|
||||
try {
|
||||
try (Engine engine = new Engine(getSettings())){
|
||||
instance.prepare(null);
|
||||
File file = BaseTest.getResourceAsFile(this, "file.tar.bz2");
|
||||
getSettings().setBoolean(Settings.KEYS.AUTO_UPDATE, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, false);
|
||||
Engine engine = new Engine(getSettings());
|
||||
int initial_size = engine.getDependencies().length;
|
||||
engine.scan(file);
|
||||
engine.analyzeDependencies();
|
||||
int ending_size = engine.getDependencies().length;
|
||||
engine.close();
|
||||
assertTrue(initial_size < ending_size);
|
||||
} finally {
|
||||
instance.close();
|
||||
@@ -281,7 +264,7 @@ public class ArchiveAnalyzerIT extends BaseDBTestCase {
|
||||
ArchiveAnalyzer instance = new ArchiveAnalyzer();
|
||||
instance.initialize(getSettings());
|
||||
instance.accept(new File("zip")); //ensure analyzer is "enabled"
|
||||
try {
|
||||
try (Engine engine = new Engine(getSettings())) {
|
||||
instance.prepare(null);
|
||||
|
||||
//File file = new File(this.getClass().getClassLoader().getResource("file.tgz").getPath());
|
||||
@@ -289,13 +272,10 @@ public class ArchiveAnalyzerIT extends BaseDBTestCase {
|
||||
getSettings().setBoolean(Settings.KEYS.AUTO_UPDATE, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, false);
|
||||
Engine engine = new Engine(getSettings());
|
||||
|
||||
int initial_size = engine.getDependencies().length;
|
||||
engine.scan(file);
|
||||
engine.analyzeDependencies();
|
||||
int ending_size = engine.getDependencies().length;
|
||||
engine.close();
|
||||
assertTrue(initial_size < ending_size);
|
||||
|
||||
} finally {
|
||||
@@ -311,18 +291,16 @@ public class ArchiveAnalyzerIT extends BaseDBTestCase {
|
||||
ArchiveAnalyzer instance = new ArchiveAnalyzer();
|
||||
instance.initialize(getSettings());
|
||||
instance.accept(new File("zip")); //ensure analyzer is "enabled"
|
||||
try {
|
||||
try (Engine engine = new Engine(getSettings())) {
|
||||
instance.prepare(null);
|
||||
File file = BaseTest.getResourceAsFile(this, "file.tbz2");
|
||||
getSettings().setBoolean(Settings.KEYS.AUTO_UPDATE, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, false);
|
||||
Engine engine = new Engine(getSettings());
|
||||
int initial_size = engine.getDependencies().length;
|
||||
engine.scan(file);
|
||||
engine.analyzeDependencies();
|
||||
int ending_size = engine.getDependencies().length;
|
||||
engine.close();
|
||||
assertTrue(initial_size < ending_size);
|
||||
} finally {
|
||||
instance.close();
|
||||
@@ -336,7 +314,7 @@ public class ArchiveAnalyzerIT extends BaseDBTestCase {
|
||||
public void testAnalyze_badZip() throws Exception {
|
||||
ArchiveAnalyzer instance = new ArchiveAnalyzer();
|
||||
instance.initialize(getSettings());
|
||||
try {
|
||||
try (Engine engine = new Engine(getSettings())) {
|
||||
instance.prepare(null);
|
||||
|
||||
//File file = new File(this.getClass().getClassLoader().getResource("test.zip").getPath());
|
||||
@@ -345,7 +323,6 @@ public class ArchiveAnalyzerIT extends BaseDBTestCase {
|
||||
getSettings().setBoolean(Settings.KEYS.AUTO_UPDATE, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, false);
|
||||
Engine engine = new Engine(getSettings());
|
||||
int initial_size = engine.getDependencies().length;
|
||||
// boolean failed = false;
|
||||
// try {
|
||||
@@ -355,7 +332,6 @@ public class ArchiveAnalyzerIT extends BaseDBTestCase {
|
||||
// }
|
||||
// assertTrue(failed);
|
||||
int ending_size = engine.getDependencies().length;
|
||||
engine.close();
|
||||
assertEquals(initial_size, ending_size);
|
||||
} finally {
|
||||
instance.close();
|
||||
|
||||
@@ -154,19 +154,21 @@ public class CMakeAnalyzerTest extends BaseDBTestCase {
|
||||
*/
|
||||
@Test
|
||||
public void testAnalyzeCMakeListsOpenCV3rdParty() throws AnalysisException, DatabaseException {
|
||||
final Dependency result = new Dependency(BaseTest.getResourceAsFile(
|
||||
this, "cmake/opencv/3rdparty/ffmpeg/ffmpeg_version.cmake"));
|
||||
final Engine engine = new Engine(getSettings());
|
||||
analyzer.analyze(result, engine);
|
||||
assertProductEvidence(result, "libavcodec");
|
||||
assertVersionEvidence(result, "55.18.102");
|
||||
assertFalse("ALIASOF_ prefix shouldn't be present.",
|
||||
Pattern.compile("\\bALIASOF_\\w+").matcher(result.getEvidence(EvidenceType.PRODUCT).toString()).find());
|
||||
final Dependency[] dependencies = engine.getDependencies();
|
||||
assertEquals("Number of additional dependencies should be 4.", 4, dependencies.length);
|
||||
final Dependency last = dependencies[3];
|
||||
assertProductEvidence(last, "libavresample");
|
||||
assertVersionEvidence(last, "1.0.1");
|
||||
try (Engine engine = new Engine(getSettings())) {
|
||||
final Dependency result = new Dependency(BaseTest.getResourceAsFile(
|
||||
this, "cmake/opencv/3rdparty/ffmpeg/ffmpeg_version.cmake"));
|
||||
|
||||
analyzer.analyze(result, engine);
|
||||
assertProductEvidence(result, "libavcodec");
|
||||
assertVersionEvidence(result, "55.18.102");
|
||||
assertFalse("ALIASOF_ prefix shouldn't be present.",
|
||||
Pattern.compile("\\bALIASOF_\\w+").matcher(result.getEvidence(EvidenceType.PRODUCT).toString()).find());
|
||||
final Dependency[] dependencies = engine.getDependencies();
|
||||
assertEquals("Number of additional dependencies should be 4.", 4, dependencies.length);
|
||||
final Dependency last = dependencies[3];
|
||||
assertProductEvidence(last, "libavresample");
|
||||
assertVersionEvidence(last, "1.0.1");
|
||||
}
|
||||
}
|
||||
|
||||
private void assertVersionEvidence(Dependency result, String version) {
|
||||
|
||||
@@ -84,12 +84,11 @@ public class CPEAnalyzerIT extends BaseDBTestCase {
|
||||
*/
|
||||
@Test
|
||||
public void testDetermineCPE_full() throws Exception {
|
||||
//update needs to be performed so that xtream can be tested
|
||||
Engine e = new Engine(getSettings());
|
||||
e.doUpdates();
|
||||
|
||||
CPEAnalyzer cpeAnalyzer = new CPEAnalyzer();
|
||||
try {
|
||||
try (Engine e = new Engine(getSettings())) {
|
||||
//update needs to be performed so that xtream can be tested
|
||||
e.doUpdates(true);
|
||||
|
||||
cpeAnalyzer.initialize(getSettings());
|
||||
cpeAnalyzer.prepare(e);
|
||||
FileNameAnalyzer fnAnalyzer = new FileNameAnalyzer();
|
||||
@@ -113,7 +112,6 @@ public class CPEAnalyzerIT extends BaseDBTestCase {
|
||||
callDetermineCPE_full("jaxb-xercesImpl-1.5.jar", null, cpeAnalyzer, fnAnalyzer, jarAnalyzer, hAnalyzer, fp);
|
||||
callDetermineCPE_full("ehcache-core-2.2.0.jar", null, cpeAnalyzer, fnAnalyzer, jarAnalyzer, hAnalyzer, fp);
|
||||
callDetermineCPE_full("xstream-1.4.8.jar", "cpe:/a:x-stream:xstream:1.4.8", cpeAnalyzer, fnAnalyzer, jarAnalyzer, hAnalyzer, fp);
|
||||
|
||||
} finally {
|
||||
cpeAnalyzer.close();
|
||||
}
|
||||
@@ -124,7 +122,8 @@ public class CPEAnalyzerIT extends BaseDBTestCase {
|
||||
*
|
||||
* @throws Exception is thrown when an exception occurs
|
||||
*/
|
||||
public void callDetermineCPE_full(String depName, String expResult, CPEAnalyzer cpeAnalyzer, FileNameAnalyzer fnAnalyzer, JarAnalyzer jarAnalyzer, HintAnalyzer hAnalyzer, FalsePositiveAnalyzer fp) throws Exception {
|
||||
public void callDetermineCPE_full(String depName, String expResult, CPEAnalyzer cpeAnalyzer, FileNameAnalyzer fnAnalyzer,
|
||||
JarAnalyzer jarAnalyzer, HintAnalyzer hAnalyzer, FalsePositiveAnalyzer fp) throws Exception {
|
||||
|
||||
//File file = new File(this.getClass().getClassLoader().getResource(depName).getPath());
|
||||
File file = BaseTest.getResourceAsFile(this, depName);
|
||||
@@ -197,35 +196,35 @@ public class CPEAnalyzerIT extends BaseDBTestCase {
|
||||
hintAnalyzer.analyze(spring3, null);
|
||||
|
||||
CPEAnalyzer instance = new CPEAnalyzer();
|
||||
Engine engine = new Engine(getSettings());
|
||||
engine.openDatabase();
|
||||
instance.initialize(getSettings());
|
||||
instance.prepare(engine);
|
||||
instance.determineCPE(commonValidator);
|
||||
instance.determineCPE(struts);
|
||||
instance.determineCPE(spring);
|
||||
instance.determineCPE(spring3);
|
||||
instance.close();
|
||||
try (Engine engine = new Engine(getSettings())) {
|
||||
engine.openDatabase(true, true);
|
||||
instance.initialize(getSettings());
|
||||
instance.prepare(engine);
|
||||
instance.determineCPE(commonValidator);
|
||||
instance.determineCPE(struts);
|
||||
instance.determineCPE(spring);
|
||||
instance.determineCPE(spring3);
|
||||
instance.close();
|
||||
|
||||
String expResult = "cpe:/a:apache:struts:2.1.2";
|
||||
String expResult = "cpe:/a:apache:struts:2.1.2";
|
||||
|
||||
for (Identifier i : commonValidator.getIdentifiers()) {
|
||||
assertFalse("Apache Common Validator - found a CPE identifier?", "cpe".equals(i.getType()));
|
||||
}
|
||||
|
||||
assertTrue("Incorrect match size - struts", struts.getIdentifiers().size() >= 1);
|
||||
boolean found = false;
|
||||
for (Identifier i : struts.getIdentifiers()) {
|
||||
if (expResult.equals(i.getValue())) {
|
||||
found = true;
|
||||
break;
|
||||
for (Identifier i : commonValidator.getIdentifiers()) {
|
||||
assertFalse("Apache Common Validator - found a CPE identifier?", "cpe".equals(i.getType()));
|
||||
}
|
||||
}
|
||||
assertTrue("Incorrect match - struts", found);
|
||||
assertTrue("Incorrect match size - spring3 - " + spring3.getIdentifiers().size(), spring3.getIdentifiers().size() >= 1);
|
||||
|
||||
jarAnalyzer.close();
|
||||
engine.close();
|
||||
assertTrue("Incorrect match size - struts", struts.getIdentifiers().size() >= 1);
|
||||
boolean found = false;
|
||||
for (Identifier i : struts.getIdentifiers()) {
|
||||
if (expResult.equals(i.getValue())) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
assertTrue("Incorrect match - struts", found);
|
||||
assertTrue("Incorrect match size - spring3 - " + spring3.getIdentifiers().size(), spring3.getIdentifiers().size() >= 1);
|
||||
|
||||
jarAnalyzer.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -241,13 +240,13 @@ public class CPEAnalyzerIT extends BaseDBTestCase {
|
||||
openssl.addEvidence(EvidenceType.VERSION, "test", "version", "1.0.1c", Confidence.HIGHEST);
|
||||
|
||||
CPEAnalyzer instance = new CPEAnalyzer();
|
||||
Engine engine = new Engine(getSettings());
|
||||
engine.openDatabase();
|
||||
instance.initialize(getSettings());
|
||||
instance.prepare(engine);
|
||||
instance.determineIdentifiers(openssl, "openssl", "openssl", Confidence.HIGHEST);
|
||||
instance.close();
|
||||
engine.close();
|
||||
try (Engine engine = new Engine(getSettings())) {
|
||||
engine.openDatabase(true, true);
|
||||
instance.initialize(getSettings());
|
||||
instance.prepare(engine);
|
||||
instance.determineIdentifiers(openssl, "openssl", "openssl", Confidence.HIGHEST);
|
||||
instance.close();
|
||||
}
|
||||
|
||||
String expResult = "cpe:/a:openssl:openssl:1.0.1c";
|
||||
Identifier expIdentifier = new Identifier("cpe", expResult, expResult);
|
||||
@@ -258,7 +257,6 @@ public class CPEAnalyzerIT extends BaseDBTestCase {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
assertTrue("OpenSSL identifier not found", found);
|
||||
}
|
||||
|
||||
@@ -275,23 +273,23 @@ public class CPEAnalyzerIT extends BaseDBTestCase {
|
||||
String expProduct = "struts";
|
||||
|
||||
CPEAnalyzer instance = new CPEAnalyzer();
|
||||
Engine engine = new Engine(getSettings());
|
||||
engine.openDatabase();
|
||||
instance.initialize(getSettings());
|
||||
instance.prepare(engine);
|
||||
try (Engine engine = new Engine(getSettings())) {
|
||||
engine.openDatabase(true, true);
|
||||
instance.initialize(getSettings());
|
||||
instance.prepare(engine);
|
||||
|
||||
Set<String> productWeightings = Collections.singleton("struts2");
|
||||
Set<String> vendorWeightings = Collections.singleton("apache");
|
||||
List<IndexEntry> result = instance.searchCPE(vendor, product, vendorWeightings, productWeightings);
|
||||
instance.close();
|
||||
Set<String> productWeightings = Collections.singleton("struts2");
|
||||
Set<String> vendorWeightings = Collections.singleton("apache");
|
||||
List<IndexEntry> result = instance.searchCPE(vendor, product, vendorWeightings, productWeightings);
|
||||
|
||||
boolean found = false;
|
||||
for (IndexEntry entry : result) {
|
||||
if (expVendor.equals(entry.getVendor()) && expProduct.equals(entry.getProduct())) {
|
||||
found = true;
|
||||
break;
|
||||
boolean found = false;
|
||||
for (IndexEntry entry : result) {
|
||||
if (expVendor.equals(entry.getVendor()) && expProduct.equals(entry.getProduct())) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
assertTrue("apache:struts was not identified", found);
|
||||
}
|
||||
assertTrue("apache:struts was not identified", found);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -99,12 +99,13 @@ public class ComposerLockAnalyzerTest extends BaseDBTestCase {
|
||||
*/
|
||||
@Test
|
||||
public void testAnalyzePackageJson() throws Exception {
|
||||
final Engine engine = new Engine(getSettings());
|
||||
final Dependency result = new Dependency(BaseTest.getResourceAsFile(this,
|
||||
"composer.lock"));
|
||||
analyzer.analyze(result, engine);
|
||||
try (Engine engine = new Engine(getSettings())) {
|
||||
final Dependency result = new Dependency(BaseTest.getResourceAsFile(this,
|
||||
"composer.lock"));
|
||||
analyzer.analyze(result, engine);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test(expected = InitializationException.class)
|
||||
public void analyzerIsDisabledInCaseOfMissingMessageDigest() throws InitializationException {
|
||||
new MockUp<MessageDigest>() {
|
||||
@@ -113,13 +114,13 @@ public class ComposerLockAnalyzerTest extends BaseDBTestCase {
|
||||
throw new NoSuchAlgorithmException("SHA1 is missing");
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
analyzer = new ComposerLockAnalyzer();
|
||||
analyzer.setFilesMatched(true);
|
||||
analyzer.initialize(getSettings());
|
||||
assertTrue(analyzer.isEnabled());
|
||||
analyzer.prepare(null);
|
||||
|
||||
|
||||
assertFalse(analyzer.isEnabled());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -66,44 +66,45 @@ public class HintAnalyzerTest extends BaseDBTestCase {
|
||||
public void testAnalyze() throws Exception {
|
||||
//File guice = new File(this.getClass().getClassLoader().getResource("guice-3.0.jar").getPath());
|
||||
File guice = BaseTest.getResourceAsFile(this, "guice-3.0.jar");
|
||||
//Dependency guice = new Dependency(fileg);
|
||||
//Dependency guice = new EngineDependency(fileg);
|
||||
//File spring = new File(this.getClass().getClassLoader().getResource("spring-core-3.0.0.RELEASE.jar").getPath());
|
||||
File spring = BaseTest.getResourceAsFile(this, "spring-core-3.0.0.RELEASE.jar");
|
||||
//Dependency spring = new Dependency(files);
|
||||
getSettings().setBoolean(Settings.KEYS.AUTO_UPDATE, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, false);
|
||||
getSettings().setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, false);
|
||||
Engine engine = new Engine(getSettings());
|
||||
try (Engine engine = new Engine(getSettings())) {
|
||||
|
||||
engine.scan(guice);
|
||||
engine.scan(spring);
|
||||
engine.analyzeDependencies();
|
||||
Dependency gdep = null;
|
||||
Dependency sdep = null;
|
||||
for (Dependency d : engine.getDependencies()) {
|
||||
if (d.getActualFile().equals(guice)) {
|
||||
gdep = d;
|
||||
} else if (d.getActualFile().equals(spring)) {
|
||||
sdep = d;
|
||||
engine.scan(guice);
|
||||
engine.scan(spring);
|
||||
engine.analyzeDependencies();
|
||||
Dependency gdep = null;
|
||||
Dependency sdep = null;
|
||||
for (Dependency d : engine.getDependencies()) {
|
||||
if (d.getActualFile().equals(guice)) {
|
||||
gdep = d;
|
||||
} else if (d.getActualFile().equals(spring)) {
|
||||
sdep = d;
|
||||
}
|
||||
}
|
||||
final Evidence springTest1 = new Evidence("hint analyzer", "product", "springsource_spring_framework", Confidence.HIGH);
|
||||
final Evidence springTest2 = new Evidence("hint analyzer", "vendor", "SpringSource", Confidence.HIGH);
|
||||
final Evidence springTest3 = new Evidence("hint analyzer", "vendor", "vmware", Confidence.HIGH);
|
||||
final Evidence springTest4 = new Evidence("hint analyzer", "product", "springsource_spring_framework", Confidence.HIGH);
|
||||
final Evidence springTest5 = new Evidence("hint analyzer", "vendor", "vmware", Confidence.HIGH);
|
||||
|
||||
assertFalse(gdep.contains(EvidenceType.PRODUCT, springTest1));
|
||||
assertFalse(gdep.contains(EvidenceType.VENDOR, springTest2));
|
||||
assertFalse(gdep.contains(EvidenceType.VENDOR, springTest3));
|
||||
assertFalse(gdep.contains(EvidenceType.PRODUCT, springTest4));
|
||||
assertFalse(gdep.contains(EvidenceType.VENDOR, springTest5));
|
||||
|
||||
assertTrue(sdep.contains(EvidenceType.PRODUCT, springTest1));
|
||||
assertTrue(sdep.contains(EvidenceType.VENDOR, springTest2));
|
||||
assertTrue(sdep.contains(EvidenceType.VENDOR, springTest3));
|
||||
//assertTrue(evidence.contains(springTest4));
|
||||
//assertTrue(evidence.contains(springTest5));
|
||||
}
|
||||
final Evidence springTest1 = new Evidence("hint analyzer", "product", "springsource_spring_framework", Confidence.HIGH);
|
||||
final Evidence springTest2 = new Evidence("hint analyzer", "vendor", "SpringSource", Confidence.HIGH);
|
||||
final Evidence springTest3 = new Evidence("hint analyzer", "vendor", "vmware", Confidence.HIGH);
|
||||
final Evidence springTest4 = new Evidence("hint analyzer", "product", "springsource_spring_framework", Confidence.HIGH);
|
||||
final Evidence springTest5 = new Evidence("hint analyzer", "vendor", "vmware", Confidence.HIGH);
|
||||
|
||||
assertFalse(gdep.contains(EvidenceType.PRODUCT, springTest1));
|
||||
assertFalse(gdep.contains(EvidenceType.VENDOR, springTest2));
|
||||
assertFalse(gdep.contains(EvidenceType.VENDOR, springTest3));
|
||||
assertFalse(gdep.contains(EvidenceType.PRODUCT, springTest4));
|
||||
assertFalse(gdep.contains(EvidenceType.VENDOR, springTest5));
|
||||
|
||||
assertTrue(sdep.contains(EvidenceType.PRODUCT, springTest1));
|
||||
assertTrue(sdep.contains(EvidenceType.VENDOR, springTest2));
|
||||
assertTrue(sdep.contains(EvidenceType.VENDOR, springTest3));
|
||||
//assertTrue(evidence.contains(springTest4));
|
||||
//assertTrue(evidence.contains(springTest5));
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -189,9 +189,10 @@ public class JarAnalyzerTest extends BaseTest {
|
||||
Dependency actualJarFile = new Dependency();
|
||||
actualJarFile.setActualFilePath(BaseTest.getResourceAsFile(this, "avro-ipc-1.5.0.jar").getAbsolutePath());
|
||||
actualJarFile.setFileName("avro-ipc-1.5.0.jar");
|
||||
Engine engine = new Engine(getSettings());
|
||||
engine.setDependencies(Arrays.asList(macOSMetaDataFile, actualJarFile));
|
||||
instance.analyzeDependency(macOSMetaDataFile, engine);
|
||||
try (Engine engine = new Engine(getSettings())) {
|
||||
engine.setDependencies(Arrays.asList(macOSMetaDataFile, actualJarFile));
|
||||
instance.analyzeDependency(macOSMetaDataFile, engine);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -201,8 +202,9 @@ public class JarAnalyzerTest extends BaseTest {
|
||||
textFileWithJarExtension
|
||||
.setActualFilePath(BaseTest.getResourceAsFile(this, "textFileWithJarExtension.jar").getAbsolutePath());
|
||||
textFileWithJarExtension.setFileName("textFileWithJarExtension.jar");
|
||||
Engine engine = new Engine(getSettings());
|
||||
engine.setDependencies(Collections.singletonList(textFileWithJarExtension));
|
||||
instance.analyzeDependency(textFileWithJarExtension, engine);
|
||||
try (Engine engine = new Engine(getSettings())) {
|
||||
engine.setDependencies(Collections.singletonList(textFileWithJarExtension));
|
||||
instance.analyzeDependency(textFileWithJarExtension, engine);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -150,7 +150,7 @@ public class RubyBundleAuditAnalyzerIT extends BaseDBTestCase {
@Test
public void testAddCriticalityToVulnerability() throws AnalysisException, DatabaseException {
try (Engine engine = new Engine(getSettings())) {
engine.doUpdates();
engine.doUpdates(true);
analyzer.prepare(engine);

final Dependency result = new Dependency(BaseTest.getResourceAsFile(this,
@@ -198,52 +198,19 @@ public class RubyBundleAuditAnalyzerIT extends BaseDBTestCase {
*/
@Test
public void testDependenciesPath() throws AnalysisException, DatabaseException {
final Engine engine = new Engine(getSettings());
engine.scan(BaseTest.getResourceAsFile(this,
"ruby/vulnerable/gems/rails-4.1.15/"));
try {
engine.analyzeDependencies();
} catch (NullPointerException ex) {
LOGGER.error("NPE", ex);
fail(ex.getMessage());
} catch (ExceptionCollection ex) {
Assume.assumeNoException("Exception setting up RubyBundleAuditAnalyzer; bundle audit may not be installed, or property \"analyzer.bundle.audit.path\" may not be set.", ex);
return;
try (Engine engine = new Engine(getSettings())) {
try {
engine.scan(BaseTest.getResourceAsFile(this, "ruby/vulnerable/gems/rails-4.1.15/"));
engine.analyzeDependencies();
} catch (NullPointerException ex) {
LOGGER.error("NPE", ex);
fail(ex.getMessage());
} catch (ExceptionCollection ex) {
Assume.assumeNoException("Exception setting up RubyBundleAuditAnalyzer; bundle audit may not be installed, or property \"analyzer.bundle.audit.path\" may not be set.", ex);
return;
}
List<Dependency> dependencies = new ArrayList<>(Arrays.asList(engine.getDependencies()));
LOGGER.info("{} dependencies found.", dependencies.size());
}
List<Dependency> dependencies = new ArrayList<>(Arrays.asList(engine.getDependencies()));
LOGGER.info("{} dependencies found.", dependencies.size());
//TODO before re-enabling the following add actual assertions.
// Iterator<Dependency> dIterator = dependencies.iterator();
// while (dIterator.hasNext()) {
// Dependency dept = dIterator.next();
// LOGGER.info("dept path: {}", dept.getActualFilePath());
//
// Set<Identifier> identifiers = dept.getIdentifiers();
// Iterator<Identifier> idIterator = identifiers.iterator();
// while (idIterator.hasNext()) {
// Identifier id = idIterator.next();
// LOGGER.info(" Identifier: {}, type={}, url={}, conf={}", id.getValue(), id.getType(), id.getUrl(), id.getConfidence());
// }
//
// Set<Evidence> prodEv = dept.getProductEvidence().getEvidence();
// Iterator<Evidence> it = prodEv.iterator();
// while (it.hasNext()) {
// Evidence e = it.next();
// LOGGER.info(" prod: name={}, value={}, source={}, confidence={}", e.getName(), e.getValue(), e.getSource(), e.getConfidence());
// }
// Set<Evidence> versionEv = dept.getVersionEvidence().getEvidence();
// Iterator<Evidence> vIt = versionEv.iterator();
// while (vIt.hasNext()) {
// Evidence e = vIt.next();
// LOGGER.info(" version: name={}, value={}, source={}, confidence={}", e.getName(), e.getValue(), e.getSource(), e.getConfidence());
// }
//
// Set<Evidence> vendorEv = dept.getVendorEvidence().getEvidence();
// Iterator<Evidence> vendorIt = vendorEv.iterator();
// while (vendorIt.hasNext()) {
// Evidence e = vendorIt.next();
// LOGGER.info(" vendor: name={}, value={}, source={}, confidence={}", e.getName(), e.getValue(), e.getSource(), e.getConfidence());
// }
// }
}
}
@@ -74,24 +74,24 @@ public class VulnerabilitySuppressionAnalyzerIT extends BaseDBTestCase {
getSettings().setBoolean(Settings.KEYS.AUTO_UPDATE, false);
getSettings().setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, false);
getSettings().setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, false);
Engine engine = new Engine(getSettings());
engine.scan(file);
engine.analyzeDependencies();
Dependency dependency = getDependency(engine, file);
int cveSize = dependency.getVulnerabilities().size();
int cpeSize = dependency.getIdentifiers().size();
assertTrue(cveSize > 0);
assertTrue(cpeSize > 0);
getSettings().setString(Settings.KEYS.SUPPRESSION_FILE, suppression.getAbsolutePath());
VulnerabilitySuppressionAnalyzer instance = new VulnerabilitySuppressionAnalyzer();
instance.initialize(getSettings());
instance.prepare(engine);
instance.analyze(dependency, engine);
cveSize = cveSize > 1 ? cveSize - 2 : 0;
cpeSize = cpeSize > 0 ? cpeSize - 1 : 0;
assertTrue(dependency.getVulnerabilities().size() == cveSize);
assertTrue(dependency.getIdentifiers().size() == cpeSize);
engine.close();
try (Engine engine = new Engine(getSettings())) {
engine.scan(file);
engine.analyzeDependencies();
Dependency dependency = getDependency(engine, file);
int cveSize = dependency.getVulnerabilities().size();
int cpeSize = dependency.getIdentifiers().size();
assertTrue(cveSize > 0);
assertTrue(cpeSize > 0);
getSettings().setString(Settings.KEYS.SUPPRESSION_FILE, suppression.getAbsolutePath());
VulnerabilitySuppressionAnalyzer instance = new VulnerabilitySuppressionAnalyzer();
instance.initialize(getSettings());
instance.prepare(engine);
instance.analyze(dependency, engine);
cveSize = cveSize > 1 ? cveSize - 2 : 0;
cpeSize = cpeSize > 0 ? cpeSize - 1 : 0;
assertTrue(dependency.getVulnerabilities().size() == cveSize);
assertTrue(dependency.getIdentifiers().size() == cpeSize);
}
}

/**
@@ -68,16 +68,13 @@ public class ReportGeneratorIT extends BaseDBTestCase {
File jetty = BaseTest.getResourceAsFile(this, "org.mortbay.jetty.jar");

getSettings().setBoolean(Settings.KEYS.AUTO_UPDATE, false);
Engine engine = new Engine(getSettings());

engine.scan(struts);
engine.scan(axis);
engine.scan(jetty);
engine.analyzeDependencies();
engine.writeReports("Test Report", "org.owasp", "dependency-check-core", "1.4.7", writeTo, "XML");

engine.close();

try (Engine engine = new Engine(getSettings())) {
engine.scan(struts);
engine.scan(axis);
engine.scan(jetty);
engine.analyzeDependencies();
engine.writeReports("Test Report", "org.owasp", "dependency-check-core", "1.4.7", writeTo, "XML");
}
InputStream xsdStream = ReportGenerator.class.getClassLoader().getResourceAsStream("schema/dependency-check.1.6.xsd");
StreamSource xsdSource = new StreamSource(xsdStream);
StreamSource xmlSource = new StreamSource(writeTo);
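The hunk is cut off before the validation call itself; for context, a minimal sketch of how a report is typically checked against an XSD with the JDK's javax.xml.validation API. The variable names mirror xsdSource and xmlSource above; the class and method names are assumptions, not code from this commit:

import javax.xml.XMLConstants;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.SchemaFactory;

public final class ReportSchemaCheckSketch {
    // Sketch only: validates the generated XML report against the bundled schema.
    static void validate(StreamSource xsdSource, StreamSource xmlSource) throws Exception {
        SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
        // throws SAXException if the report does not conform to the XSD
        factory.newSchema(xsdSource).newValidator().validate(xmlSource);
    }
}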
@@ -18,7 +18,7 @@ data.directory=[JAR]/data
#if the filename has a %s it will be replaced with the current expected version
data.file_name=dc.h2.db
data.version=3.0
data.connection_string=jdbc:h2:file:%s;MV_STORE=FALSE;AUTOCOMMIT=ON;LOCK_MODE=0;FILE_LOCK=NO
data.connection_string=jdbc:h2:file:%s;MV_STORE=FALSE;AUTOCOMMIT=ON;
#data.connection_string=jdbc:mysql://localhost:3306/dependencycheck

# user name and password for the database connection. The inherent case is to use H2.
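The change above drops LOCK_MODE=0;FILE_LOCK=NO from the H2 URL, so H2 falls back to its default locking behaviour, which matters when more than one component opens the database. The %s placeholder is filled with the path to the database file at runtime; a minimal sketch of that substitution, where the path, user, and password are hypothetical and the H2 driver is assumed to be on the classpath:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public final class H2ConnectionSketch {
    // Sketch only: resolves the %s placeholder in data.connection_string and opens
    // the embedded H2 database. Requires the H2 driver on the classpath.
    static Connection open(String dbFilePath, String user, String password) throws SQLException {
        String url = String.format("jdbc:h2:file:%s;MV_STORE=FALSE;AUTOCOMMIT=ON;", dbFilePath);
        return DriverManager.getConnection(url, user, password);
    }
}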
@@ -16,4 +16,4 @@
# Copyright (c) 2014 Jeremy Long. All Rights Reserved.
#

invoker.goals = install ${project.groupId}:${project.artifactId}:${project.version}:check -X -T 12
invoker.goals = install ${project.groupId}:${project.artifactId}:${project.version}:check -X -T 10
@@ -81,8 +81,6 @@ public class UpdateMojo extends BaseDependencyCheckMojo {
throw new MojoExecutionException(msg, ex);
}
getLog().error(msg);
} finally {
getSettings().cleanup();
}
}

@@ -92,18 +92,18 @@ public class BaseDependencyCheckMojoTest extends BaseTest {

boolean autoUpdate = getSettings().getBoolean(Settings.KEYS.AUTO_UPDATE);
getSettings().setBoolean(Settings.KEYS.AUTO_UPDATE, false);
Engine engine = new Engine(getSettings());
getSettings().setBoolean(Settings.KEYS.AUTO_UPDATE, autoUpdate);
try (Engine engine = new Engine(getSettings())) {
getSettings().setBoolean(Settings.KEYS.AUTO_UPDATE, autoUpdate);

assertTrue(engine.getDependencies().length == 0);
BaseDependencyCheckMojoImpl instance = new BaseDependencyCheckMojoImpl();
try { //the mock above fails under some JDKs
instance.scanArtifacts(project, engine);
} catch (NullPointerException ex) {
Assume.assumeNoException(ex);
assertTrue(engine.getDependencies().length == 0);
BaseDependencyCheckMojoImpl instance = new BaseDependencyCheckMojoImpl();
try { //the mock above fails under some JDKs
instance.scanArtifacts(project, engine);
} catch (NullPointerException ex) {
Assume.assumeNoException(ex);
}
assertFalse(engine.getDependencies().length == 0);
}
assertFalse(engine.getDependencies().length == 0);
engine.close();
}
}

@@ -17,7 +17,7 @@ engine.version.url=http://jeremylong.github.io/DependencyCheck/current.txt
data.directory=[JAR]/data
data.file_name=dc.h2.db
data.version=3.0
data.connection_string=jdbc:h2:file:%s;MV_STORE=FALSE;AUTOCOMMIT=ON;LOCK_MODE=0;FILE_LOCK=NO
data.connection_string=jdbc:h2:file:%s;MV_STORE=FALSE;AUTOCOMMIT=ON;
#data.connection_string=jdbc:h2:file:%s;AUTO_SERVER=TRUE;AUTOCOMMIT=ON;
#data.connection_string=jdbc:mysql://localhost:3306/dependencycheck