fixed logging statements to use slf4j format syntax instead of String.format syntax

This commit is contained in:
Jeremy Long
2015-09-08 06:31:59 -04:00
parent 4b2b4e5482
commit fdbec176fa
4 changed files with 22 additions and 24 deletions

View File

@@ -89,16 +89,16 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
*/ */
private static final Set<String> ZIPPABLES = newHashSet("zip", "ear", "war", "jar", "sar", "apk", "nupkg"); private static final Set<String> ZIPPABLES = newHashSet("zip", "ear", "war", "jar", "sar", "apk", "nupkg");
/** /**
* The set of file extensions supported by this analyzer. Note for developers, any additions to this list will need * The set of file extensions supported by this analyzer. Note for developers, any additions to this list will need to be
* to be explicitly handled in {@link #extractFiles(File, File, Engine)}. * explicitly handled in {@link #extractFiles(File, File, Engine)}.
*/ */
private static final Set<String> EXTENSIONS = newHashSet("tar", "gz", "tgz", "bz2", "tbz2"); private static final Set<String> EXTENSIONS = newHashSet("tar", "gz", "tgz", "bz2", "tbz2");
/** /**
* Detects files with extensions to remove from the engine's collection of dependencies. * Detects files with extensions to remove from the engine's collection of dependencies.
*/ */
private static final FileFilter REMOVE_FROM_ANALYSIS = private static final FileFilter REMOVE_FROM_ANALYSIS
FileFilterBuilder.newInstance().addExtensions("zip", "tar", "gz", "tgz", "bz2", "tbz2").build(); = FileFilterBuilder.newInstance().addExtensions("zip", "tar", "gz", "tgz", "bz2", "tbz2").build();
static { static {
final String additionalZipExt = Settings.getString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS); final String additionalZipExt = Settings.getString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS);
@@ -195,7 +195,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
* and added to the list of dependencies within the engine. * and added to the list of dependencies within the engine.
* *
* @param dependency the dependency to analyze * @param dependency the dependency to analyze
* @param engine the engine scanning * @param engine the engine scanning
* @throws AnalysisException thrown if there is an analysis exception * @throws AnalysisException thrown if there is an analysis exception
*/ */
@Override @Override
@@ -239,7 +239,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
final File tdir = getNextTempDirectory(); final File tdir = getNextTempDirectory();
final String fileName = dependency.getFileName(); final String fileName = dependency.getFileName();
LOGGER.info(String.format("The zip file '%s' appears to be a JAR file, making a copy and analyzing it as a JAR.", fileName)); LOGGER.info("The zip file '{}' appears to be a JAR file, making a copy and analyzing it as a JAR.", fileName);
final File tmpLoc = new File(tdir, fileName.substring(0, fileName.length() - 3) + "jar"); final File tmpLoc = new File(tdir, fileName.substring(0, fileName.length() - 3) + "jar");
try { try {
@@ -286,7 +286,6 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
return newDependencies; return newDependencies;
} }
/** /**
* Retrieves the next temporary directory to extract an archive to. * Retrieves the next temporary directory to extract an archive to.
* *
@@ -310,9 +309,9 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
* Extracts the contents of an archive into the specified directory. * Extracts the contents of an archive into the specified directory.
* *
* @param archive an archive file such as a WAR or EAR * @param archive an archive file such as a WAR or EAR
* @param destination a directory to extract the contents to * @param destination a directory to extract the contents to
* @param engine the scanning engine * @param engine the scanning engine
* @throws AnalysisException thrown if the archive is not found * @throws AnalysisException thrown if the archive is not found
*/ */
private void extractFiles(File archive, File destination, Engine engine) throws AnalysisException { private void extractFiles(File archive, File destination, Engine engine) throws AnalysisException {
@@ -358,9 +357,9 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
* Extracts files from an archive. * Extracts files from an archive.
* *
* @param input the archive to extract files from * @param input the archive to extract files from
* @param destination the location to write the files to * @param destination the location to write the files to
* @param engine the dependency-check engine * @param engine the dependency-check engine
* @throws ArchiveExtractionException thrown if there is an exception extracting files from the archive * @throws ArchiveExtractionException thrown if there is an exception extracting files from the archive
*/ */
private void extractArchive(ArchiveInputStream input, File destination, Engine engine) throws ArchiveExtractionException { private void extractArchive(ArchiveInputStream input, File destination, Engine engine) throws ArchiveExtractionException {
@@ -422,7 +421,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
* Decompresses a file. * Decompresses a file.
* *
* @param inputStream the compressed file * @param inputStream the compressed file
* @param outputFile the location to write the decompressed file * @param outputFile the location to write the decompressed file
* @throws ArchiveExtractionException thrown if there is an exception decompressing the file * @throws ArchiveExtractionException thrown if there is an exception decompressing the file
*/ */
private void decompressFile(CompressorInputStream inputStream, File outputFile) throws ArchiveExtractionException { private void decompressFile(CompressorInputStream inputStream, File outputFile) throws ArchiveExtractionException {

View File

@@ -167,7 +167,7 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
dependency.getProductEvidence().addEvidence(name, "Project", dependency.getProductEvidence().addEvidence(name, "Project",
group, Confidence.HIGH); group, Confidence.HIGH);
} }
LOGGER.debug(String.format("Found %d matches.", count)); LOGGER.debug("Found {} matches.", count);
analyzeSetVersionCommand(dependency, engine, contents); analyzeSetVersionCommand(dependency, engine, contents);
} }
} }
@@ -178,9 +178,8 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
int count = 0; int count = 0;
while (m.find()) { while (m.find()) {
count++; count++;
LOGGER.debug(String.format( LOGGER.debug("Found project command match with {} groups: {}",
"Found project command match with %d groups: %s", m.groupCount(), m.group(0));
m.groupCount(), m.group(0)));
String product = m.group(1); String product = m.group(1);
final String version = m.group(2); final String version = m.group(2);
LOGGER.debug("Group 1: " + product); LOGGER.debug("Group 1: " + product);

View File

@@ -247,7 +247,7 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
} }
} catch (IllegalArgumentException iae) { } catch (IllegalArgumentException iae) {
//dependency.addAnalysisException(new AnalysisException("Invalid SHA-1")); //dependency.addAnalysisException(new AnalysisException("Invalid SHA-1"));
LOGGER.info(String.format("invalid sha-1 hash on %s", dependency.getFileName())); LOGGER.info("invalid sha-1 hash on {}", dependency.getFileName());
} catch (FileNotFoundException fnfe) { } catch (FileNotFoundException fnfe) {
//dependency.addAnalysisException(new AnalysisException("Artifact not found on repository")); //dependency.addAnalysisException(new AnalysisException("Artifact not found on repository"));
LOGGER.debug("Artifact not found in repository '{}'", dependency.getFileName()); LOGGER.debug("Artifact not found in repository '{}'", dependency.getFileName());

View File

@@ -40,8 +40,8 @@ import javax.json.JsonString;
import javax.json.JsonValue; import javax.json.JsonValue;
/** /**
* Used to analyze Node Package Manager (npm) package.json files, and collect information that can be used to determine * Used to analyze Node Package Manager (npm) package.json files, and collect information that can be used to determine the
* the associated CPE. * associated CPE.
* *
* @author Dale Visser <dvisser@ida.org> * @author Dale Visser <dvisser@ida.org>
*/ */
@@ -66,8 +66,8 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
* Filter that detects files named "package.json". * Filter that detects files named "package.json".
*/ */
private static final FileFilter PACKAGE_JSON_FILTER = private static final FileFilter PACKAGE_JSON_FILTER
FileFilterBuilder.newInstance().addFilenames(PACKAGE_JSON).build(); = FileFilterBuilder.newInstance().addFilenames(PACKAGE_JSON).build();
/** /**
* Returns the FileFilter * Returns the FileFilter
@@ -136,7 +136,7 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
productEvidence.addEvidence(PACKAGE_JSON, "name", valueString, Confidence.HIGHEST); productEvidence.addEvidence(PACKAGE_JSON, "name", valueString, Confidence.HIGHEST);
vendorEvidence.addEvidence(PACKAGE_JSON, "name_project", String.format("%s_project", valueString), Confidence.LOW); vendorEvidence.addEvidence(PACKAGE_JSON, "name_project", String.format("%s_project", valueString), Confidence.LOW);
} else { } else {
LOGGER.warn("JSON value not string as expected: %s", value); LOGGER.warn("JSON value not string as expected: {}", value);
} }
} }
addToEvidence(json, productEvidence, "description"); addToEvidence(json, productEvidence, "description");
@@ -166,11 +166,11 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
((JsonString) subValue).getString(), ((JsonString) subValue).getString(),
Confidence.HIGHEST); Confidence.HIGHEST);
} else { } else {
LOGGER.warn("JSON sub-value not string as expected: %s", subValue); LOGGER.warn("JSON sub-value not string as expected: {}", subValue);
} }
} }
} else { } else {
LOGGER.warn("JSON value not string or JSON object as expected: %s", value); LOGGER.warn("JSON value not string or JSON object as expected: {}", value);
} }
} }
} }