task-2628: Update Logging to SLF4j in datawave-warehouse/ingest-core pt2 #2707

Open
wants to merge 7 commits into base: integration
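This PR continues converting ingest-core classes from Log4j 1.x to SLF4J. The recurring pattern in the diffs below: swap the Logger import and factory call, replace string concatenation with {} placeholders, and drop isDebugEnabled() guards that only protected cheap message construction. A minimal before/after sketch on a hypothetical class (not one of the files changed here):

// Before (Log4j 1.x):
//   private static final Logger log = Logger.getLogger(Example.class);
//   if (log.isDebugEnabled()) {
//       log.debug("Reading cache at " + path);
//   }

// After (SLF4J):
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Example {
    private static final Logger log = LoggerFactory.getLogger(Example.class);

    void read(String path) {
        // The {} placeholder is only rendered when DEBUG is enabled, so the
        // explicit guard is no longer needed for cheap arguments like a String.
        log.debug("Reading cache at {}", path);
    }
}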
SnowflakeUIDBuilder.java
@@ -9,7 +9,8 @@
import java.util.Map;

import org.apache.commons.cli.Option;
import org.apache.log4j.Logger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Builds a sequence of SnowflakeUIDs for a particular "machine" instance, which is based on a unique combination of host, process, and process thread.
@@ -18,7 +19,7 @@ public class SnowflakeUIDBuilder extends AbstractUIDBuilder<SnowflakeUID> {

private static final BigInteger UNDEFINED_MACHINE_ID = BigInteger.valueOf(-1);
private static final BigInteger UNDEFINED_SNOWFLAKE = BigInteger.valueOf(-1);
private static final Logger LOGGER = Logger.getLogger(SnowflakeUIDBuilder.class);
private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeUIDBuilder.class);

private final BigInteger mid;

@@ -259,10 +260,7 @@ protected static int newMachineId(final Map<String,Option> options) {
try {
hostId = Integer.parseInt(option.getValue());
} catch (final Exception e) {
if (LOGGER.isDebugEnabled()) {
final String message = "Invalid " + HOST_INDEX_OPT + ": " + option;
LOGGER.warn(message, e);
}
LOGGER.warn("Invalid {}: {}", HOST_INDEX_OPT, option);
}
}

@@ -271,10 +269,7 @@ protected static int newMachineId(final Map<String,Option> options) {
try {
processId = Integer.parseInt(option.getValue());
} catch (final Exception e) {
if (LOGGER.isDebugEnabled()) {
final String message = "Invalid " + PROCESS_INDEX_OPT + ": " + option;
LOGGER.warn(message, e);
}
LOGGER.warn("Invalid {}: {}", PROCESS_INDEX_OPT, option);
}
}

@@ -283,20 +278,14 @@ protected static int newMachineId(final Map<String,Option> options) {
try {
threadId = Integer.parseInt(option.getValue());
} catch (final Exception e) {
if (LOGGER.isDebugEnabled()) {
final String message = "Invalid " + THREAD_INDEX_OPT + ": " + option;
LOGGER.warn(message, e);
}
LOGGER.warn("Invalid {}: {}", THREAD_INDEX_OPT, option);
}
}

try {
machineId = validateMachineIds(hostId, processId, threadId).intValue();
} catch (Exception e) {
if (LOGGER.isDebugEnabled()) {
final String message = "Unable to generate Snowflake machine ID";
LOGGER.warn(message, e);
}
LOGGER.warn("Unable to generate Snowflake machine ID", e);
}

return machineId;
@@ -387,8 +376,8 @@ private long validateTimestamp(long timestamp) {
}

if (timestamp <= this.previousTid) {
LOGGER.warn("Current tid is less than the previous. This could cause uid collisions.\n" + "Mid: " + mid + ", Timestamp: " + timestamp
+ ", Previous: " + previousTid + ", System Time: " + System.currentTimeMillis());
LOGGER.warn("Current tid is less than the previous. This could cause uid collisions.\nMid: {}, Timestamp: {}, Previous: {}, System Time: {}", mid,
timestamp, previousTid, System.currentTimeMillis());
timestamp = this.previousTid + 1;
}

@@ -404,12 +393,9 @@ private void storeTimestamp() {
if (ZkSnowflakeCache.isInitialized()) {
try {
ZkSnowflakeCache.store(mid, this.previousTid);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Caching ZK ts: " + this.previousTid + ", mid: " + this.mid);
}

LOGGER.debug("Caching ZK ts: {}, mid: {}", this.previousTid, this.mid);
} catch (Exception e) {
LOGGER.error("Unable to store snowflake id from zookeeper for " + mid, e);
LOGGER.error("Unable to store snowflake id from zookeeper for {}", mid, e);
throw new RuntimeException(e);
}
}
@@ -421,14 +407,10 @@ private long initializeTimestamp() {
if (ZkSnowflakeCache.isInitialized()) {
try {
lastCachedTid = ZkSnowflakeCache.getLastCachedTid(this.mid);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Getting ZK ts: " + lastCachedTid + " mid: " + this.mid);
}

LOGGER.debug("Getting ZK ts: {}, mid: {}", lastCachedTid, this.mid);
} catch (Exception e) {
LOGGER.error("Unable to retrieve snowflake id from zookeeper for " + mid, e);
LOGGER.error("Unable to retrieve snowflake id from zookeeper for {}", mid, e);
throw new RuntimeException(e);

}
}
if (lastCachedTid > 0) {
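Several of the converted error calls above (for example, LOGGER.error("Unable to store snowflake id from zookeeper for {}", mid, e)) rely on an SLF4J convention worth noting: when the final argument is a Throwable and no placeholder is left to consume it, it is logged as the exception, stack trace included. A minimal illustration, using a made-up class and exception rather than anything from this PR:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class TrailingThrowableExample {
    private static final Logger log = LoggerFactory.getLogger(TrailingThrowableExample.class);

    void demo(int mid) {
        Exception e = new IllegalStateException("zookeeper unavailable");
        // One placeholder, two arguments: mid fills the {}, and because the final
        // argument is a Throwable with no placeholder of its own, SLF4J logs it
        // as the exception, stack trace included.
        log.error("Unable to store snowflake id from zookeeper for {}", mid, e);

        // By contrast, giving the Throwable its own placeholder only logs
        // e.toString(); the stack trace is dropped.
        log.error("Unable to store snowflake id from zookeeper for {}: {}", mid, e);
    }
}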
TableCreator.java
@@ -1,15 +1,16 @@
package datawave.ingest;

import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import datawave.ingest.mapreduce.job.TableConfigurationUtil;

public class TableCreator {

private static final Configuration config = new Configuration();

private static final Logger log = Logger.getLogger(TableCreator.class);
private static final Logger log = LoggerFactory.getLogger(TableCreator.class);

public static void main(String[] args) {
Configuration conf = OptionsParser.parseArguments(args, config);
BaseHdfsFileCacheUtil.java
@@ -9,7 +9,8 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import datawave.ingest.data.config.ingest.AccumuloHelper;

@@ -23,7 +24,7 @@ public abstract class BaseHdfsFileCacheUtil {
private static final int MAX_RETRIES = 3;
protected short cacheReplicas = 3;

private static final Logger log = Logger.getLogger(BaseHdfsFileCacheUtil.class);
private static final Logger log = LoggerFactory.getLogger(BaseHdfsFileCacheUtil.class);

public BaseHdfsFileCacheUtil(Configuration conf) {
Validate.notNull(conf, "Configuration object passed in null");
@@ -48,7 +49,7 @@ public void read() throws IOException {
while (retry && attempts <= MAX_RETRIES) {
attempts++;

log.info("Reading cache at " + this.cacheFilePath);
log.info("Reading cache at {}", this.cacheFilePath);
try (BufferedReader in = new BufferedReader(new InputStreamReader(FileSystem.get(this.cacheFilePath.toUri(), conf).open(this.cacheFilePath)))) {
readCache(in);
retry = false;
@@ -80,7 +81,7 @@ public void update() {
cleanup(fs, tempFile);
}

log.error("Unable to update cache file " + cacheFilePath + ". " + e.getMessage(), e);
log.error("Unable to update cache file {}. {}", cacheFilePath, e.getMessage(), e);
}

}
@@ -99,18 +100,18 @@ public void createCacheFile(FileSystem fs, Path tmpCacheFile) {
throw new IOException("Failed to rename temporary cache file");
}
} catch (Exception e) {
log.warn("Unable to rename " + tmpCacheFile + " to " + this.cacheFilePath + "probably because somebody else replaced it ", e);
log.warn("Unable to rename {} to {} probably because somebody else replaced it", tmpCacheFile, this.cacheFilePath, e);
cleanup(fs, tmpCacheFile);
}
log.info("Updated " + cacheFilePath);
log.info("Updated {}", cacheFilePath);

}

protected void cleanup(FileSystem fs, Path tmpCacheFile) {
try {
fs.delete(tmpCacheFile, false);
} catch (Exception e) {
log.error("Unable to clean up " + tmpCacheFile, e);
log.error("Unable to clean up {}", tmpCacheFile, e);
}
}

@@ -132,7 +133,7 @@ public Path createTempFile(FileSystem fs) throws IOException {
do {
Path parentDirectory = this.cacheFilePath.getParent();
String fileName = this.cacheFilePath.getName() + "." + count;
log.info("Attempting to create " + fileName + "under " + parentDirectory);
log.info("Attempting to create {} under {}", fileName, parentDirectory);
tmpCacheFile = new Path(parentDirectory, fileName);
count++;
} while (!fs.createNewFile(tmpCacheFile));
TableConfigCache.java
@@ -12,7 +12,8 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TableConfigCache extends BaseHdfsFileCacheUtil {

@@ -27,7 +28,7 @@ public class TableConfigCache extends BaseHdfsFileCacheUtil {

private static final Object lock = new Object();

protected static final Logger log = Logger.getLogger("datawave.ingest");
protected static final Logger log = LoggerFactory.getLogger("datawave.ingest");

private TableConfigCache(Configuration conf) {
super(conf);
@@ -58,24 +59,24 @@ public boolean isInitialized() {
public void writeCacheFile(FileSystem fs, Path tmpCacheFile) throws IOException {
Map<String,Map<String,String>> tempValidationMap = configMap;

log.info("Writing to temp file " + tmpCacheFile.getName());
log.info("Writing to temp file {}", tmpCacheFile.getName());
try (PrintStream out = new PrintStream(new BufferedOutputStream(fs.create(tmpCacheFile)), false, "UTF-8")) {
for (Map.Entry<String,Map<String,String>> table : configMap.entrySet()) {
for (Map.Entry tableProp : table.getValue().entrySet()) {
out.println(table.getKey() + this.delimiter + tableProp.getKey() + this.delimiter + tableProp.getValue());
}
}
} catch (IOException e) {
log.error("Unable to write cache file " + tmpCacheFile, e);
log.error("Unable to write cache file {}", tmpCacheFile, e);
throw e;
}

// validate temp file
log.info("Validating file: " + tmpCacheFile.getName());
log.info("Validating file: {}", tmpCacheFile.getName());
try (BufferedReader in = new BufferedReader(new InputStreamReader(FileSystem.get(tmpCacheFile.toUri(), conf).open(tmpCacheFile)))) {
readCache(in);
} catch (IOException ex) {
log.error("Error reading cache temp file: " + tmpCacheFile, ex);
log.error("Error reading cache temp file: {}", tmpCacheFile, ex);
throw ex;
}

@@ -138,7 +139,7 @@ public Map<String,String> getTableProperties(String tableName) throws IOException {
read();
}
if (null == this.configMap.get(tableName) || this.configMap.get(tableName).isEmpty()) {
log.error("No accumulo config cache for " + tableName + ". Please generate the accumulo config cache after ensuring the table exists.");
log.error("No accumulo config cache for {}. Please generate the accumulo config cache after ensuring the table exists.", tableName);
}
return this.configMap.get(tableName);

TableConfigCacheGenerator.java
@@ -1,15 +1,16 @@
package datawave.ingest.config;

import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import datawave.ingest.OptionsParser;
import datawave.ingest.mapreduce.job.TableConfigurationUtil;

public class TableConfigCacheGenerator {
protected static final Configuration config = new Configuration();

protected static final Logger log = Logger.getLogger(TableConfigCache.class);
protected static final Logger log = LoggerFactory.getLogger(TableConfigCache.class);

public static void main(String[] args) {

Expand All @@ -19,7 +20,7 @@ public static void main(String[] args) {
TableConfigurationUtil tcu = new TableConfigurationUtil(conf);
tcu.updateCacheFile();
} catch (Exception e) {
log.error("Unable to generate accumulo config cache " + e.getMessage());
log.error("Unable to generate accumulo config cache {}", e.getMessage());
}
}

TypeRegistry.java
@@ -12,12 +12,12 @@

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.log4j.Logger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import datawave.core.common.logging.ThreadConfigurableLogger;
import datawave.ingest.data.config.ConfigurationHelper;
import datawave.ingest.data.config.DataTypeOverrideHelper;
import datawave.ingest.data.config.filter.KeyValueFilter;
@@ -28,7 +28,7 @@

public class TypeRegistry extends HashMap<String,Type> {

private static final Logger log = ThreadConfigurableLogger.getLogger(TypeRegistry.class);
private static final Logger log = LoggerFactory.getLogger(TypeRegistry.class);

public static final String ALL_PREFIX = "all";

@@ -73,7 +73,7 @@ public static TypeRegistry getInstance(Configuration config) {
}

/**
* Helps determine whether or not the registry instance has been instantiated.
* Helps determine whether the registry instance has been instantiated.
*
* @return true if the registry exists, false otherwise
*/
@@ -189,13 +189,13 @@ private TypeRegistry(Configuration config) {
try {
helperClassName = ConfigurationHelper.isNull(config, typeName + INGEST_HELPER, String.class);
} catch (IllegalArgumentException e) {
log.debug("No helper class defined for type: " + typeName);
log.debug("No helper class defined for type: {}", typeName);
}
String readerClassName = null;
try {
readerClassName = ConfigurationHelper.isNull(config, typeName + RAW_READER, String.class);
} catch (IllegalArgumentException e) {
log.debug("No reader class defined for type: " + typeName);
log.debug("No reader class defined for type: {}", typeName);
}
String[] handlerClassNames = null;
try {
@@ -207,7 +207,7 @@
.asList(StringUtils.trimAndRemoveEmptyStrings(ConfigurationHelper.isNull(config, EXCLUDED_HANDLER_CLASSES, String[].class)));
handlerClassNames = getClassnamesWithoutExclusions(handlerClassNames, exclusions);
} catch (IllegalArgumentException e) {
log.debug("No handler classes defined for type: " + typeName);
log.debug("No handler classes defined for type: {}", typeName);
}

String[] filterClassNames = null;
@@ -217,7 +217,7 @@
StringUtils.trimAndRemoveEmptyStrings(ConfigurationHelper.isNull(config, typeName + FILTER_CLASSES, String[].class)));
filterPriority = config.getInt(typeName + FILTER_PRIORITY, Integer.MAX_VALUE);
} catch (IllegalArgumentException e) {
log.debug("No filter classes defined for type: " + typeName);
log.debug("No filter classes defined for type: {}", typeName);
}

String outputName = config.get(typeName + OUTPUT_NAME, typeName);
@@ -236,27 +236,27 @@ private TypeRegistry(Configuration config) {
// performing `configurationKey.split(".")[0]`. Using a period inside datatype name muddies later code
// due to the manner than Hadoop Configurations operate.
if (typeName.indexOf('.') != -1) {
log.error("Datatypes ('" + INGEST_DATA_TYPES + "') cannot contain a period. Offending datatype: '" + typeName + "'");
log.error("Datatypes ({}) cannot contain a period. Offending datatype: {}", INGEST_DATA_TYPES, typeName);
throw new IllegalArgumentException(
"Datatypes ('" + INGEST_DATA_TYPES + "') cannot contain a period. Offending datatype: '" + typeName + "'");
}

Type t = new Type(typeName, outputName, helperClass, readerClass, handlerClassNames, filterPriority, filterClassNames);
log.debug("Registered type " + t);
log.debug("Registered type {}", t);
this.put(typeName, t);

if (null != config.get(typeName + DataTypeOverrideHelper.Properties.DATA_TYPE_VALUES)) {
for (String type : config.getStrings(typeName + DataTypeOverrideHelper.Properties.DATA_TYPE_VALUES)) {
outputName = config.get(type + OUTPUT_NAME, outputName);
t = new Type(type, outputName, helperClass, readerClass, handlerClassNames, filterPriority, filterClassNames);
log.debug("Registered child type:" + type);
log.debug("Registered child type: {}", type);
this.put(type, t);
}
}
}

} catch (ClassNotFoundException cnfe) {
log.error("Unable to create supporting class for type " + typeName, cnfe);
log.error("Unable to create supporting class for type {}", typeName, cnfe);
}

}
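One general caveat with the parameterized style, as a general SLF4J note rather than an issue observed in this diff: the {} placeholder defers message formatting, but argument expressions are still evaluated at the call site. When an argument is genuinely expensive to compute, keeping an isDebugEnabled() guard is still reasonable. A hypothetical sketch (expensiveSummary() is made up for illustration):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class GuardedDebugExample {
    private static final Logger log = LoggerFactory.getLogger(GuardedDebugExample.class);

    // Hypothetical stand-in for any argument that is costly to build.
    private String expensiveSummary() {
        return "summary"; // imagine walking a large registry map here
    }

    void report() {
        // {} defers message formatting, but expensiveSummary() would still run
        // on every call; the guard keeps it from running when DEBUG is off.
        if (log.isDebugEnabled()) {
            log.debug("Registry state: {}", expensiveSummary());
        }
    }
}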