diff --git a/warehouse/core/src/main/java/datawave/data/hash/AbstractUIDBuilder.java b/warehouse/core/src/main/java/datawave/data/hash/AbstractUIDBuilder.java
index 8a8f6357ae5..ab7038bf572 100644
--- a/warehouse/core/src/main/java/datawave/data/hash/AbstractUIDBuilder.java
+++ b/warehouse/core/src/main/java/datawave/data/hash/AbstractUIDBuilder.java
@@ -88,13 +88,13 @@ private void configure(final Configuration config, final Map opti
             if (options.size() < 4) {
                 uidType = HashUID.class.getSimpleName();
                 LOGGER.warn("Unable to configure UID type {}", SnowflakeUID.class.getSimpleName(),
-                                new IllegalArgumentException("Insufficient number of 'Snowflake' options: " + options));
+                                new IllegalArgumentException("Insufficient number of 'Snowflake' options: " + options));
             }
         } else if (!HashUID.class.getSimpleName().equals(uidType)) {
             final String invalidType = uidType;
             uidType = HashUID.class.getSimpleName();
             LOGGER.warn("Defaulting configuration to UID type {} due to unspecified value", HashUID.class.getSimpleName(),
-                            new IllegalArgumentException("Unrecognized UID type: " + invalidType));
+                            new IllegalArgumentException("Unrecognized UID type: " + invalidType));
         }
 
         config.set(CONFIG_UID_TYPE_KEY, uidType, this.getClass().getName());
@@ -102,12 +102,12 @@ private void configure(final Configuration config, final Map opti
         if (SnowflakeUID.class.getSimpleName().equals(uidType)) {
             int machineId = SnowflakeUIDBuilder.newMachineId(options);
             if (machineId >= 0) {
-                LOGGER.debug("Setting configuration {} to use {} based on UID type {} and machine ID {}",
-                                config.hashCode(), SnowflakeUIDBuilder.class.getSimpleName(), uidType, machineId);
+                LOGGER.debug("Setting configuration {} to use {} based on UID type {} and machine ID {}", config.hashCode(),
+                                SnowflakeUIDBuilder.class.getSimpleName(), uidType, machineId);
                 config.setInt(CONFIG_MACHINE_ID_KEY, machineId);
             } else if (LOGGER.isDebugEnabled()) {
-                LOGGER.warn("Unable to set configuration to use {} based on UID type {} with machine ID {}",
-                                SnowflakeUIDBuilder.class.getSimpleName(), uidType, machineId);
+                LOGGER.warn("Unable to set configuration to use {} based on UID type {} with machine ID {}", SnowflakeUIDBuilder.class.getSimpleName(), uidType,
+                                machineId);
                 config.set(CONFIG_UID_TYPE_KEY, HashUID.class.getSimpleName(), this.getClass().getName());
             }
         }
diff --git a/warehouse/core/src/main/java/datawave/data/hash/ZkSnowflakeCache.java b/warehouse/core/src/main/java/datawave/data/hash/ZkSnowflakeCache.java
index 244286cb5bd..62cdee9be5b 100644
--- a/warehouse/core/src/main/java/datawave/data/hash/ZkSnowflakeCache.java
+++ b/warehouse/core/src/main/java/datawave/data/hash/ZkSnowflakeCache.java
@@ -7,9 +7,9 @@
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
 import org.apache.curator.retry.RetryNTimes;
+import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.zookeeper.data.Stat;
 
 public class ZkSnowflakeCache {
diff --git a/warehouse/core/src/main/java/datawave/edge/model/DefaultEdgeModelFieldsFactory.java b/warehouse/core/src/main/java/datawave/edge/model/DefaultEdgeModelFieldsFactory.java
index f9185c77882..2e20849da7f 100644
--- a/warehouse/core/src/main/java/datawave/edge/model/DefaultEdgeModelFieldsFactory.java
+++ b/warehouse/core/src/main/java/datawave/edge/model/DefaultEdgeModelFieldsFactory.java
@@ -49,8 +49,8 @@ public EdgeModelFields createFields() {
             fields.setTransformFieldMap((Map) context.getBean(TRANSFORM_MODEL_BEAN));
         } catch (Throwable t) {
             log.error(fatal, "Edge model configuration not loaded!! Edge queries will fail until this issue is corrected.");
-            log.error(fatal, "Ensure that the Spring config file {} is on the classpath and contains bean names {}, {}, and {}",
-                            EDGE_MODEL_CONTEXT, BASE_MODEL_BEAN, KEYUTIL_MODEL_BEAN, TRANSFORM_MODEL_BEAN, t);
+            log.error(fatal, "Ensure that the Spring config file {} is on the classpath and contains bean names {}, {}, and {}", EDGE_MODEL_CONTEXT,
+                            BASE_MODEL_BEAN, KEYUTIL_MODEL_BEAN, TRANSFORM_MODEL_BEAN, t);
         } finally {
             if (context != null) {
                 context.close();
diff --git a/warehouse/core/src/main/java/datawave/edge/util/EdgeKey.java b/warehouse/core/src/main/java/datawave/edge/util/EdgeKey.java
index dc93113ff09..a7284fe4bd6 100644
--- a/warehouse/core/src/main/java/datawave/edge/util/EdgeKey.java
+++ b/warehouse/core/src/main/java/datawave/edge/util/EdgeKey.java
@@ -155,7 +155,7 @@ public EdgeKey build() {
             String tempSinkData = this.sinkData;
             try {
                 log.trace("Attempting escape sequencing isEscape? {} isUnescape? {}", escape, unescape);
-                log.trace("Values before attempt source data {}, sink data {}",tempSourceData, tempSinkData);
+                log.trace("Values before attempt source data {}, sink data {}", tempSourceData, tempSinkData);
                 if (escape && !unescape) {
                     tempSourceData = StringEscapeUtils.escapeJava(sourceData);
                     tempSinkData = StringEscapeUtils.escapeJava(sinkData);
diff --git a/warehouse/index-stats/src/main/java/datawave/mapreduce/shardStats/StatsJob.java b/warehouse/index-stats/src/main/java/datawave/mapreduce/shardStats/StatsJob.java
index a20acfd22b4..dd829ed0f95 100644
--- a/warehouse/index-stats/src/main/java/datawave/mapreduce/shardStats/StatsJob.java
+++ b/warehouse/index-stats/src/main/java/datawave/mapreduce/shardStats/StatsJob.java
@@ -71,7 +71,6 @@
  */
 public class StatsJob extends IngestJob {
-
     // default values used by both mapper and reducer
     // constants for hyperloglogplus
     static final int HYPERLOG_SPARSE_DEFAULT_VALUE = 24;
diff --git a/warehouse/index-stats/src/test/java/datawave/mapreduce/shardStats/StatsHyperLogMapperTest.java b/warehouse/index-stats/src/test/java/datawave/mapreduce/shardStats/StatsHyperLogMapperTest.java
index 36a4c759823..5b77ea92e3e 100644
--- a/warehouse/index-stats/src/test/java/datawave/mapreduce/shardStats/StatsHyperLogMapperTest.java
+++ b/warehouse/index-stats/src/test/java/datawave/mapreduce/shardStats/StatsHyperLogMapperTest.java
@@ -17,10 +17,10 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.mapreduce.Mapper;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import datawave.ingest.config.TableConfigCache;
 import datawave.ingest.mapreduce.handler.shard.ShardedDataTypeHandler;
diff --git a/warehouse/index-stats/src/test/java/datawave/mapreduce/shardStats/StatsHyperLogReducerTest.java b/warehouse/index-stats/src/test/java/datawave/mapreduce/shardStats/StatsHyperLogReducerTest.java
index 812edc3563e..6c21ce04093 100644
--- a/warehouse/index-stats/src/test/java/datawave/mapreduce/shardStats/StatsHyperLogReducerTest.java
+++ b/warehouse/index-stats/src/test/java/datawave/mapreduce/shardStats/StatsHyperLogReducerTest.java
@@ -19,10 +19,10 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.Reducer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.clearspring.analytics.stream.cardinality.HyperLogLogPlus;
diff --git a/warehouse/index-stats/src/test/java/datawave/mapreduce/shardStats/StatsHyperLogSummaryTest.java b/warehouse/index-stats/src/test/java/datawave/mapreduce/shardStats/StatsHyperLogSummaryTest.java
index 24c4e68876d..904c2ca29bc 100644
--- a/warehouse/index-stats/src/test/java/datawave/mapreduce/shardStats/StatsHyperLogSummaryTest.java
+++ b/warehouse/index-stats/src/test/java/datawave/mapreduce/shardStats/StatsHyperLogSummaryTest.java
@@ -9,10 +9,10 @@
 import org.apache.accumulo.core.data.Value;
 import org.apache.commons.lang.RandomStringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.clearspring.analytics.stream.cardinality.HyperLogLogPlus;
diff --git a/warehouse/index-stats/src/test/java/datawave/mapreduce/shardStats/StatsJobTest.java b/warehouse/index-stats/src/test/java/datawave/mapreduce/shardStats/StatsJobTest.java
index e34e115eb71..09019cc7adc 100644
--- a/warehouse/index-stats/src/test/java/datawave/mapreduce/shardStats/StatsJobTest.java
+++ b/warehouse/index-stats/src/test/java/datawave/mapreduce/shardStats/StatsJobTest.java
@@ -7,12 +7,12 @@
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.junit.Assert;
 import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import datawave.ingest.mapreduce.job.IngestJob;
diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/data/TypeRegistry.java b/warehouse/ingest-core/src/main/java/datawave/ingest/data/TypeRegistry.java
index 78cba2f1f5a..c4329b389db 100644
--- a/warehouse/ingest-core/src/main/java/datawave/ingest/data/TypeRegistry.java
+++ b/warehouse/ingest-core/src/main/java/datawave/ingest/data/TypeRegistry.java
@@ -243,7 +243,7 @@ private TypeRegistry(Configuration config) {
                 }
 
                 Type t = new Type(typeName, outputName, helperClass, readerClass, handlerClassNames, filterPriority, filterClassNames);
-                log.debug("Registered type {}" , t);
+                log.debug("Registered type {}", t);
                 this.put(typeName, t);
 
                 if (null != config.get(typeName + DataTypeOverrideHelper.Properties.DATA_TYPE_VALUES)) {
diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/data/config/XMLFieldConfigHelper.java b/warehouse/ingest-core/src/main/java/datawave/ingest/data/config/XMLFieldConfigHelper.java
index dfca4f17fde..3670b6ee23e 100644
--- a/warehouse/ingest-core/src/main/java/datawave/ingest/data/config/XMLFieldConfigHelper.java
+++ b/warehouse/ingest-core/src/main/java/datawave/ingest/data/config/XMLFieldConfigHelper.java
@@ -14,9 +14,9 @@
 import javax.xml.parsers.SAXParser;
 import javax.xml.parsers.SAXParserFactory;
 
+import org.apache.xerces.jaxp.SAXParserFactoryImpl;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.xerces.jaxp.SAXParserFactoryImpl;
 import org.xml.sax.Attributes;
 import org.xml.sax.SAXException;
 import org.xml.sax.helpers.DefaultHandler;
diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/IngestJob.java b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/IngestJob.java
index 5a13c39ee33..2b24cac8f49 100644
--- a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/IngestJob.java
+++ b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/IngestJob.java
@@ -77,9 +77,9 @@
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.log4j.ConsoleAppender;
 import org.apache.log4j.Level;
+import org.apache.log4j.PatternLayout;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.log4j.PatternLayout;
 import org.springframework.util.StopWatch;
 
 import datawave.ingest.config.TableConfigCache;
@@ -257,12 +257,12 @@ public int run(String[] args) throws Exception {
         StopWatch sw = new StopWatch("Ingest Job");
         sw.start("local init");
 
-// LoggerFactory.getLogger(TypeRegistry.class).setLevel(Level.ALL);
+        // LoggerFactory.getLogger(TypeRegistry.class).setLevel(Level.ALL);
         ca.setLayout(new PatternLayout("%p [%c{1}] %m%n"));
         ca.setThreshold(Level.INFO);
-// log.addAppender(ca);
-// log.setLevel(Level.INFO);
+        // log.addAppender(ca);
+        // log.setLevel(Level.INFO);
 
         // Initialize the markings file helper so we get the right markings file
         MarkingFunctions.Factory.createMarkingFunctions();
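For reference, the LOGGER.warn and LOGGER.debug calls rewrapped above use SLF4J parameterized logging: each {} placeholder is filled from the arguments in order, and when the final argument is a Throwable it is emitted as a stack trace rather than bound to a placeholder. Below is a minimal, self-contained sketch of that idiom; the class name, values, and messages are illustrative only and are not taken from the patch.

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Slf4jIdiomExample {
        private static final Logger LOGGER = LoggerFactory.getLogger(Slf4jIdiomExample.class);

        public static void main(String[] args) {
            String uidType = "SnowflakeUID";
            int machineId = -1;

            // Placeholders are substituted left to right; no string concatenation is needed,
            // and the message is only formatted if the DEBUG level is enabled.
            LOGGER.debug("Setting configuration to use UID type {} and machine ID {}", uidType, machineId);

            // When the last argument is a Throwable, SLF4J logs it with its stack trace
            // instead of binding it to a {} placeholder.
            LOGGER.warn("Unable to configure UID type {}", uidType,
                            new IllegalArgumentException("Insufficient number of 'Snowflake' options"));
        }
    }

This trailing-Throwable behavior is why the calls in the hunks above can pass a new IllegalArgumentException(...) as an extra argument without a matching {} in the format string.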