GitHub Actions: Fix Formatting
GitHub Actions committed Jan 15, 2025
1 parent f45f1aa commit d13aed6
Showing 12 changed files with 24 additions and 25 deletions.
@@ -88,26 +88,26 @@ private void configure(final Configuration config, final Map<String,Option> opti
if (options.size() < 4) {
uidType = HashUID.class.getSimpleName();
LOGGER.warn("Unable to configure UID type {}", SnowflakeUID.class.getSimpleName(),
- new IllegalArgumentException("Insufficient number of 'Snowflake' options: " + options));
+ new IllegalArgumentException("Insufficient number of 'Snowflake' options: " + options));
}
} else if (!HashUID.class.getSimpleName().equals(uidType)) {
final String invalidType = uidType;
uidType = HashUID.class.getSimpleName();
LOGGER.warn("Defaulting configuration to UID type {} due to unspecified value", HashUID.class.getSimpleName(),
- new IllegalArgumentException("Unrecognized UID type: " + invalidType));
+ new IllegalArgumentException("Unrecognized UID type: " + invalidType));
}
config.set(CONFIG_UID_TYPE_KEY, uidType, this.getClass().getName());

// Configure Snowflake machine ID
if (SnowflakeUID.class.getSimpleName().equals(uidType)) {
int machineId = SnowflakeUIDBuilder.newMachineId(options);
if (machineId >= 0) {
LOGGER.debug("Setting configuration {} to use {} based on UID type {} and machine ID {}",
config.hashCode(), SnowflakeUIDBuilder.class.getSimpleName(), uidType, machineId);
LOGGER.debug("Setting configuration {} to use {} based on UID type {} and machine ID {}", config.hashCode(),
SnowflakeUIDBuilder.class.getSimpleName(), uidType, machineId);
config.setInt(CONFIG_MACHINE_ID_KEY, machineId);
} else if (LOGGER.isDebugEnabled()) {
LOGGER.warn("Unable to set configuration to use {} based on UID type {} with machine ID {}",
SnowflakeUIDBuilder.class.getSimpleName(), uidType, machineId);
LOGGER.warn("Unable to set configuration to use {} based on UID type {} with machine ID {}", SnowflakeUIDBuilder.class.getSimpleName(), uidType,
machineId);
config.set(CONFIG_UID_TYPE_KEY, HashUID.class.getSimpleName(), this.getClass().getName());
}
}
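
A side note on the warn calls reflowed above: SLF4J treats a trailing Throwable specially. When the last argument is an exception that is not consumed by a {} placeholder, it is printed with its stack trace after the formatted message, which is the behavior these calls rely on. A minimal sketch (class name and messages are illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class TrailingThrowableExample {
        private static final Logger LOGGER = LoggerFactory.getLogger(TrailingThrowableExample.class);

        public static void main(String[] args) {
            // Two placeholders consume the first two arguments; the trailing
            // IllegalArgumentException is logged with its full stack trace.
            LOGGER.warn("Unable to configure UID type {} (falling back to {})", "SnowflakeUID", "HashUID",
                    new IllegalArgumentException("Insufficient number of 'Snowflake' options"));
        }
    }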
@@ -7,9 +7,9 @@
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.RetryNTimes;
+ import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
- import org.apache.zookeeper.data.Stat;

public class ZkSnowflakeCache {

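The imports reordered above are the usual ingredients for a Curator-based ZooKeeper client. For orientation, a minimal sketch of constructing such a client and reading a znode's Stat; the connect string, retry policy values, and path are illustrative and not taken from this class:

    import org.apache.curator.framework.CuratorFramework;
    import org.apache.curator.framework.CuratorFrameworkFactory;
    import org.apache.curator.retry.RetryNTimes;
    import org.apache.zookeeper.data.Stat;

    public class CuratorClientSketch {
        public static void main(String[] args) throws Exception {
            // Retry failed operations up to 5 times, sleeping 1000 ms between attempts.
            CuratorFramework client = CuratorFrameworkFactory.newClient("localhost:2181", new RetryNTimes(5, 1000));
            client.start(); // the client must be started before use
            try {
                Stat stat = client.checkExists().forPath("/snowflake");
                System.out.println(stat == null ? "znode absent" : "znode version " + stat.getVersion());
            } finally {
                client.close();
            }
        }
    }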
@@ -49,8 +49,8 @@ public EdgeModelFields createFields() {
fields.setTransformFieldMap((Map<String,String>) context.getBean(TRANSFORM_MODEL_BEAN));
} catch (Throwable t) {
log.error(fatal, "Edge model configuration not loaded!! Edge queries will fail until this issue is corrected.");
log.error(fatal, "Ensure that the Spring config file {} is on the classpath and contains bean names {}, {}, and {}",
EDGE_MODEL_CONTEXT, BASE_MODEL_BEAN, KEYUTIL_MODEL_BEAN, TRANSFORM_MODEL_BEAN, t);
log.error(fatal, "Ensure that the Spring config file {} is on the classpath and contains bean names {}, {}, and {}", EDGE_MODEL_CONTEXT,
BASE_MODEL_BEAN, KEYUTIL_MODEL_BEAN, TRANSFORM_MODEL_BEAN, t);
} finally {
if (context != null) {
context.close();
@@ -155,7 +155,7 @@ public EdgeKey build() {
String tempSinkData = this.sinkData;
try {
log.trace("Attempting escape sequencing isEscape? {} isUnescape? {}", escape, unescape);
log.trace("Values before attempt source data {}, sink data {}",tempSourceData, tempSinkData);
log.trace("Values before attempt source data {}, sink data {}", tempSourceData, tempSinkData);
if (escape && !unescape) {
tempSourceData = StringEscapeUtils.escapeJava(sourceData);
tempSinkData = StringEscapeUtils.escapeJava(sinkData);
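
For readers unfamiliar with the escape branches here: StringEscapeUtils from Apache Commons Lang rewrites control characters and quotes as Java literal escape sequences, and unescapeJava reverses the transformation. A small standalone illustration (the sample string is made up):

    import org.apache.commons.lang.StringEscapeUtils;

    public class EscapeExample {
        public static void main(String[] args) {
            String raw = "line1\nline2\t\"quoted\"";
            String escaped = StringEscapeUtils.escapeJava(raw);
            // Prints: line1\nline2\t\"quoted\" (one line, literal backslashes)
            System.out.println(escaped);
            // Round-trips back to the original string
            System.out.println(StringEscapeUtils.unescapeJava(escaped).equals(raw)); // true
        }
    }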
@@ -71,7 +71,6 @@
*/
public class StatsJob extends IngestJob {

-
// default values used by both mapper and reducer
// constants for hyperloglogplus
static final int HYPERLOG_SPARSE_DEFAULT_VALUE = 24;
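
HYPERLOG_SPARSE_DEFAULT_VALUE is a precision setting for stream-lib's HyperLogLogPlus cardinality estimator, which this job imports elsewhere. A brief sketch of typical usage; the normal-mode precision of 13 is an assumed illustrative value, and only the sparse precision 24 comes from this file:

    import com.clearspring.analytics.stream.cardinality.HyperLogLogPlus;

    public class HllSketch {
        public static void main(String[] args) {
            // p = normal-mode precision (assumed), sp = sparse-mode precision (24, as above)
            HyperLogLogPlus hll = new HyperLogLogPlus(13, 24);
            for (int i = 0; i < 100000; i++) {
                hll.offer("item-" + i);
            }
            // The estimate should land within a few percent of 100000
            System.out.println("estimated cardinality: " + hll.cardinality());
        }
    }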
@@ -17,10 +17,10 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapreduce.Mapper;
- import org.slf4j.Logger;
- import org.slf4j.LoggerFactory;
import org.junit.Assert;
import org.junit.Test;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;

import datawave.ingest.config.TableConfigCache;
import datawave.ingest.mapreduce.handler.shard.ShardedDataTypeHandler;
@@ -19,10 +19,10 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Reducer;
- import org.slf4j.Logger;
- import org.slf4j.LoggerFactory;
import org.junit.Assert;
import org.junit.Test;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;

import com.clearspring.analytics.stream.cardinality.HyperLogLogPlus;

@@ -9,10 +9,10 @@

import org.apache.accumulo.core.data.Value;
import org.apache.commons.lang.RandomStringUtils;
- import org.slf4j.Logger;
- import org.slf4j.LoggerFactory;
import org.junit.Assert;
import org.junit.Test;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;

import com.clearspring.analytics.stream.cardinality.HyperLogLogPlus;

@@ -7,12 +7,12 @@
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
- import org.slf4j.Logger;
- import org.slf4j.LoggerFactory;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;

import datawave.ingest.mapreduce.job.IngestJob;

@@ -243,7 +243,7 @@ private TypeRegistry(Configuration config) {
}

Type t = new Type(typeName, outputName, helperClass, readerClass, handlerClassNames, filterPriority, filterClassNames);
log.debug("Registered type {}" , t);
log.debug("Registered type {}", t);
this.put(typeName, t);

if (null != config.get(typeName + DataTypeOverrideHelper.Properties.DATA_TYPE_VALUES)) {
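
The corrected call also shows why the {} placeholder style is preferred over string concatenation: the message is only assembled, and t.toString() only invoked, when the level is actually enabled. A quick comparison (class name is illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LazyLoggingExample {
        private static final Logger log = LoggerFactory.getLogger(LazyLoggingExample.class);

        public static void main(String[] args) {
            Object t = new Object();
            // Eager: concatenation calls t.toString() even when DEBUG is disabled.
            log.debug("Registered type " + t);
            // Lazy: formatting is deferred until SLF4J knows DEBUG is enabled.
            log.debug("Registered type {}", t);
        }
    }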
@@ -14,9 +14,9 @@
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;

+ import org.apache.xerces.jaxp.SAXParserFactoryImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
- import org.apache.xerces.jaxp.SAXParserFactoryImpl;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
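
The direct import of Xerces' SAXParserFactoryImpl reordered above pins the SAX implementation instead of relying on the JAXP service lookup performed by SAXParserFactory.newInstance(). A sketch of that pattern; the XML snippet and handler are illustrative:

    import java.io.ByteArrayInputStream;
    import javax.xml.parsers.SAXParser;
    import org.apache.xerces.jaxp.SAXParserFactoryImpl;
    import org.xml.sax.Attributes;
    import org.xml.sax.helpers.DefaultHandler;

    public class XercesSaxSketch {
        public static void main(String[] args) throws Exception {
            // Instantiating the Xerces factory directly avoids JAXP's classpath lookup.
            SAXParserFactoryImpl factory = new SAXParserFactoryImpl();
            factory.setNamespaceAware(true);
            SAXParser parser = factory.newSAXParser();
            parser.parse(new ByteArrayInputStream("<root a=\"1\"/>".getBytes("UTF-8")), new DefaultHandler() {
                @Override
                public void startElement(String uri, String localName, String qName, Attributes attrs) {
                    System.out.println("element: " + qName);
                }
            });
        }
    }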
@@ -77,9 +77,9 @@
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.Level;
+ import org.apache.log4j.PatternLayout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
- import org.apache.log4j.PatternLayout;
import org.springframework.util.StopWatch;

import datawave.ingest.config.TableConfigCache;
@@ -257,12 +257,12 @@ public int run(String[] args) throws Exception {
StopWatch sw = new StopWatch("Ingest Job");
sw.start("local init");

- // LoggerFactory.getLogger(TypeRegistry.class).setLevel(Level.ALL);
+ // LoggerFactory.getLogger(TypeRegistry.class).setLevel(Level.ALL);

ca.setLayout(new PatternLayout("%p [%c{1}] %m%n"));
ca.setThreshold(Level.INFO);
- // log.addAppender(ca);
- // log.setLevel(Level.INFO);
+ // log.addAppender(ca);
+ // log.setLevel(Level.INFO);

// Initialize the markings file helper so we get the right markings file
MarkingFunctions.Factory.createMarkingFunctions();
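
The ca.setLayout and ca.setThreshold lines configure a log4j 1.x ConsoleAppender programmatically, and the commented-out addAppender lines hint at attaching it to a logger. A self-contained sketch of the full pattern; attaching to the root logger is an assumption added here:

    import org.apache.log4j.ConsoleAppender;
    import org.apache.log4j.Level;
    import org.apache.log4j.Logger;
    import org.apache.log4j.PatternLayout;

    public class Log4jConsoleSetup {
        public static void main(String[] args) {
            ConsoleAppender ca = new ConsoleAppender();
            // %p = level, %c{1} = last segment of the logger name, %m%n = message + newline
            ca.setLayout(new PatternLayout("%p [%c{1}] %m%n"));
            ca.setThreshold(Level.INFO); // the appender drops events below INFO
            ca.activateOptions();        // required after configuring an appender in code
            Logger.getRootLogger().addAppender(ca);
            Logger.getRootLogger().info("console appender configured");
        }
    }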
