Remove usage of LazyLogging
This cuts down on possible compatibility problems across scala-logging versions.
- ArrayTile no longer warns on narrowing celltype conversion
- GeoWave/GeoMesa no longer log an error announcing their experimental status
echeipesh committed Oct 4, 2019
1 parent 79b2406 commit ed090b0
Showing 68 changed files with 198 additions and 198 deletions.
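
Every affected class gets the same substitution: drop the LazyLogging mix-in and declare an slf4j-backed scala-logging Logger explicitly, so no scala-logging trait appears in GeoTrellis's public inheritance chains. A minimal sketch of the pattern, assuming scala-logging and an slf4j binding on the classpath (ExampleLayerWriter is an illustrative name, not a class in this commit):

import org.slf4j.LoggerFactory
import com.typesafe.scalalogging.Logger

class ExampleLayerWriter extends Serializable {
  // @transient keeps the logger out of Spark closure serialization;
  // lazy re-creates it on first use after deserialization on an executor.
  @transient protected lazy val logger: Logger =
    Logger(LoggerFactory.getLogger(getClass.getName))

  def run(): Unit = logger.info("writer initialized")
}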

@@ -18,15 +18,16 @@ package geotrellis.spark.io.accumulo
 
 import geotrellis.spark.LayerId
 import geotrellis.spark.io._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
 import org.apache.accumulo.core.client.{BatchWriterConfig, Connector}
 import org.apache.accumulo.core.security.Authorizations
 import org.apache.accumulo.core.data.{Range => AccumuloRange}
 
 import scala.collection.JavaConverters._
 
-class AccumuloLayerDeleter(val attributeStore: AttributeStore, connector: Connector) extends LazyLogging with LayerDeleter[LayerId] {
+class AccumuloLayerDeleter(val attributeStore: AttributeStore, connector: Connector) extends LayerDeleter[LayerId] {
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   def delete(id: LayerId): Unit = {
     try {

@@ -23,8 +23,8 @@ import geotrellis.spark.io.avro.codecs._
 import geotrellis.spark.io.index._
 import geotrellis.spark.merge._
 import geotrellis.util._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
 import org.apache.spark.rdd.RDD
 import spray.json._
 
@@ -35,7 +35,8 @@ class AccumuloLayerWriter(
   instance: AccumuloInstance,
   table: String,
   options: AccumuloLayerWriter.Options
-) extends LayerWriter[LayerId] with LazyLogging {
+) extends LayerWriter[LayerId] {
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   // Layer Updating
   def overwrite[

@@ -18,14 +18,15 @@ package geotrellis.spark.io.cassandra
 
 import geotrellis.spark.LayerId
 import geotrellis.spark.io._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
 import com.datastax.driver.core.querybuilder.QueryBuilder
 import com.datastax.driver.core.querybuilder.QueryBuilder.{eq => eqs}
 
 import scala.collection.JavaConverters._
 
-class CassandraLayerDeleter(val attributeStore: AttributeStore, instance: CassandraInstance) extends LazyLogging with LayerDeleter[LayerId] {
+class CassandraLayerDeleter(val attributeStore: AttributeStore, instance: CassandraInstance) extends LayerDeleter[LayerId] {
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   def delete(id: LayerId): Unit = {
     try {

@@ -23,8 +23,8 @@ import geotrellis.spark.io.avro.codecs._
 import geotrellis.spark.io.index._
 import geotrellis.spark.merge._
 import geotrellis.util._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
 import org.apache.spark.rdd.RDD
 import spray.json._
 
@@ -35,7 +35,8 @@ class CassandraLayerWriter(
   instance: CassandraInstance,
   keyspace: String,
   table: String
-) extends LayerWriter[LayerId] with LazyLogging {
+) extends LayerWriter[LayerId] {
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   // Layer updating
   def overwrite[

7 changes: 7 additions & 0 deletions docs/CHANGELOG.rst
@@ -1,6 +1,13 @@
 Changelog
 =========
 
+2.3.3
+-----
+*2019 Oct 4*
+
+- Remove direct usage of `com.typesafe.scalalogging.LazyLogging` trait
+- Upgrade to Proj4J 1.1.0
+
 2.3.2
 -----
 *2019 Aug 12*
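
With the trait gone from the public API, a downstream application is freer to pin its own scala-logging version. A hypothetical downstream build.sbt sketch (the 3.9.2 pin is illustrative, not a requirement of this release):

// Hypothetical downstream build.sbt
libraryDependencies ++= Seq(
  "org.locationtech.geotrellis" %% "geotrellis-spark" % "2.3.3",
  "com.typesafe.scala-logging" %% "scala-logging" % "3.9.2" // app's own choice
)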

@@ -20,8 +20,6 @@ import geotrellis.geotools._
 import geotrellis.spark._
 import geotrellis.util.annotations.experimental
 import geotrellis.vector._
-
-import com.typesafe.scalalogging.LazyLogging
 import org.apache.accumulo.core.client.mapreduce.InputFormatBase
 import org.apache.hadoop.io.Text
 import org.apache.hadoop.mapreduce.Job
@@ -38,9 +36,7 @@ import scala.reflect.ClassTag
  * @define experimental <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>@experimental
  */
 @experimental class GeoMesaFeatureReader(val instance: GeoMesaInstance)
-  (implicit sc: SparkContext) extends Serializable with LazyLogging {
-
-  logger.error("GeoMesa support is experimental")
+  (implicit sc: SparkContext) extends Serializable {
 
   /** $experimental */
   @experimental def readSimpleFeatures(

@@ -21,7 +21,6 @@ import geotrellis.spark._
 import geotrellis.util.annotations.experimental
 import geotrellis.vector._
 
-import com.typesafe.scalalogging.LazyLogging
 import org.apache.spark.rdd.RDD
 import org.apache.spark.SparkContext
 import org.geotools.data.Transaction
@@ -31,9 +30,7 @@ import org.opengis.feature.simple.SimpleFeatureType
  * @define experimental <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>@experimental
  */
 @experimental class GeoMesaFeatureWriter(val instance: GeoMesaInstance)
-  (implicit sc: SparkContext) extends Serializable with LazyLogging {
-
-  logger.error("GeoMesa support is experimental")
+  (implicit sc: SparkContext) extends Serializable {
 
   /** $experimental */
   @experimental def write[G <: Geometry, D: ? => Seq[(String, Any)]: λ[α => Feature[G, α] => FeatureToGeoMesaSimpleFeatureMethods[G, α]]]

@@ -19,7 +19,6 @@ package geotrellis.spark.io.geomesa
 import geotrellis.spark.LayerId
 import geotrellis.util.annotations.experimental
 
-import com.typesafe.scalalogging.LazyLogging
 import org.geotools.data.DataStoreFinder
 import org.locationtech.geomesa.accumulo.data.AccumuloDataStore
 
@@ -28,9 +27,7 @@ import scala.collection.JavaConverters._
 /**
  * @define experimental <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>@experimental
  */
-@experimental class GeoMesaInstance(val conf: Map[String, String])
-    extends Serializable with LazyLogging {
-  logger.error("GeoMesa support is experimental")
+@experimental class GeoMesaInstance(val conf: Map[String, String]) extends Serializable {
 
   val SEP = "__.__"
 

@@ -25,8 +25,8 @@ import geotrellis.spark.io.accumulo.AccumuloAttributeStore
 import geotrellis.util._
 import geotrellis.util.annotations.experimental
 import geotrellis.vector.Extent
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
 import org.locationtech.jts.geom._
 import mil.nga.giat.geowave.adapter.raster.adapter.RasterDataAdapter
 import mil.nga.giat.geowave.core.geotime.ingest._
@@ -127,9 +127,8 @@ import spray.json.DefaultJsonProtocol._
   val accumuloUser: String,
   val accumuloPass: String,
   val geowaveNamespace: String
-) extends DiscreteLayerAttributeStore with LazyLogging {
-
-  logger.error("GeoWave support is experimental")
+) extends DiscreteLayerAttributeStore {
+  @transient protected lazy val logger: Logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   val zkInstance = (new ZooKeeperInstance(accumuloInstance, zookeepers))
   val token = new PasswordToken(accumuloPass)

@@ -27,8 +27,6 @@ import geotrellis.spark.tiling.{LayoutDefinition, MapKeyTransform}
 import geotrellis.util._
 import geotrellis.util.annotations.experimental
 import geotrellis.vector.Extent
-
-import com.typesafe.scalalogging.LazyLogging
 import org.locationtech.jts.geom._
 import mil.nga.giat.geowave.adapter.raster.adapter.RasterDataAdapter
 import mil.nga.giat.geowave.core.geotime.ingest._
@@ -94,9 +92,7 @@ object GeoWaveLayerReader {
  * @define experimental <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>@experimental
 */
 @experimental class GeoWaveLayerReader(val attributeStore: AttributeStore)
-  (implicit sc: SparkContext) extends LazyLogging {
-
-  logger.error("GeoWave support is experimental")
+  (implicit sc: SparkContext) {
 
   val defaultNumPartitions = sc.defaultParallelism
 

@@ -27,8 +27,8 @@ import geotrellis.spark.io.index.KeyIndex
 import geotrellis.util._
 import geotrellis.util.annotations.experimental
 import geotrellis.vector.Extent
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
 import mil.nga.giat.geowave.adapter.raster.adapter.merge.RasterTileRowTransform
 import mil.nga.giat.geowave.adapter.raster.adapter.RasterDataAdapter
 import mil.nga.giat.geowave.core.geotime.index.dimension._
@@ -70,7 +70,8 @@ import mil.nga.giat.geowave.core.store.data.VisibilityWriter
 /**
  * @define experimental <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>@experimental
 */
-@experimental object GeoWaveLayerWriter extends LazyLogging {
+@experimental object GeoWaveLayerWriter {
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   /** $experimental */
   @experimental def write[
@@ -247,10 +248,8 @@ import mil.nga.giat.geowave.core.store.data.VisibilityWriter
 @experimental class GeoWaveLayerWriter(
   val attributeStore: GeoWaveAttributeStore,
   val accumuloWriter: AccumuloWriteStrategy
-)(implicit sc: SparkContext)
-    extends LazyLogging {
-
-  logger.error("GeoWave support is experimental")
+)(implicit sc: SparkContext) {
+  @transient protected lazy val logger: Logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   /** $experimental */
   @experimental def write[

@@ -18,14 +18,15 @@ package geotrellis.spark.io.hbase
 
 import geotrellis.spark.LayerId
 import geotrellis.spark.io._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
 import org.apache.hadoop.hbase.client._
 import org.apache.hadoop.hbase.filter.PrefixFilter
 
 import scala.collection.JavaConverters._
 
-class HBaseLayerDeleter(val attributeStore: AttributeStore, instance: HBaseInstance) extends LazyLogging with LayerDeleter[LayerId] {
+class HBaseLayerDeleter(val attributeStore: AttributeStore, instance: HBaseInstance) extends LayerDeleter[LayerId] {
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   def delete(id: LayerId): Unit = {
     try{

@@ -23,8 +23,8 @@ import geotrellis.spark.io.avro.codecs._
 import geotrellis.spark.io.index._
 import geotrellis.spark.merge._
 import geotrellis.util._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
 import org.apache.spark.rdd.RDD
 import spray.json._
 
@@ -34,7 +34,8 @@ class HBaseLayerWriter(
   val attributeStore: AttributeStore,
   instance: HBaseInstance,
   table: String
-) extends LayerWriter[LayerId] with LazyLogging {
+) extends LayerWriter[LayerId] {
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   // Layer Updating
   def overwrite[

2 changes: 1 addition & 1 deletion project/Version.scala
@@ -15,7 +15,7 @@
  */
 
 object Version {
-  val geotrellis = "2.3.2" + Environment.versionSuffix
+  val geotrellis = "2.3.3" + Environment.versionSuffix
   val scala = "2.11.12"
   val crossScala = Seq(scala, "2.12.7")
   val geotools = "20.0"

3 changes: 0 additions & 3 deletions raster/src/main/scala/geotrellis/raster/ArrayTile.scala
@@ -42,9 +42,6 @@ trait ArrayTile extends Tile with Serializable {
   def convert(targetCellType: CellType): ArrayTile = {
     val tile = ArrayTile.alloc(targetCellType, cols, rows)
 
-    if(targetCellType.isFloatingPoint != cellType.isFloatingPoint)
-      logger.debug(s"Conversion from $cellType to $targetCellType may lead to data loss.")
-
     if(!cellType.isFloatingPoint) {
       cfor(0)(_ < rows, _ + 1) { row =>
         cfor(0)(_ < cols, _ + 1) { col =>
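
Callers that relied on the removed narrowing-conversion message can reproduce the check at the call site. A sketch using only public GeoTrellis API (checkedConvert is an illustrative helper, not part of the library):

import geotrellis.raster._

// Illustrative helper: warn on integral <-> floating-point conversions the
// way ArrayTile.convert used to, then delegate to the normal conversion.
def checkedConvert(tile: Tile, target: CellType): Tile = {
  if (target.isFloatingPoint != tile.cellType.isFloatingPoint)
    Console.err.println(s"Conversion from ${tile.cellType} to $target may lead to data loss.")
  tile.convert(target)
}

The same check covers the parallel removals in CompositeTile, ConstantTile, CroppedTile, and GeoTiffTile below.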

3 changes: 0 additions & 3 deletions raster/src/main/scala/geotrellis/raster/CompositeTile.scala
@@ -166,9 +166,6 @@ case class CompositeTile(tiles: Seq[Tile],
     if (cols.toLong * rows.toLong > Int.MaxValue.toLong) {
       sys.error("This tiled raster is too big to convert into an array.")
     } else {
-      if(targetCellType.isFloatingPoint != cellType.isFloatingPoint)
-        logger.warn(s"Conversion from $cellType to $targetCellType may lead to data loss.")
-
       val tile = ArrayTile.alloc(targetCellType, cols, rows)
       val len = cols * rows
       val layoutCols = tileLayout.layoutCols

3 changes: 0 additions & 3 deletions raster/src/main/scala/geotrellis/raster/ConstantTile.scala
@@ -75,9 +75,6 @@ trait ConstantTile extends Tile {
    * @return The new Tile
    */
   def convert(newType: CellType): Tile = {
-    if(newType.isFloatingPoint != cellType.isFloatingPoint)
-      logger.warn(s"Conversion from $cellType to $newType may lead to data loss.")
-
     newType match {
       case BitCellType => new BitConstantTile(if (iVal == 0) false else true, cols, rows)
       case ct: ByteCells => ByteConstantTile(iVal.toByte, cols, rows, ct)

3 changes: 0 additions & 3 deletions raster/src/main/scala/geotrellis/raster/CroppedTile.scala
@@ -138,9 +138,6 @@ case class CroppedTile(sourceTile: Tile,
    * @return An MutableArrayTile
    */
   def mutable(targetCellType: CellType): MutableArrayTile = {
-    if(targetCellType.isFloatingPoint != cellType.isFloatingPoint)
-      logger.warn(s"Conversion from $cellType to $targetCellType may lead to data loss.")
-
     val tile = ArrayTile.alloc(targetCellType, cols, rows)
 
     if(!cellType.isFloatingPoint) {

5 changes: 2 additions & 3 deletions raster/src/main/scala/geotrellis/raster/Tile.scala
@@ -16,12 +16,11 @@
 
 package geotrellis.raster
 
-import com.typesafe.scalalogging.LazyLogging
 
 /**
  * Base trait for a Tile.
 */
-trait Tile extends CellGrid with IterableTile with MappableTile[Tile] with LazyLogging {
+trait Tile extends CellGrid with IterableTile with MappableTile[Tile] {
 
   /**
    * Execute a function at each pixel of a [[Tile]]. Two functions
@@ -97,7 +96,7 @@ trait Tile extends CellGrid with IterableTile with MappableTile[Tile] with LazyLogging {
 
   /**
    * Returns a mutable instance of this tile.
-   * 
+   *
    * @note When the underlying class is an instance of [[MutableArrayTile]] it will return itself without performing a copy.
    *       This is used internally as a performance optimization when the ownership of the tile is controlled.
    */

@@ -19,7 +19,6 @@ package geotrellis.raster.io.geotiff
 import geotrellis.raster._
 import geotrellis.raster.io.geotiff.compression._
 
-import com.typesafe.scalalogging.LazyLogging
 import spire.syntax.cfor._
 
 import scala.collection.mutable
@@ -276,7 +275,7 @@ object GeoTiffMultibandTile {
     val segmentPixelCols = segmentLayout.tileLayout.tileCols
     val segmentPixelRows = segmentLayout.tileLayout.tileRows
 
-    val segments: Iterator[((Int, Int), MultibandTile)] = 
+    val segments: Iterator[((Int, Int), MultibandTile)] =
       for {
        windowRowMin <- Iterator.range(start = 0, end = tile.rows, step = segmentPixelRows)
        windowColMin <- Iterator.range(start = 0, end = tile.cols, step = segmentPixelCols)
@@ -308,7 +307,7 @@ abstract class GeoTiffMultibandTile(
   val compression: Compression,
   val bandCount: Int,
   val overviews: List[GeoTiffMultibandTile] = Nil
-) extends MultibandTile with GeoTiffImageData with GeoTiffSegmentLayoutTransform with MacroGeotiffMultibandCombiners with LazyLogging {
+) extends MultibandTile with GeoTiffImageData with GeoTiffSegmentLayoutTransform with MacroGeotiffMultibandCombiners {
   val cellType: CellType
   val cols: Int = segmentLayout.totalCols
   val rows: Int = segmentLayout.totalRows

@@ -317,9 +317,6 @@ abstract class GeoTiffTile(
    * @return A new [[Tile]] that contains the new CellTypes
    */
   def convert(newCellType: CellType): GeoTiffTile = {
-    if(newCellType.isFloatingPoint != cellType.isFloatingPoint)
-      logger.warn(s"Conversion from $cellType to $newCellType may lead to data loss.")
-
     val arr = Array.ofDim[Array[Byte]](segmentCount)
     val compressor = compression.createCompressor(segmentCount)
 