Commit

Merge branch 'release/0.10.1'
metasim committed Mar 14, 2022
2 parents 14fe14b + 8025286 commit 86c69c0
Showing 38 changed files with 1,179 additions and 342 deletions.
File renamed without changes.
7 changes: 5 additions & 2 deletions build.sbt
@@ -120,12 +120,15 @@ lazy val datasource = project
moduleName := "rasterframes-datasource",
libraryDependencies ++= Seq(
compilerPlugin("org.scalamacros" % "paradise" % "2.1.1" cross CrossVersion.full),
compilerPlugin("org.typelevel" % "kind-projector" % "0.13.2" cross CrossVersion.full),
sttpCatsCe2,
stac4s,
framelessRefined excludeAll ExclusionRule(organization = "com.github.mpilquist"),
geotrellis("s3").value excludeAll ExclusionRule(organization = "com.github.mpilquist"),
spark("core").value % Provided,
spark("mllib").value % Provided,
spark("sql").value % Provided
spark("sql").value % Provided,
`better-files`
),
Compile / console / scalacOptions ~= { _.filterNot(Set("-Ywarn-unused-import", "-Ywarn-unused:imports")) },
Test / console / scalacOptions ~= { _.filterNot(Set("-Ywarn-unused-import", "-Ywarn-unused:imports")) },
@@ -173,7 +176,7 @@ lazy val docs = project
Compile / paradoxMaterialTheme ~= { _
.withRepository(uri("https://github.com/locationtech/rasterframes"))
.withCustomStylesheet("assets/custom.css")
.withCopyright("""&copy; 2017-2019 <a href="https://astraea.earth">Astraea</a>, Inc. All rights reserved.""")
.withCopyright("""&copy; 2017-2021 <a href="https://astraea.earth">Astraea</a>, Inc. All rights reserved.""")
.withLogo("assets/images/RF-R.svg")
.withFavicon("assets/images/RasterFrames_32x32.ico")
.withColor("blue-grey", "light-blue")
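The kind-projector compiler plugin added to the datasource module above supplies syntax for partially applied type constructors, the style of code the sttp/stac4s integration leans on. A minimal sketch of the syntax it enables (illustrative only, not taken from this build):

```scala
import cats.Functor
import cats.implicits._

// With kind-projector, `Either[String, *]` replaces the type lambda
// ({ type L[A] = Either[String, A] })#L that plain Scala 2 would require.
val eitherFunctor: Functor[Either[String, *]] = Functor[Either[String, *]]
val mapped: Either[String, Int] = eitherFunctor.map(Right(1): Either[String, Int])(_ + 1)
```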
2 changes: 1 addition & 1 deletion core/src/main/scala/org/apache/spark/sql/rf/package.scala
@@ -56,7 +56,7 @@ package object rf {

/** Lookup the registered Catalyst UDT for the given Scala type. */
def udtOf[T >: Null: TypeTag]: UserDefinedType[T] =
UDTRegistration.getUDTFor(typeTag[T].tpe.toString).map(_.newInstance().asInstanceOf[UserDefinedType[T]])
UDTRegistration.getUDTFor(typeTag[T].tpe.toString).map(_.getDeclaredConstructor().newInstance().asInstanceOf[UserDefinedType[T]])
.getOrElse(throw new IllegalArgumentException(typeTag[T].tpe + " doesn't have a corresponding UDT"))
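The change above replaces `Class.newInstance()`, deprecated since Java 9 because it silently propagates undeclared checked exceptions, with the equivalent `getDeclaredConstructor().newInstance()`. The same pattern in isolation, using a hypothetical class:

```scala
// Hypothetical example: reflectively instantiate a class via its no-argument constructor,
// mirroring the replacement made in udtOf above.
class Widget { def describe: String = "widget" }

val widget: Widget = classOf[Widget].getDeclaredConstructor().newInstance()
println(widget.describe)
```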

/** Creates a Catalyst expression for flattening the fields in a struct into columns. */
@@ -33,6 +33,8 @@ import org.locationtech.geomesa.spark.jts.encoders.SpatialEncoders
import org.locationtech.rasterframes.model.{CellContext, LongExtent, TileContext, TileDataContext}
import frameless.TypedEncoder
import geotrellis.raster.mapalgebra.focal.{Kernel, Neighborhood, TargetCell}
import org.locationtech.rasterframes.ref.RFRasterSource
import org.locationtech.rasterframes.tiles.ProjectedRasterTile

import java.net.URI
import java.sql.Timestamp
@@ -45,14 +47,14 @@ trait StandardEncoders extends SpatialEncoders with TypedEncoders {
implicit def optionalEncoder[T: TypedEncoder]: ExpressionEncoder[Option[T]] = typedExpressionEncoder[Option[T]]

implicit lazy val strMapEncoder: ExpressionEncoder[Map[String, String]] = ExpressionEncoder()
implicit lazy val crsExpressionEncoder: ExpressionEncoder[CRS] = ExpressionEncoder()
implicit lazy val projectedExtentEncoder: ExpressionEncoder[ProjectedExtent] = ExpressionEncoder()
implicit lazy val temporalProjectedExtentEncoder: ExpressionEncoder[TemporalProjectedExtent] = ExpressionEncoder()
implicit lazy val timestampEncoder: ExpressionEncoder[Timestamp] = ExpressionEncoder()
implicit lazy val cellStatsEncoder: ExpressionEncoder[CellStatistics] = ExpressionEncoder()
implicit lazy val cellHistEncoder: ExpressionEncoder[CellHistogram] = ExpressionEncoder()
implicit lazy val localCellStatsEncoder: ExpressionEncoder[LocalCellStatistics] = ExpressionEncoder()

implicit lazy val crsExpressionEncoder: ExpressionEncoder[CRS] = typedExpressionEncoder
implicit lazy val uriEncoder: ExpressionEncoder[URI] = typedExpressionEncoder[URI]
implicit lazy val neighborhoodEncoder: ExpressionEncoder[Neighborhood] = typedExpressionEncoder[Neighborhood]
implicit lazy val targetCellEncoder: ExpressionEncoder[TargetCell] = typedExpressionEncoder[TargetCell]
@@ -78,6 +80,11 @@ trait StandardEncoders extends SpatialEncoders with TypedEncoders {

implicit lazy val tileEncoder: ExpressionEncoder[Tile] = typedExpressionEncoder
implicit def rasterEncoder[T <: CellGrid[Int]: TypedEncoder]: ExpressionEncoder[Raster[T]] = typedExpressionEncoder[Raster[T]]

// Intentionally not implicit; exposed as an implicit in the ProjectedRasterTile companion object
lazy val projectedRasterTileEncoder: ExpressionEncoder[ProjectedRasterTile] = typedExpressionEncoder
// Intentionally not implicit; exposed as an implicit in the RFRasterSource companion object
lazy val rfRasterSourceEncoder: ExpressionEncoder[RFRasterSource] = typedExpressionEncoder
}

object StandardEncoders extends StandardEncoders
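A minimal usage sketch for these implicits (assumes a local SparkSession; the master URL is illustrative):

```scala
import geotrellis.proj4.{CRS, LatLng, WebMercator}
import org.apache.spark.sql.SparkSession
import org.locationtech.rasterframes.encoders.StandardEncoders._

val spark = SparkSession.builder().master("local[*]").getOrCreate()
import spark.implicits._

// crsExpressionEncoder (defined above) is resolved implicitly to build a Dataset[CRS].
val crses = Seq[CRS](LatLng, WebMercator).toDS()
crses.show(truncate = false)
```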
@@ -10,6 +10,8 @@ import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.catalyst.util.QuantileSummaries
import org.apache.spark.sql.rf.{CrsUDT, RasterSourceUDT, TileUDT}
import org.locationtech.jts.geom.Envelope
import org.locationtech.rasterframes.ref.RFRasterSource
import org.locationtech.rasterframes.tiles.ProjectedRasterTile
import org.locationtech.rasterframes.util.{FocalNeighborhood, FocalTargetCell, KryoSupport}

import java.net.URI
@@ -23,6 +25,8 @@ trait TypedEncoders {
implicit val tileUDT = new TileUDT
implicit val rasterSourceUDT = new RasterSourceUDT

implicit val crsTypedEncoder: TypedEncoder[CRS] = TypedEncoder.usingUserDefinedType[CRS]

implicit val cellTypeInjection: Injection[CellType, String] = Injection(_.toString, CellType.fromName)
implicit val cellTypeTypedEncoder: TypedEncoder[CellType] = TypedEncoder.usingInjection[CellType, String]

@@ -89,7 +93,20 @@ trait TypedEncoders {
implicit val tileTypedEncoder: TypedEncoder[Tile] = TypedEncoder.usingUserDefinedType[Tile]
implicit def rasterTileTypedEncoder[T <: CellGrid[Int]: TypedEncoder]: TypedEncoder[Raster[T]] = TypedEncoder.usingDerivation

// Derivation is done through frameless to trigger RasterSourceUDT load
implicit val rfRasterSourceTypedEncoder: TypedEncoder[RFRasterSource] = TypedEncoder.usingUserDefinedType[RFRasterSource]

implicit val kernelTypedEncoder: TypedEncoder[Kernel] = TypedEncoder.usingDerivation

// Derivation is done through frameless to trigger the TileUDT and CrsUDT load
implicit val projectedRasterTileTypedEncoder: TypedEncoder[ProjectedRasterTile] =
ManualTypedEncoder.newInstance[ProjectedRasterTile](
fields = List(
RecordEncoderField(0, "tile", TypedEncoder[Tile]),
RecordEncoderField(1, "extent", TypedEncoder[Extent]),
RecordEncoderField(2, "crs", TypedEncoder[CRS])
)
)
}

object TypedEncoders extends TypedEncoders
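The `Injection`-based encoders above (for example the `CellType` one) follow a standard frameless recipe: map the type to and from a representation frameless already knows how to encode. The same recipe in isolation, with a hypothetical `Status` ADT:

```scala
import frameless.{Injection, TypedEncoder}

sealed trait Status
case object Active  extends Status
case object Retired extends Status

// Round-trip the ADT through String, mirroring the CellType <-> String injection above.
// The inverse function throws a MatchError on unrecognized input.
implicit val statusInjection: Injection[Status, String] =
  Injection[Status, String](
    {
      case Active  => "active"
      case Retired => "retired"
    },
    {
      case "active"  => Active
      case "retired" => Retired
    }
  )

implicit val statusTypedEncoder: TypedEncoder[Status] = TypedEncoder.usingInjection[Status, String]
```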
@@ -34,6 +34,7 @@ import org.apache.spark.sql.types.{DataType, StructType}
import org.apache.spark.sql.{Column, Row, TypedColumn}

class ProjectedLayerMetadataAggregate(destCRS: CRS, destDims: Dimensions[Int]) extends UserDefinedAggregateFunction {

import ProjectedLayerMetadataAggregate._

def inputSchema: StructType = InputRecord.inputRecordEncoder.schema
@@ -47,10 +48,10 @@ class ProjectedLayerMetadataAggregate(destCRS: CRS, destDims: Dimensions[Int]) e
def initialize(buffer: MutableAggregationBuffer): Unit = ()

def update(buffer: MutableAggregationBuffer, input: Row): Unit = {
if(!input.isNullAt(0)) {
if (!input.isNullAt(0)) {
val in = input.as[InputRecord]

if(buffer.isNullAt(0)) {
if (buffer.isNullAt(0)) {
in.toBufferRecord(destCRS).write(buffer)
} else {
val br = buffer.as[BufferRecord]
@@ -71,16 +72,15 @@ class ProjectedLayerMetadataAggregate(destCRS: CRS, destDims: Dimensions[Int]) e
case _ => ()
}

def evaluate(buffer: Row): Any = {
  val buf = buffer.as[BufferRecord]
  if (buf.isEmpty) throw new IllegalArgumentException("Can not collect metadata from empty data frame.")

  val re = RasterExtent(buf.extent, buf.cellSize)
  val layout = LayoutDefinition(re, destDims.cols, destDims.rows)

  val kb = KeyBounds(layout.mapTransform(buf.extent))
  TileLayerMetadata(buf.cellType, layout, buf.extent, destCRS, kb).toRow
}
def evaluate(buffer: Row): Any =
  Option(buffer).map(_.as[BufferRecord]).filter(!_.isEmpty).map(buf => {
    val re = RasterExtent(buf.extent, buf.cellSize)
    val layout = LayoutDefinition(re, destDims.cols, destDims.rows)

    val kb = KeyBounds(layout.mapTransform(buf.extent))
    TileLayerMetadata(buf.cellType, layout, buf.extent, destCRS, kb).toRow
  }).getOrElse(throw new IllegalArgumentException("Can not collect metadata from empty data frame."))
}

object ProjectedLayerMetadataAggregate {
@@ -31,7 +31,7 @@ import geotrellis.vector.Extent
import org.apache.hadoop.conf.Configuration
import org.apache.spark.annotation.Experimental
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.rf.RasterSourceUDT
import org.locationtech.rasterframes.encoders.StandardEncoders
import org.locationtech.rasterframes.model.TileContext
import org.locationtech.rasterframes.{NOMINAL_TILE_DIMS, rfConfig}

@@ -100,10 +100,7 @@ object RFRasterSource extends LazyLogging {

def cacheStats = rsCache.stats()

implicit def rsEncoder: ExpressionEncoder[RFRasterSource] = {
RasterSourceUDT // Makes sure UDT is registered first
ExpressionEncoder()
}
implicit lazy val rsEncoder: ExpressionEncoder[RFRasterSource] = StandardEncoders.rfRasterSourceEncoder

def apply(source: URI): RFRasterSource =
rsCache.get(
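For reference, a hedged usage sketch of the cached constructor shown above (the GeoTIFF URL is a placeholder):

```scala
import java.net.URI
import org.locationtech.rasterframes.ref.RFRasterSource

// Resolves (and caches) a raster source for the given URI; the URL below is illustrative only.
val rs: RFRasterSource = RFRasterSource(URI.create("https://example.com/scene.tif"))
println(rs.crs)
println(rs.extent)
```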
@@ -27,6 +27,7 @@ import geotrellis.vector.{Extent, ProjectedExtent}
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.locationtech.rasterframes.ref.ProjectedRasterLike
import org.apache.spark.sql.catalyst.DefinedByConstructorParams
import org.locationtech.rasterframes.encoders.StandardEncoders

/**
* A Tile that's also like a ProjectedRaster, with delayed evaluation support.
@@ -58,5 +59,6 @@ object ProjectedRasterTile {

def unapply(prt: ProjectedRasterTile): Option[(Tile, Extent, CRS)] = Some((prt.tile, prt.extent, prt.crs))

implicit lazy val projectedRasterTileEncoder: ExpressionEncoder[ProjectedRasterTile] = ExpressionEncoder()
implicit lazy val projectedRasterTileEncoder: ExpressionEncoder[ProjectedRasterTile] =
StandardEncoders.projectedRasterTileEncoder
}
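Because the implicit lives in the companion object, it sits in the implicit scope of `Encoder[ProjectedRasterTile]` and is found without any extra imports. A minimal sketch of that resolution (the helper is hypothetical):

```scala
import org.apache.spark.sql.{Dataset, SparkSession}
import org.locationtech.rasterframes.tiles.ProjectedRasterTile

// Hypothetical helper: no explicit encoder import is needed, because the companion-object
// implicit is discovered through the implicit scope of Encoder[ProjectedRasterTile].
def toDataset(tiles: Seq[ProjectedRasterTile], spark: SparkSession): Dataset[ProjectedRasterTile] = {
  import spark.implicits._
  tiles.toDS()
}
```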
@@ -24,7 +24,6 @@ package org.locationtech.rasterframes.expressions
import geotrellis.proj4.{CRS, LatLng, WebMercator}
import geotrellis.raster.CellType
import geotrellis.vector._
import org.apache.spark.sql.Encoders
import org.apache.spark.sql.jts.JTSTypes
import org.locationtech.geomesa.curve.{XZ2SFC, Z2SFC}
import org.locationtech.rasterframes._
@@ -151,7 +150,6 @@ class SFCIndexerSpec extends TestEnvironment with Inspectors {
val tile = TestData.randomTile(2, 2, CellType.fromName("uint8"))
val prts = testExtents.map(reproject(crs)).map(ProjectedRasterTile(tile, _, crs))

implicit val enc = Encoders.tuple(ProjectedRasterTile.projectedRasterTileEncoder, Encoders.scalaInt)
// The `id` column is here to deal with Spark auto-projecting single-column DataFrames and then needing an explicit encoder
val df = prts.zipWithIndex.toDF("proj_raster", "id")
withClue("XZ2") {
@@ -4,3 +4,5 @@ org.locationtech.rasterframes.datasource.geotrellis.GeoTrellisCatalog
org.locationtech.rasterframes.datasource.raster.RasterSourceDataSource
org.locationtech.rasterframes.datasource.geojson.GeoJsonDataSource
org.locationtech.rasterframes.datasource.stac.api.StacApiDataSource
org.locationtech.rasterframes.datasource.tiles.TilesDataSource
org.locationtech.rasterframes.datasource.slippy.SlippyDataSource
77 changes: 77 additions & 0 deletions datasource/src/main/resources/slippy.html
@@ -0,0 +1,77 @@
<!DOCTYPE html>
<!--
~ This software is licensed under the Apache 2 license, quoted below.
~
~ Copyright 2021 Astraea, Inc.
~
~ Licensed under the Apache License, Version 2.0 (the "License"); you may not
~ use this file except in compliance with the License. You may obtain a copy of
~ the License at
~
~ [http://www.apache.org/licenses/LICENSE-2.0]
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
~ WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
~ License for the specific language governing permissions and limitations under
~ the License.
~
~
-->

<html lang="en">
<head>
<title>RasterFrames Rendering</title>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
<meta property="eai:center" content="(${viewLat},${viewLon})"/>
<meta property="eai:maxZoom" content="${maxNativeZoom}"/>
<link rel="stylesheet" href="https://unpkg.com/[email protected]/dist/leaflet.css" integrity="sha512-Rksm5RenBEKSKFjgI3a41vrjkw4EVPlJ3+OiI65vTjIdo9brlAacEuKOiQ5OFh7cOI1bkDwLqdLw3Zg0cRJAAQ==" crossorigin=""/>
<script src="https://unpkg.com/[email protected]/dist/leaflet.js" integrity="sha512-/Nsx9X4HebavoBvEBuyp3I7od5tA0UzAxs+j83KgC8PU0kgB4XiK4Lfe4y4cgBtaRJQEIFCW+oC506aPT2L1zw==" crossorigin=""></script>
<link rel="stylesheet" href="https://unpkg.com/leaflet-control-geocoder/dist/Control.Geocoder.css" />
<script src="https://unpkg.com/leaflet-control-geocoder/dist/Control.Geocoder.js"></script>
<style>
#mapid {
position: absolute;
top: 10px;
bottom: 10px;
left: 10px;
right: 10px;
}
</style>
</head>
<body>

<div id="mapid"></div>

<script>

var map = L.map('mapid')
.setView([${viewLat}, ${viewLon}], ${maxNativeZoom});

L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {attribution: '&copy; <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors'}).addTo(map);

L.tileLayer(
'{z}/{x}/{y}.png', {
maxZoom: 18,
maxNativeZoom: ${maxNativeZoom}
}
).addTo(map);

L.control.scale().addTo(map);

L.Control.geocoder().addTo(map);

var popup = L.popup();

function showPos(e) {
popup
.setLatLng(e.latlng)
.setContent(e.latlng.toString())
.openOn(map);
}

map.on('click', showPos);
</script>
</body>
</html>
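The `${viewLat}`, `${viewLon}`, and `${maxNativeZoom}` tokens above are template parameters substituted when the page is written next to a generated tile pyramid. A rough sketch of that kind of substitution (the helper and parameter values are illustrative, not the data source's actual implementation):

```scala
// Naive ${key} -> value substitution over the template text.
def renderTemplate(template: String, params: Map[String, String]): String =
  params.foldLeft(template) { case (html, (key, value)) =>
    html.replace("${" + key + "}", value)
  }

val html = renderTemplate(
  scala.io.Source.fromResource("slippy.html").mkString,
  Map("viewLat" -> "47.6", "viewLon" -> "-122.3", "maxNativeZoom" -> "8")
)
```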

This file was deleted.

@@ -24,6 +24,7 @@ package org.locationtech.rasterframes
import cats.syntax.option._
import io.circe.Json
import io.circe.parser
import org.apache.spark.sql.{Column, DataFrame}
import org.apache.spark.sql.util.CaseInsensitiveStringMap
import sttp.model.Uri

@@ -72,4 +73,48 @@ package object datasource {
def jsonParam(key: String, parameters: CaseInsensitiveStringMap): Option[Json] =
if(parameters.containsKey(key)) parser.parse(parameters.get(key)).toOption
else None


/**
* Convenience grouping for transient columns defining spatial context.
*/
private[rasterframes]
case class SpatialComponents(crsColumn: Column,
extentColumn: Column,
dimensionColumn: Column,
cellTypeColumn: Column)

private[rasterframes]
object SpatialComponents {
def apply(tileColumn: Column, crsColumn: Column, extentColumn: Column): SpatialComponents = {
val dim = rf_dimensions(tileColumn) as "dims"
val ct = rf_cell_type(tileColumn) as "cellType"
SpatialComponents(crsColumn, extentColumn, dim, ct)
}
def apply(prColumn : Column): SpatialComponents = {
SpatialComponents(
rf_crs(prColumn) as "crs",
rf_extent(prColumn) as "extent",
rf_dimensions(prColumn) as "dims",
rf_cell_type(prColumn) as "cellType"
)
}
}

/**
* If the given DataFrame already exposes tile, CRS, and extent columns, group them into a
* SpatialComponents instance. Otherwise, if it has a `ProjectedRaster` column, derive the CRS,
* extent, dimension, and cell type columns from that column.
*
* @param d DataFrame to process.
* @return The SpatialComponents wrapped in `Some` when a spatial context can be found; `None` otherwise.
*/
private[rasterframes]
def projectSpatialComponents(d: DataFrame): Option[SpatialComponents] =
d.tileColumns.headOption.zip(d.crsColumns.headOption.zip(d.extentColumns.headOption)).headOption
.map { case (tile, (crs, extent)) => SpatialComponents(tile, crs, extent) }
.orElse(
d.projRasterColumns.headOption
.map(pr => SpatialComponents(pr))
)
}
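These helpers are package-private, but here is a sketch of how `projectSpatialComponents` is meant to be used from inside `org.locationtech.rasterframes.datasource` (the helper is hypothetical, and the input DataFrame is assumed to carry either tile + CRS + extent columns or a `proj_raster` column):

```scala
import org.apache.spark.sql.DataFrame

// Hypothetical helper: surface the transient spatial-context columns alongside the data.
def describeSpatialContext(df: DataFrame): Unit =
  projectSpatialComponents(df) match {
    case Some(sc) =>
      df.select(sc.crsColumn as "crs", sc.extentColumn as "extent",
                sc.dimensionColumn as "dims", sc.cellTypeColumn as "cell_type").show()
    case None =>
      println("DataFrame has no recognizable spatial context")
  }
```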