Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[DOCS] Remove unneeded trailing semicolons from Scala files #1625

Merged
merged 1 commit into from
Oct 13, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ object Adapter {
import scala.jdk.CollectionConverters._
if (spatialRDD.fieldNames != null)
return toDf(spatialRDD, spatialRDD.fieldNames.asScala.toList, sparkSession)
toDf(spatialRDD = spatialRDD, fieldNames = null, sparkSession = sparkSession);
toDf(spatialRDD = spatialRDD, fieldNames = null, sparkSession = sparkSession)
}

def toDf[T <: Geometry](
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ object RasterSerializer {
* Array of bytes representing this geometry
*/
def serialize(raster: GridCoverage2D): Array[Byte] = {
Serde.serialize(raster);
Serde.serialize(raster)
}

/**
Expand All @@ -44,6 +44,6 @@ object RasterSerializer {
* GridCoverage2D
*/
def deserialize(value: Array[Byte]): GridCoverage2D = {
Serde.deserialize(value);
Serde.deserialize(value)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -893,13 +893,13 @@ object st_functions extends DataFrameAPI {
wrapExpression[ST_HausdorffDistance](g1, g2, -1)

def ST_HausdorffDistance(g1: String, g2: String) =
wrapExpression[ST_HausdorffDistance](g1, g2, -1);
wrapExpression[ST_HausdorffDistance](g1, g2, -1)

def ST_HausdorffDistance(g1: Column, g2: Column, densityFrac: Column) =
wrapExpression[ST_HausdorffDistance](g1, g2, densityFrac);
wrapExpression[ST_HausdorffDistance](g1, g2, densityFrac)

def ST_HausdorffDistance(g1: String, g2: String, densityFrac: Double) =
wrapExpression[ST_HausdorffDistance](g1, g2, densityFrac);
wrapExpression[ST_HausdorffDistance](g1, g2, densityFrac)

def ST_CoordDim(geometry: Column): Column = wrapExpression[ST_CoordDim](geometry)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ case class BroadcastQuerySideKNNJoinExec(
require(kValue > 0, "The number of neighbors must be greater than 0.")
objectsShapes.setNeighborSampleNumber(kValue)

val joinPartitions: Integer = numPartitions;
val joinPartitions: Integer = numPartitions
broadcastJoin = false

// expand the boundary for partition to include both RDDs
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -258,7 +258,7 @@ class SpatialJoinSuite extends TestBaseScala with TableDrivenPropertyChecks {
df.createOrReplaceTempView("df10parts")

val query =
"SELECT * FROM df10parts JOIN dfEmpty WHERE ST_Intersects(df10parts.geom, dfEmpty.geom)";
"SELECT * FROM df10parts JOIN dfEmpty WHERE ST_Intersects(df10parts.geom, dfEmpty.geom)"
withConf(Map(spatialJoinPartitionSideConfKey -> "left")) {
val resultRows = sparkSession.sql(query).collect()
assert(resultRows.isEmpty)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -470,7 +470,7 @@ class constructorTestScala extends TestBaseScala {
val geometries =
sparkSession.sql("SELECT ST_GeomFromWKB(rawWKBTable.wkb) as countyshape from rawWKBTable")
val expectedGeom =
"LINESTRING (-2.1047439575195312 -0.354827880859375, -1.49606454372406 -0.6676061153411865)";
"LINESTRING (-2.1047439575195312 -0.354827880859375, -1.49606454372406 -0.6676061153411865)"
assert(geometries.first().getAs[Geometry](0).toString.equals(expectedGeom))
// null input
val nullGeom = sparkSession.sql("SELECT ST_GeomFromWKB(null)")
Expand Down Expand Up @@ -501,7 +501,7 @@ class constructorTestScala extends TestBaseScala {
val geometries = sparkSession.sql(
"SELECT ST_GeomFromEWKB(rawWKBTable.wkb) as countyshape from rawWKBTable")
val expectedGeom =
"LINESTRING (-2.1047439575195312 -0.354827880859375, -1.49606454372406 -0.6676061153411865)";
"LINESTRING (-2.1047439575195312 -0.354827880859375, -1.49606454372406 -0.6676061153411865)"
assert(geometries.first().getAs[Geometry](0).toString.equals(expectedGeom))
// null input
val nullGeom = sparkSession.sql("SELECT ST_GeomFromEWKB(null)")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1349,7 +1349,7 @@ class dataFrameAPITestScala extends TestBaseScala {
.select(ST_MinimumBoundingCircle("geom").as("geom"))
.selectExpr("ST_ReducePrecision(geom, 2)")
val actualResult = df.take(1)(0).get(0).asInstanceOf[Geometry].getCoordinates().length
val expectedResult = BufferParameters.DEFAULT_QUADRANT_SEGMENTS * 6 * 4 + 1;
val expectedResult = BufferParameters.DEFAULT_QUADRANT_SEGMENTS * 6 * 4 + 1
assert(actualResult == expectedResult)
}

Expand Down Expand Up @@ -1723,7 +1723,7 @@ class dataFrameAPITestScala extends TestBaseScala {
"SELECT ST_GeomFromWKT('Polygon ((0 0, 1 2, 2 2, 3 2, 5 0, 4 0, 3 1, 2 1, 1 0, 0 0))') as geom")
val df = baseDF.select(ST_MakeValid(ST_Collect(ST_H3ToGeom(ST_H3CellIDs("geom", 6, true)))))
val actualResult = df.take(1)(0).getAs[Geometry](0)
val targetShape = baseDF.take(1)(0).getAs[Polygon](0);
val targetShape = baseDF.take(1)(0).getAs[Polygon](0)
assert(actualResult.contains(targetShape))
}

Expand Down Expand Up @@ -1982,7 +1982,7 @@ class dataFrameAPITestScala extends TestBaseScala {
val polyDf = sparkSession.sql(
"SELECT ST_GeomFromWKT('POLYGON ((1 0 1, 1 1 1, 2 1 1, 2 0 1, 1 0 1))') AS geom")
val df = polyDf.select(ST_Translate("geom", 2, 3, 1))
val wktWriter3D = new WKTWriter(3);
val wktWriter3D = new WKTWriter(3)
val actualGeom = df.take(1)(0).get(0).asInstanceOf[Geometry]
val actual = wktWriter3D.write(actualGeom)
val expected = "POLYGON Z((3 3 2, 3 4 2, 4 4 2, 4 3 2, 3 3 2))"
Expand All @@ -1999,7 +1999,7 @@ class dataFrameAPITestScala extends TestBaseScala {
val polyDf = sparkSession.sql(
"SELECT ST_GeomFromWKT('MULTIPOINT (0 0, 2 2)') AS geom, ST_Buffer(ST_GeomFromWKT('POINT(1 1)'), 10.0) as buf")
val df = polyDf.select(ST_VoronoiPolygons("geom"))
val wktWriter3D = new WKTWriter(3);
val wktWriter3D = new WKTWriter(3)
val actualGeom = df.take(1)(0).get(0).asInstanceOf[Geometry]
val actual = wktWriter3D.write(actualGeom)
val expected =
Expand All @@ -2019,9 +2019,9 @@ class dataFrameAPITestScala extends TestBaseScala {
it("Passed ST_Affine") {
val polyDf = sparkSession.sql(
"SELECT ST_GeomFromWKT('POLYGON ((2 3 1, 4 5 1, 7 8 2, 2 3 1))') AS geom")
val df = polyDf.select(ST_Affine("geom", 1, 2, 3, 3, 4, 4, 1, 4, 2, 1, 2, 1));
val df = polyDf.select(ST_Affine("geom", 1, 2, 3, 3, 4, 4, 1, 4, 2, 1, 2, 1))
val dfDefaultValue = polyDf.select(ST_Affine("geom", 1, 2, 1, 2, 1, 2))
val wKTWriter3D = new WKTWriter(3);
val wKTWriter3D = new WKTWriter(3)
val actualGeom = df.take(1)(0).get(0).asInstanceOf[Geometry]
val actualGeomDefaultValue = dfDefaultValue.take(1)(0).get(0).asInstanceOf[Geometry]
val actual = wKTWriter3D.write(actualGeom)
Expand All @@ -2036,7 +2036,7 @@ class dataFrameAPITestScala extends TestBaseScala {
val polyDf = sparkSession.sql(
"SELECT ST_GeomFromWKT('POLYGON ((1 0 1, 2 3 2, 5 0 1, 5 2 9, 1 0 1))') AS geom")
val df = polyDf.select(ST_BoundingDiagonal("geom"))
val wKTWriter = new WKTWriter(3);
val wKTWriter = new WKTWriter(3)
val expected = "LINESTRING Z(1 0 1, 5 3 9)"
val actual = wKTWriter.write(df.take(1)(0).get(0).asInstanceOf[Geometry])
assertEquals(expected, actual)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
var functionDf =
sparkSession.sql("select ST_ConcaveHull(polygondf.countyshape, 1, true) from polygondf")
assert(functionDf.count() > 0);
assert(functionDf.count() > 0)
}

it("Passed ST_ConvexHull") {
Expand All @@ -76,7 +76,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
var functionDf =
sparkSession.sql("select ST_ConvexHull(polygondf.countyshape) from polygondf")
assert(functionDf.count() > 0);
assert(functionDf.count() > 0)
}

it("Passed ST_CrossesDateLine") {
Expand Down Expand Up @@ -107,7 +107,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
val functionDf =
sparkSession.sql("select ST_Buffer(polygondf.countyshape, 1) from polygondf")
assert(functionDf.count() > 0);
assert(functionDf.count() > 0)
}

it("Passed ST_Buffer Spheroid") {
Expand All @@ -123,11 +123,11 @@ class functionTestScala

var functionDf =
sparkSession.sql("select ST_Buffer(polygondf.countyshape, 1, true) from polygondf")
assert(functionDf.count() > 0);
assert(functionDf.count() > 0)

functionDf = sparkSession.sql(
"select ST_Buffer(polygondf.countyshape, 1, true, 'quad_segs=2') from polygondf")
assert(functionDf.count() > 0);
assert(functionDf.count() > 0)
}

it("Passed ST_BestSRID") {
Expand All @@ -142,7 +142,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
val functionDf =
sparkSession.sql("select ST_BestSRID(polygondf.countyshape) from polygondf")
assert(functionDf.count() > 0);
assert(functionDf.count() > 0)
}

it("Passed ST_ShiftLongitude") {
Expand All @@ -157,7 +157,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
val functionDf =
sparkSession.sql("select ST_ShiftLongitude(polygondf.countyshape) from polygondf")
assert(functionDf.count() > 0);
assert(functionDf.count() > 0)
}

it("Passed ST_Envelope") {
Expand All @@ -172,7 +172,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
var functionDf =
sparkSession.sql("select ST_Envelope(polygondf.countyshape) from polygondf")
assert(functionDf.count() > 0);
assert(functionDf.count() > 0)
}

it("Passed ST_Expand") {
Expand Down Expand Up @@ -236,7 +236,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
var functionDf =
sparkSession.sql("select ST_Centroid(polygondf.countyshape) from polygondf")
assert(functionDf.count() > 0);
assert(functionDf.count() > 0)
}

it("Passed ST_Length") {
Expand All @@ -250,7 +250,7 @@ class functionTestScala
"select ST_GeomFromWKT(polygontable._c0) as countyshape from polygontable")
polygonDf.createOrReplaceTempView("polygondf")
var functionDf = sparkSession.sql("select ST_Length(polygondf.countyshape) from polygondf")
assert(functionDf.count() > 0);
assert(functionDf.count() > 0)
}

it("Passed ST_Length2D") {
Expand All @@ -265,7 +265,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
var functionDf =
sparkSession.sql("select ST_Length2D(polygondf.countyshape) from polygondf")
assert(functionDf.count() > 0);
assert(functionDf.count() > 0)
}

it("Passed ST_Area") {
Expand All @@ -279,7 +279,7 @@ class functionTestScala
"select ST_GeomFromWKT(polygontable._c0) as countyshape from polygontable")
polygonDf.createOrReplaceTempView("polygondf")
var functionDf = sparkSession.sql("select ST_Area(polygondf.countyshape) from polygondf")
assert(functionDf.count() > 0);
assert(functionDf.count() > 0)
}

it("Passed ST_Dimension with Geometry") {
Expand Down Expand Up @@ -325,7 +325,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
var functionDf = sparkSession.sql(
"select ST_Distance(polygondf.countyshape, polygondf.countyshape) from polygondf")
assert(functionDf.count() > 0);
assert(functionDf.count() > 0)
}

it("Passed ST_3DDistance") {
Expand Down Expand Up @@ -1849,37 +1849,37 @@ class functionTestScala
val actualDf = baseDf.selectExpr("ST_RemoveRepeatedPoints(geom, 1000) as geom")
var actual = actualDf.selectExpr("ST_AsText(geom)").first().get(0)
var expected =
"GEOMETRYCOLLECTION (POINT (10 10), LINESTRING (20 20, 30 30), POLYGON ((40 40, 70 70, 70 70, 40 40)), MULTIPOINT ((80 80)))";
"GEOMETRYCOLLECTION (POINT (10 10), LINESTRING (20 20, 30 30), POLYGON ((40 40, 70 70, 70 70, 40 40)), MULTIPOINT ((80 80)))"
assertEquals(expected, actual)
val actualSRID = actualDf.selectExpr("ST_SRID(geom)").first().get(0)
assertEquals(1000, actualSRID)

actual = sparkSession
.sql("SELECT ST_AsText(ST_RemoveRepeatedPoints(ST_GeomFromWKT('MULTIPOINT ((1 1), (2 2), (3 3), (2 2))')))")
.first()
.get(0);
.get(0)
expected = "MULTIPOINT ((1 1), (2 2), (3 3))"
assertEquals(expected, actual)

actual = sparkSession
.sql("SELECT ST_AsText(ST_RemoveRepeatedPoints(ST_GeomFromWKT('LINESTRING (0 0, 0 0, 1 1, 0 0, 1 1, 2 2)')))")
.first()
.get(0);
.get(0)
expected = "LINESTRING (0 0, 1 1, 0 0, 1 1, 2 2)"
assertEquals(expected, actual)

actual = sparkSession
.sql("SELECT ST_AsText(ST_RemoveRepeatedPoints(ST_GeomFromWKT('GEOMETRYCOLLECTION (LINESTRING (1 1, 2 2, 2 2, 3 3), POINT (4 4), POINT (4 4), POINT (5 5))')))")
.first()
.get(0);
.get(0)
expected =
"GEOMETRYCOLLECTION (LINESTRING (1 1, 2 2, 3 3), POINT (4 4), POINT (4 4), POINT (5 5))"
assertEquals(expected, actual)

actual = sparkSession
.sql("SELECT ST_AsText(ST_RemoveRepeatedPoints(ST_GeomFromWKT('LINESTRING (0 0, 0 0, 1 1, 5 5, 1 1, 2 2)'), 2))")
.first()
.get(0);
.get(0)
expected = "LINESTRING (0 0, 5 5, 2 2)"
assertEquals(expected, actual)
}
Expand Down Expand Up @@ -2916,7 +2916,7 @@ class functionTestScala
.get(1)
.asInstanceOf[String]
assertEquals(expected, actual)
assertEquals(expectedDefaultValue, actualDefaultValue);
assertEquals(expectedDefaultValue, actualDefaultValue)
}
}

Expand All @@ -2941,7 +2941,7 @@ class functionTestScala
.get(1)
.asInstanceOf[String]
assertEquals(expected, actual)
assertEquals(expectedDefaultValue, actualDefaultValue);
assertEquals(expectedDefaultValue, actualDefaultValue)
}
}

Expand All @@ -2966,7 +2966,7 @@ class functionTestScala
.get(1)
.asInstanceOf[String]
assertEquals(expected, actual)
assertEquals(expectedDefaultValue, actualDefaultValue);
assertEquals(expectedDefaultValue, actualDefaultValue)
}
}

Expand All @@ -2991,7 +2991,7 @@ class functionTestScala
.get(1)
.asInstanceOf[String]
assertEquals(expected, actual)
assertEquals(expectedDefaultValue, actualDefaultValue);
assertEquals(expectedDefaultValue, actualDefaultValue)
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -258,7 +258,7 @@ class predicateTestScala extends TestBaseScala {

it("Passed ST_Relate") {
val baseDf = sparkSession.sql(
"SELECT ST_GeomFromWKT('LINESTRING (1 1, 5 5)') AS g1, ST_GeomFromWKT('POLYGON ((3 3, 3 7, 7 7, 7 3, 3 3))') as g2, '1010F0212' as im");
"SELECT ST_GeomFromWKT('LINESTRING (1 1, 5 5)') AS g1, ST_GeomFromWKT('POLYGON ((3 3, 3 7, 7 7, 7 3, 3 3))') as g2, '1010F0212' as im")
val actual = baseDf.selectExpr("ST_Relate(g1, g2)").first().get(0)
assert(actual.equals("1010F0212"))

Expand Down
Loading
Loading