diff --git a/.github/workflows/snapshot-publish.yml b/.github/workflows/snapshot-publish.yml
index 75e09a7eb..1e3367155 100644
--- a/.github/workflows/snapshot-publish.yml
+++ b/.github/workflows/snapshot-publish.yml
@@ -27,7 +27,7 @@ jobs:
java-version: 11
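+ # Publish only the standaloneCosmetic module to the local Maven repository (~/.m2);
+ # its artifacts are what later get pushed to the snapshot repository.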
- name: Publish to Local Maven
- run: sbt publishM2
+ run: sbt standaloneCosmetic/publishM2
- uses: actions/checkout@v3
with:
@@ -35,11 +35,18 @@ jobs:
path: 'build'
- name: Configure AWS credentials
- uses: aws-actions/configure-aws-credentials@v2
+ uses: aws-actions/configure-aws-credentials@v1.7.0
with:
role-to-assume: ${{ secrets.PUBLISH_SNAPSHOTS_ROLE }}
aws-region: us-east-1
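+ # Generate .sha512 and .sha256 checksum files for every pom and jar published
+ # under ~/.m2/repository/org/opensearch/.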
+ - name: Generate sha256 and sha512 checksums
+ run: |
+ for i in `find ${HOME}/.m2/repository/org/opensearch/ -name "*.pom" -type f`; do sha512sum "$i" | awk '{print $1}' >> "$i.sha512"; done
+ for i in `find ${HOME}/.m2/repository/org/opensearch/ -name "*.jar" -type f`; do sha512sum "$i" | awk '{print $1}' >> "$i.sha512"; done
+ for i in `find ${HOME}/.m2/repository/org/opensearch/ -name "*.pom" -type f`; do sha256sum "$i" | awk '{print $1}' >> "$i.sha256"; done
+ for i in `find ${HOME}/.m2/repository/org/opensearch/ -name "*.jar" -type f`; do sha256sum "$i" | awk '{print $1}' >> "$i.sha256"; done
+
- name: Get credentials and publish snapshots to maven
run: |
export SONATYPE_USERNAME=$(aws secretsmanager get-secret-value --secret-id maven-snapshots-username --query SecretString --output text)
@@ -47,4 +54,6 @@ jobs:
echo "::add-mask::$SONATYPE_USERNAME"
echo "::add-mask::$SONATYPE_PASSWORD"
export SNAPSHOT_REPO_URL="https://aws.oss.sonatype.org/content/repositories/snapshots/"
- build/resources/publish/publish-snapshot.sh $HOME/.m2
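+ # Stage the locally published artifacts (and their checksum files) next to
+ # publish-snapshot.sh, then publish from that directory.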
+ cd build/resources/publish/
+ cp -a $HOME/.m2/repository/* ./
+ ./publish-snapshot.sh ./
diff --git a/README.md b/README.md
index c0a604b09..1b46e5e47 100644
--- a/README.md
+++ b/README.md
@@ -27,7 +27,11 @@ spark-sql --conf "spark.sql.extensions=org.opensearch.flint.FlintSparkExtensions
To build and run this application with Spark, you can run:
```
-sbt clean publishLocal
+sbt clean standaloneCosmetic/publishM2
+```
+Then add `org.opensearch:opensearch-spark-standalone_2.12` (published by the `standaloneCosmetic` module) as a dependency when running your Spark application, for example:
+```
+bin/spark-shell --packages "org.opensearch:opensearch-spark-standalone_2.12:0.1.0-SNAPSHOT"
```
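+
+To also enable the Flint Spark extension in the same session, the package can be combined with the `spark.sql.extensions` setting shown earlier (a sketch; adjust the artifact version to match your build):
+```
+bin/spark-shell --packages "org.opensearch:opensearch-spark-standalone_2.12:0.1.0-SNAPSHOT" --conf "spark.sql.extensions=org.opensearch.flint.FlintSparkExtensions"
+```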
## Code of Conduct
@@ -44,4 +48,4 @@ See the [LICENSE](../LICENSE.txt) file for our project's licensing. We will ask
## Copyright
-Copyright OpenSearch Contributors. See [NOTICE](../NOTICE) for details.
\ No newline at end of file
+Copyright OpenSearch Contributors. See [NOTICE](../NOTICE) for details.
diff --git a/build.sbt b/build.sbt
index 8414da3db..b6a232796 100644
--- a/build.sbt
+++ b/build.sbt
@@ -5,7 +5,7 @@
import Dependencies._
lazy val scala212 = "2.12.14"
-lazy val sparkVersion = "3.3.1"
+lazy val sparkVersion = "3.3.2"
lazy val opensearchVersion = "2.6.0"
ThisBuild / organization := "org.opensearch"
@@ -33,6 +33,8 @@ lazy val compileScalastyle = taskKey[Unit]("compileScalastyle")
lazy val testScalastyle = taskKey[Unit]("testScalastyle")
lazy val commonSettings = Seq(
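+ // compile Java sources with Java 11 source and target compatibility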
+ javacOptions ++= Seq("-source", "11"),
+ Compile / compile / javacOptions ++= Seq("-target", "11"),
// Scalastyle
scalastyleConfig := (ThisBuild / scalastyleConfig).value,
compileScalastyle := (Compile / scalastyle).toTask("").value,
@@ -43,11 +45,12 @@ lazy val commonSettings = Seq(
lazy val root = (project in file("."))
.aggregate(flintCore, flintSparkIntegration)
.disablePlugins(AssemblyPlugin)
- .settings(name := "flint")
+ .settings(name := "flint", publish / skip := true)
lazy val flintCore = (project in file("flint-core"))
.disablePlugins(AssemblyPlugin)
.settings(
+ commonSettings,
name := "flint-core",
scalaVersion := scala212,
libraryDependencies ++= Seq(
@@ -55,19 +58,20 @@ lazy val flintCore = (project in file("flint-core"))
"org.opensearch.client" % "opensearch-rest-high-level-client" % opensearchVersion
exclude ("org.apache.logging.log4j", "log4j-api"),
"com.amazonaws" % "aws-java-sdk" % "1.12.397" % "provided"
- exclude ("com.fasterxml.jackson.core", "jackson-databind")))
+ exclude ("com.fasterxml.jackson.core", "jackson-databind")),
+ publish / skip := true)
lazy val flintSparkIntegration = (project in file("flint-spark-integration"))
.dependsOn(flintCore)
.enablePlugins(AssemblyPlugin, Antlr4Plugin)
.settings(
commonSettings,
- name := "flint-spark",
+ name := "flint-spark-integration",
scalaVersion := scala212,
libraryDependencies ++= Seq(
"com.amazonaws" % "aws-java-sdk" % "1.12.397" % "provided"
exclude ("com.fasterxml.jackson.core", "jackson-databind"),
- "org.scalactic" %% "scalactic" % "3.2.15",
+ "org.scalactic" %% "scalactic" % "3.2.15" % "test",
"org.scalatest" %% "scalatest" % "3.2.15" % "test",
"org.scalatest" %% "scalatest-flatspec" % "3.2.15" % "test",
"org.scalatestplus" %% "mockito-4-6" % "3.2.15.0" % "test",
@@ -111,3 +115,23 @@ lazy val integtest = (project in file("integ-test"))
"org.testcontainers" % "testcontainers" % "1.18.0" % "test"),
libraryDependencies ++= deps(sparkVersion),
Test / fullClasspath += (flintSparkIntegration / assembly).value)
+
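+// Thin "cosmetic" project whose packaged jar is the flintSparkIntegration assembly,
+// so users can depend on a single self-contained artifact.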
+lazy val standaloneCosmetic = project
+ .settings(
+ name := "opensearch-spark-standalone",
+ commonSettings,
+ releaseSettings,
+ exportJars := true,
+ Compile / packageBin := (flintSparkIntegration / assembly).value)
+
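+// Maven publishing metadata (license and SCM information) for published modules.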
+lazy val releaseSettings = Seq(
+ publishMavenStyle := true,
+ publishArtifact := true,
+ Test / publishArtifact := false,
+ licenses += ("Apache-2.0", url("http://www.apache.org/licenses/LICENSE-2.0")),
+ pomExtra :=
+   <url>https://opensearch.org/</url>
+   <scm>
+     <url>git@github.com:opensearch-project/opensearch-spark.git</url>
+     <connection>scm:git:git@github.com:opensearch-project/opensearch-spark.git</connection>
+   </scm>
+ )