diff --git a/.github/workflows/github-action-build.yml b/.github/workflows/github-action-build.yml
index 2f011fde3bb..4c07bd728ca 100644
--- a/.github/workflows/github-action-build.yml
+++ b/.github/workflows/github-action-build.yml
@@ -53,11 +53,10 @@ jobs:
       - name: Prod build
         run: yarn --cwd core/gui run build:ci
 
-  amber:
+  core:
     strategy:
       matrix:
-        os:
-          - ubuntu-latest
+        os: [ ubuntu-22.04 ]
         java-version: [ 11 ]
     runs-on: ${{ matrix.os }}
     env:
@@ -66,30 +65,6 @@ jobs:
     steps:
       - name: Prepare ENV
         run: sudo apt-get install libncurses5
-      - name: Checkout Texera
-        uses: actions/checkout@v2
-      - name: Setup Java
-        uses: actions/setup-java@v2
-        with:
-          distribution: 'temurin'
-          java-version: ${{ matrix.java-version }}
-      - uses: coursier/cache-action@v6
-        with:
-          extraSbtFiles: '["core/amber/*.sbt", "core/amber/project/**.{scala,sbt}", "core/amber/project/build.properties" ]'
-      - name: Lint with scalafix & scalafmt
-        run: cd core/amber && sbt "scalafixAll --check" && sbt scalafmtCheckAll
-      - name: Compile with sbt
-        run: cd core/amber && sbt clean package
-      - name: Run backend tests
-        run: cd core/amber && sbt -v -J-Xmx2G test
-
-  core:
-    strategy:
-      matrix:
-        os: [ ubuntu-latest ]
-        java-version: [ 11 ]
-    runs-on: ${{ matrix.os }}
-    steps:
       - name: Checkout Texera
         uses: actions/checkout@v2
       - name: Setup Java
diff --git a/.gitignore b/.gitignore
index cda7b56ffe3..4fbd384afe6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -102,4 +102,10 @@ StoredCredential*
 **/apache2/
 **/Apache24/
 **/php/
-Composer-Setup.exe
\ No newline at end of file
+Composer-Setup.exe
+
+# Ignoring folders generated by vscode IDE
+.metals/
+.bloop/
+.ammonite/
+metals.sbt
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index 8bc0543efb7..2977cbc7aa4 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -21,8 +21,6 @@ FROM sbtscala/scala-sbt:eclipse-temurin-jammy-11.0.17_8_1.9.3_2.13.11
 WORKDIR /core
 COPY core/ .
 
-WORKDIR /core/amber
-RUN sbt clean package
 RUN apt-get update
 RUN apt-get install -y netcat unzip python3-pip
 RUN pip3 install python-lsp-server python-lsp-server[websockets]
@@ -34,10 +32,12 @@ WORKDIR /core
 
 COPY .git ../.git
 COPY --from=nodegui /gui/dist ./gui/dist
 
-RUN scripts/build-docker.sh
+RUN scripts/build-services.sh
 
 CMD ["scripts/deploy-docker.sh"]
 
 EXPOSE 8080
-EXPOSE 9090
\ No newline at end of file
+EXPOSE 9090
+
+EXPOSE 8085
\ No newline at end of file
diff --git a/README.md b/README.md
index c1dd5cf0c38..99fe5967044 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
[README.md hunks, HTML markup omitted: the header tagline becomes "Texera supports scalable data computation and enables advanced AI/ML techniques.", and the table of linked entries is removed: "Data Science for All - An NSF-funded summer program to teach high-school students data science and AI/ML", "ICS 80: Data Science and AI/ML Using Workflows - A Spring 2024 course at UCI, teaching 42 undergraduates, most of whom are not computer science majors, to learn data science and AI/ML", "dkNET Webinar 04/26/2024", "Texera Demo @ VLDB'20", and "Amber Presentation @ VLDB'20".]
- * src/main/scala/edu/uci/ics/texera/workflow/common/tuple/schema/AttributeTypeUtils.scala
- * Provide parsing, inferring, and casting logic between other AttributeTypes.
- *
- * 2. SQLSourceOpDesc
- * src/main/scala/edu/uci/ics/texera/workflow/operators/source/sql/SQLSourceOpDesc
- * SQL sources especially will need to map the input schema to Texera.Schema. The AttributeType
- * needs to be converted from the original source types accordingly.
- *
- * 3. FilterPredicate
- * src/main/scala/edu/uci/ics/texera/workflow/operators/filter/FilterPredicate.java
- * FilterPredicate takes in AttributeTypes and converts them into a comparable type, then does
- * the comparison. New AttributeTypes need to be mapped to a comparable type there.
- *
- * 4. SpecializedAverageOpDesc.getNumericalValue
- * src/main/scala/edu/uci/ics/texera/workflow/operators/aggregate/SpecializedAverageOpDesc.scala
- * New AttributeTypes might need to be converted into a numerical value in order to perform
- * aggregations.
- *
- * 5. SchemaPropagationService.SchemaAttribute
- * src/app/workspace/service/dynamic-schema/schema-propagation/schema-propagation.service.ts
- * Declare the frontend SchemaAttribute for the new AttributeType.
- *
- * 6. ArrowUtils (Java)
- * src/main/scala/edu/uci/ics/amber/engine/architecture/pythonworker/ArrowUtils.scala
- * Provide java-side conversion between ArrowType and AttributeType.
- *
- * 7. ArrowUtils (Python)
- * src/main/python/core/util/arrow_utils.py
- * Provide python-side conversion between ArrowType and AttributeType.
- */
-
-
- // A field that is indexed but not tokenized: the entire String
- // value is indexed as a single token
- STRING("string", String.class),
- INTEGER("integer", Integer.class),
- LONG("long", Long.class),
- DOUBLE("double", Double.class),
- BOOLEAN("boolean", Boolean.class),
- TIMESTAMP("timestamp", Timestamp.class),
- BINARY("binary", byte[].class),
- ANY("ANY", Object.class);
-
- private final String name;
- private final Class<?> fieldClass;
-
- AttributeType(String name, Class<?> fieldClass) {
- this.name = name;
- this.fieldClass = fieldClass;
- }
-
- @JsonValue
- public String getName() {
- if (this.name.equals(ANY.name)) {
- // exclude this enum type in JSON schema
- // JSON schema generator will ignore empty enum type
- return "";
- }
- return this.name;
- }
-
- public Class<?> getFieldClass() {
- return this.fieldClass;
- }
-
- public static AttributeType getAttributeType(Class<?> fieldClass) {
- if (fieldClass.equals(String.class)) {
- return STRING;
- } else if (fieldClass.equals(Integer.class)) {
- return INTEGER;
- } else if (fieldClass.equals(Long.class)) {
- return LONG;
- } else if (fieldClass.equals(Double.class)) {
- return DOUBLE;
- } else if (fieldClass.equals(Boolean.class)) {
- return BOOLEAN;
- } else if (fieldClass.equals(Timestamp.class)) {
- return TIMESTAMP;
- } else if (fieldClass.equals(byte[].class)) {
- return BINARY;
- } else {
- return ANY;
- }
- }
-
- @Override
- public String toString() {
- return this.getName();
- }
-}
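A quick sketch of how the enum above is used: mapping Java field classes to AttributeType constants and back to their JSON names. It assumes the pre-refactor package edu.uci.ics.amber.engine.common.model.tuple shown in the deleted files below; this PR moves these classes, so the import path may differ afterwards.

```scala
import edu.uci.ics.amber.engine.common.model.tuple.AttributeType

object AttributeTypeSketch extends App {
  // Class-to-type mapping used when validating tuple fields against a schema.
  println(AttributeType.getAttributeType(classOf[java.lang.Integer])) // prints "integer"
  println(AttributeType.getAttributeType(classOf[Array[Byte]]))       // prints "binary"
  println(AttributeType.getAttributeType(classOf[java.util.UUID]))    // falls back to ANY, prints ""

  // getName backs the JSON serialization; ANY maps to "" so it is hidden from JSON schemas.
  println(AttributeType.TIMESTAMP.getName) // "timestamp"
}
```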
diff --git a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/model/tuple/AttributeTypeUtils.scala b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/model/tuple/AttributeTypeUtils.scala
deleted file mode 100644
index a2d8c34c5a2..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/model/tuple/AttributeTypeUtils.scala
+++ /dev/null
@@ -1,376 +0,0 @@
-package edu.uci.ics.amber.engine.common.model.tuple
-
-import com.github.sisyphsu.dateparser.DateParserUtils
-
-import java.sql.Timestamp
-import java.text.NumberFormat
-import java.util.Locale
-import scala.util.Try
-import scala.util.control.Exception.allCatch
-
-object AttributeTypeUtils extends Serializable {
-
- /**
- * this loop checks whether the current attribute in the array is the attribute selected for casting:
- * if it is, its type is changed to the result type;
- * if it is not, the original type is kept.
- * the loop also keeps the attribute order the same as in the original schema.
- * @param schema schema of data
- * @param attribute selected attribute
- * @param resultType casting type
- * @return schema of data
- */
- def SchemaCasting(
- schema: Schema,
- attribute: String,
- resultType: AttributeType
- ): Schema = {
- // need a builder to maintain the order of original schema
- val builder = Schema.builder()
- val attributes: List[Attribute] = schema.getAttributes
- // change the schema when meet selected attribute else remain the same
- for (i <- attributes.indices) {
- if (attributes.apply(i).getName.equals(attribute)) {
- resultType match {
- case AttributeType.STRING | AttributeType.INTEGER | AttributeType.DOUBLE |
- AttributeType.LONG | AttributeType.BOOLEAN | AttributeType.TIMESTAMP |
- AttributeType.BINARY =>
- builder.add(attribute, resultType)
- case AttributeType.ANY | _ =>
- builder.add(attribute, attributes.apply(i).getType)
- }
- } else {
- builder.add(attributes.apply(i).getName, attributes.apply(i).getType)
- }
- }
- builder.build()
- }
-
- /**
- * Casts the fields of a tuple to new types according to a list of type casting units,
- * producing a new tuple that conforms to the specified type changes.
- * Each type casting unit specifies the attribute name and the target type to cast to.
- * If an attribute name in the tuple does not have a corresponding type casting unit,
- * its value is included in the result tuple without type conversion.
- *
- * @param tuple The source tuple whose fields are to be casted.
- * @param targetTypes A mapping of attribute names to their target types, which specifies how to cast each field.
- * If an attribute is not present in the map, no casting is applied to it.
- * @return A new instance of TupleLike with fields casted to the target types
- * as specified by the typeCastingUnits.
- */
- def tupleCasting(
- tuple: Tuple,
- targetTypes: Map[String, AttributeType]
- ): TupleLike =
- TupleLike(
- tuple.getSchema.getAttributes.map { attr =>
- val targetType = targetTypes.getOrElse(attr.getName, attr.getType)
- parseField(tuple.getField(attr.getName), targetType, force = true)
- }
- )
-
- def parseFields(fields: Array[Any], schema: Schema): Array[Any] = {
- parseFields(fields, schema.getAttributes.map(attr => attr.getType).toArray)
- }
-
- /**
- * parses fields into corresponding Java objects based on the given Schema AttributeTypes
- * @param attributeTypes Schema AttributeTypeList
- * @param fields fields value
- * @return parsedFields in the target AttributeTypes
- */
- @throws[AttributeTypeException]
- def parseFields(
- fields: Array[Any],
- attributeTypes: Array[AttributeType]
- ): Array[Any] = {
- fields.indices.map(i => parseField(fields(i), attributeTypes(i))).toArray
- }
-
- /**
- * parses a field into the corresponding Java object based on the given Schema AttributeType
- * @param field fields value
- * @param attributeType target AttributeType
- * @param force force to parse the field to the target type if possible
- * currently only supported for comma-separated numbers
- *
- * @return parsedField in the target AttributeType
- */
- @throws[AttributeTypeException]
- def parseField(
- field: Any,
- attributeType: AttributeType,
- force: Boolean = false
- ): Any = {
- if (field == null) return null
- attributeType match {
- case AttributeType.INTEGER => parseInteger(field, force)
- case AttributeType.LONG => parseLong(field, force)
- case AttributeType.DOUBLE => parseDouble(field)
- case AttributeType.BOOLEAN => parseBoolean(field)
- case AttributeType.TIMESTAMP => parseTimestamp(field)
- case AttributeType.STRING => field.toString
- case AttributeType.BINARY => field
- case AttributeType.ANY | _ => field
- }
- }
-
- @throws[AttributeTypeException]
- private def parseInteger(fieldValue: Any, force: Boolean = false): Integer = {
- val attempt: Try[Integer] = Try {
- fieldValue match {
- case str: String =>
- if (force) {
- // Use US locale for comma-separated numbers
- NumberFormat.getNumberInstance(Locale.US).parse(str.trim).intValue()
- } else {
- str.trim.toInt
- }
- case int: Integer => int
- case long: java.lang.Long => long.toInt
- case double: java.lang.Double => double.toInt
- case boolean: java.lang.Boolean => if (boolean) 1 else 0
- // Timestamp and Binary are considered to be illegal here.
- case _ =>
- throw new IllegalArgumentException(
- s"Unsupported type for parsing to Integer: ${fieldValue.getClass.getName}"
- )
- }
- }
-
- attempt.recover {
- case e: Exception =>
- throw new AttributeTypeException(
- s"Failed to parse type ${fieldValue.getClass.getName} to Integer: ${fieldValue.toString}",
- e
- )
- }.get
- }
-
- @throws[AttributeTypeException]
- private def parseLong(fieldValue: Any, force: Boolean = false): java.lang.Long = {
- val attempt: Try[Long] = Try {
- fieldValue match {
- case str: String =>
- if (force) {
- // Use US locale for comma-separated numbers
- NumberFormat.getNumberInstance(Locale.US).parse(str.trim).longValue()
- } else {
- str.trim.toLong
- }
- case int: Integer => int.toLong
- case long: java.lang.Long => long
- case double: java.lang.Double => double.toLong
- case boolean: java.lang.Boolean => if (boolean) 1L else 0L
- case timestamp: Timestamp => timestamp.toInstant.toEpochMilli
- // Binary is considered to be illegal here.
- case _ =>
- throw new IllegalArgumentException(
- s"Unsupported type for parsing to Long: ${fieldValue.getClass.getName}"
- )
- }
- }
- attempt.recover {
- case e: Exception =>
- throw new AttributeTypeException(
- s"Failed to parse type ${fieldValue.getClass.getName} to Long: ${fieldValue.toString}",
- e
- )
- }.get
- }
-
- @throws[AttributeTypeException]
- def parseTimestamp(fieldValue: Any): Timestamp = {
- val attempt: Try[Timestamp] = Try {
- fieldValue match {
- case str: String => new Timestamp(DateParserUtils.parseDate(str.trim).getTime)
- case long: java.lang.Long => new Timestamp(long)
- case timestamp: Timestamp => timestamp
- case date: java.util.Date => new Timestamp(date.getTime)
- // Integer, Double, Boolean, Binary are considered to be illegal here.
- case _ =>
- throw new AttributeTypeException(
- s"Unsupported type for parsing to Timestamp: ${fieldValue.getClass.getName}"
- )
- }
- }
-
- attempt.recover {
- case e: Exception =>
- throw new AttributeTypeException(
- s"Failed to parse type ${fieldValue.getClass.getName} to Timestamp: ${fieldValue.toString}",
- e
- )
- }.get
-
- }
-
- @throws[AttributeTypeException]
- def parseDouble(fieldValue: Any): java.lang.Double = {
- val attempt: Try[Double] = Try {
- fieldValue match {
- case str: String => str.trim.toDouble
- case int: Integer => int.toDouble
- case long: java.lang.Long => long.toDouble
- case double: java.lang.Double => double
- case boolean: java.lang.Boolean => if (boolean) 1 else 0
- // Timestamp and Binary are considered to be illegal here.
- case _ =>
- throw new AttributeTypeException(
- s"Unsupported type for parsing to Double: ${fieldValue.getClass.getName}"
- )
- }
- }
-
- attempt.recover {
- case e: Exception =>
- throw new AttributeTypeException(
- s"Failed to parse type ${fieldValue.getClass.getName} to Double: ${fieldValue.toString}",
- e
- )
- }.get
-
- }
-
- @throws[AttributeTypeException]
- private def parseBoolean(fieldValue: Any): java.lang.Boolean = {
- val attempt: Try[Boolean] = Try {
- fieldValue match {
- case str: String =>
- (Try(str.trim.toBoolean) orElse Try(str.trim.toInt == 1)).get
- case int: Integer => int != 0
- case long: java.lang.Long => long != 0
- case double: java.lang.Double => double != 0
- case boolean: java.lang.Boolean => boolean
- // Timestamp and Binary are considered to be illegal here.
- case _ =>
- throw new AttributeTypeException(
- s"Unsupported type for parsing to Boolean: ${fieldValue.getClass.getName}"
- )
- }
- }
-
- attempt.recover {
- case e: Exception =>
- throw new AttributeTypeException(
- s"Failed to parse type ${fieldValue.getClass.getName} to Boolean: ${fieldValue.toString}",
- e
- )
- }.get
- }
-
- /**
- * Infers field types of a given row of data. The given attributeTypes will be updated
- * through each iteration of row inference, to contain the most accurate inference.
- * @param attributeTypes AttributeTypes that are passed to and updated in each iteration.
- * @param fields data fields to be parsed
- * @return
- */
- private def inferRow(
- attributeTypes: Array[AttributeType],
- fields: Array[Any]
- ): Unit = {
- for (i <- fields.indices) {
- attributeTypes.update(i, inferField(attributeTypes.apply(i), fields.apply(i)))
- }
- }
-
- /**
- * Infers field types from the given rows of data.
- * @param fieldsIterator iterator of field arrays to be parsed.
- * each field array should have exact same order and length.
- * @return AttributeType array
- */
- def inferSchemaFromRows(fieldsIterator: Iterator[Array[Any]]): Array[AttributeType] = {
- var attributeTypes: Array[AttributeType] = Array()
-
- for (fields <- fieldsIterator) {
- if (attributeTypes.isEmpty) {
- attributeTypes = Array.fill[AttributeType](fields.length)(AttributeType.INTEGER)
- }
- inferRow(attributeTypes, fields)
- }
- attributeTypes
- }
-
- /**
- * infers the field type from the data field alone
- * @param fieldValue data field to be parsed, originally a String field
- * @return inferred AttributeType
- */
- def inferField(fieldValue: Any): AttributeType = {
- tryParseInteger(fieldValue)
- }
-
- private def tryParseInteger(fieldValue: Any): AttributeType = {
- if (fieldValue == null)
- return AttributeType.INTEGER
- allCatch opt parseInteger(fieldValue) match {
- case Some(_) => AttributeType.INTEGER
- case None => tryParseLong(fieldValue)
- }
- }
-
- private def tryParseLong(fieldValue: Any): AttributeType = {
- if (fieldValue == null)
- return AttributeType.LONG
- allCatch opt parseLong(fieldValue) match {
- case Some(_) => AttributeType.LONG
- case None => tryParseTimestamp(fieldValue)
- }
- }
-
- private def tryParseTimestamp(fieldValue: Any): AttributeType = {
- if (fieldValue == null)
- return AttributeType.TIMESTAMP
- allCatch opt parseTimestamp(fieldValue) match {
- case Some(_) => AttributeType.TIMESTAMP
- case None => tryParseDouble(fieldValue)
- }
- }
-
- private def tryParseDouble(fieldValue: Any): AttributeType = {
- if (fieldValue == null)
- return AttributeType.DOUBLE
- allCatch opt parseDouble(fieldValue) match {
- case Some(_) => AttributeType.DOUBLE
- case None => tryParseBoolean(fieldValue)
- }
- }
-
- private def tryParseBoolean(fieldValue: Any): AttributeType = {
- if (fieldValue == null)
- return AttributeType.BOOLEAN
- allCatch opt parseBoolean(fieldValue) match {
- case Some(_) => AttributeType.BOOLEAN
- case None => tryParseString()
- }
- }
-
- private def tryParseString(): AttributeType = {
- AttributeType.STRING
- }
-
- /**
- * infers the field type when both the type inferred so far and the field value are given
- * @param attributeType the type inferred so far
- * @param fieldValue data field to be parsed, originally a String field
- * @return inferred AttributeType
- */
- def inferField(attributeType: AttributeType, fieldValue: Any): AttributeType = {
- attributeType match {
- case AttributeType.STRING => tryParseString()
- case AttributeType.BOOLEAN => tryParseBoolean(fieldValue)
- case AttributeType.DOUBLE => tryParseDouble(fieldValue)
- case AttributeType.LONG => tryParseLong(fieldValue)
- case AttributeType.INTEGER => tryParseInteger(fieldValue)
- case AttributeType.TIMESTAMP => tryParseTimestamp(fieldValue)
- case AttributeType.BINARY => tryParseString()
- case _ => tryParseString()
- }
- }
-
- class AttributeTypeException(msg: String, cause: Throwable = null)
- extends IllegalArgumentException(msg, cause) {}
-}
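For context on the parsing and inference logic being moved here, a minimal usage sketch (pre-refactor package assumed): inference walks INTEGER -> LONG -> TIMESTAMP -> DOUBLE -> BOOLEAN -> STRING per column and keeps the most specific type that still parses every value.

```scala
import edu.uci.ics.amber.engine.common.model.tuple.{AttributeType, AttributeTypeUtils}

object InferenceSketch extends App {
  // Each array is one row; all rows must share the same column order and length.
  val rows = Iterator(
    Array[Any]("1", "9999999999", "true"),
    Array[Any]("2", "12345", "false")
  )
  // Expected per the cascade above: INTEGER, LONG, BOOLEAN.
  val inferred: Array[AttributeType] = AttributeTypeUtils.inferSchemaFromRows(rows)
  println(inferred.mkString(", "))

  // parseField converts a single raw value into the Java object backing the target type.
  println(AttributeTypeUtils.parseField("42", AttributeType.INTEGER))                  // 42
  println(AttributeTypeUtils.parseField("1,234", AttributeType.INTEGER, force = true)) // 1234
}
```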
diff --git a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/model/tuple/Schema.scala b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/model/tuple/Schema.scala
deleted file mode 100644
index c8dbdece6bb..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/model/tuple/Schema.scala
+++ /dev/null
@@ -1,205 +0,0 @@
-package edu.uci.ics.amber.engine.common.model.tuple
-
-import com.fasterxml.jackson.annotation.{JsonCreator, JsonIgnore, JsonProperty}
-import com.google.common.base.Preconditions.checkNotNull
-
-import scala.collection.immutable.ListMap
-import scala.collection.mutable
-
-case class Schema @JsonCreator() (
- @JsonProperty(value = "attributes", required = true) attributes: List[Attribute]
-) extends Serializable {
-
- checkNotNull(attributes)
-
- val attributeIndex: Map[String, Int] =
- attributes.view.map(_.getName.toLowerCase).zipWithIndex.toMap
-
- def this(attrs: Attribute*) = {
- this(attrs.toList)
- }
-
- @JsonProperty(value = "attributes")
- def getAttributes: List[Attribute] = attributes
-
- @JsonIgnore
- def getAttributeNames: List[String] = attributes.map(_.getName)
-
- def getIndex(attributeName: String): Int = {
- if (!containsAttribute(attributeName)) {
- throw new RuntimeException(s"$attributeName is not contained in the schema")
- }
- attributeIndex(attributeName.toLowerCase)
- }
-
- def getAttribute(attributeName: String): Attribute = attributes(getIndex(attributeName))
-
- @JsonIgnore
- def containsAttribute(attributeName: String): Boolean =
- attributeIndex.contains(attributeName.toLowerCase)
-
- override def hashCode(): Int = {
- val prime = 31
- var result = 1
- result = prime * result + (if (attributes == null) 0 else attributes.hashCode)
- result = prime * result + (if (attributeIndex == null) 0 else attributeIndex.hashCode)
- result
- }
-
- override def equals(obj: Any): Boolean =
- obj match {
- case that: Schema =>
- this.attributes == that.attributes && this.attributeIndex == that.attributeIndex
- case _ => false
- }
-
- override def toString: String = s"Schema[$attributes]"
-
- def getPartialSchema(attributeNames: List[String]): Schema = {
- Schema(attributeNames.map(name => getAttribute(name)))
- }
-
- /**
- * This method converts a Schema into a raw format, where each pair of attribute name and attribute type
- * are represented as string. This is for serialization between languages.
- */
- def toRawSchema: Map[String, String] =
- getAttributes.foldLeft(ListMap[String, String]())((list, attr) =>
- list + (attr.getName -> attr.getType.name())
- )
-}
-
-object Schema {
-
- def fromRawSchema(raw: Map[String, String]): Schema = {
- Schema(raw.map {
- case (name, attrType) =>
- new Attribute(name, AttributeType.valueOf(attrType))
- }.toList)
- }
- def builder(): Builder = Builder()
-
- case class Builder(private var attributes: List[Attribute] = List.empty) {
- private val attributeNames: mutable.Set[String] = mutable.Set.empty
-
- def add(attribute: Attribute): Builder = {
- require(attribute != null, "Attribute cannot be null")
- checkAttributeNotExists(attribute.getName)
- attributes ::= attribute
- attributeNames += attribute.getName.toLowerCase
- this
- }
-
- def add(attributeName: String, attributeType: AttributeType): Builder = {
- add(new Attribute(attributeName, attributeType))
- this
- }
-
- def add(attributes: Iterable[Attribute]): Builder = {
- attributes.foreach(add)
- this
- }
-
- def add(attributes: Attribute*): Builder = {
- attributes.foreach(add)
- this
- }
-
- def add(schema: Schema): Builder = {
- checkNotNull(schema)
- add(schema.getAttributes)
- this
- }
-
- def build(): Schema = Schema(attributes.reverse)
-
- /**
- * Removes an attribute from the schema builder if it exists.
- *
- * @param attribute , the name of the attribute
- * @return this Builder object
- */
- def removeIfExists(attribute: String): Builder = {
- checkNotNull(attribute)
- attributes = attributes.filter((attr: Attribute) => !attr.getName.equalsIgnoreCase(attribute))
- attributeNames.remove(attribute.toLowerCase)
- this
- }
-
- /**
- * Removes the attributes from the schema builder if they exist.
- *
- * @param attributes , the names of the attributes
- * @return this Builder object
- */
- def removeIfExists(attributes: Iterable[String]): Builder = {
- checkNotNull(attributes)
- attributes.foreach((attr: String) => checkNotNull(attr))
- attributes.foreach((attr: String) => this.removeIfExists(attr))
- this
- }
-
- /**
- * Removes the attributes from the schema builder if they exist.
- *
- * @param attributes , the names of the attributes
- * @return this Builder object
- */
- def removeIfExists(attributes: String*): Builder = {
- checkNotNull(attributes)
- this.removeIfExists(attributes)
- this
- }
-
- /**
- * Removes an attribute from the schema builder.
- * Fails if the attribute does not exist.
- *
- * @param attribute , the name of the attribute
- * @return this Builder object
- */
- def remove(attribute: String): Builder = {
- checkNotNull(attribute)
- checkAttributeExists(attribute)
- removeIfExists(attribute)
- this
- }
-
- /**
- * Removes the attributes from the schema builder.
- * Fails if any of the attributes does not exist.
- */
- def remove(attributes: Iterable[String]): Builder = {
- checkNotNull(attributes)
- attributes.foreach(attrName => checkNotNull(attrName))
- attributes.foreach(this.checkAttributeExists)
- this.removeIfExists(attributes)
- this
- }
-
- /**
- * Removes the attributes from the schema builder.
- * Fails if any of the attributes does not exist.
- *
- * @param attributes
- * @return the builder itself
- */
- def remove(attributes: String*): Builder = {
- checkNotNull(attributes)
- this.remove(attributes)
- this
- }
-
- private def checkAttributeNotExists(attributeName: String): Unit = {
- if (attributeNames.contains(attributeName.toLowerCase)) {
- throw new RuntimeException(s"Attribute $attributeName already exists in the schema")
- }
- }
-
- private def checkAttributeExists(attributeName: String): Unit = {
- if (!attributeNames.contains(attributeName.toLowerCase)) {
- throw new RuntimeException(s"Attribute $attributeName does not exist in the schema")
- }
- }
- }
-}
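A small usage sketch of the Schema builder defined above (pre-refactor package assumed): attribute order is preserved, name lookups are case-insensitive, and removeIfExists silently ignores unknown names.

```scala
import edu.uci.ics.amber.engine.common.model.tuple.{Attribute, AttributeType, Schema}

object SchemaSketch extends App {
  val schema: Schema = Schema
    .builder()
    .add(new Attribute("id", AttributeType.LONG))
    .add("name", AttributeType.STRING) // overloaded add(name, type)
    .add(new Attribute("score", AttributeType.DOUBLE))
    .build()

  println(schema.getAttributeNames) // List(id, name, score) - insertion order kept
  println(schema.getIndex("NAME"))  // 1 - lookups are case-insensitive

  // Copy an existing schema into a builder, then drop an attribute.
  val trimmed = Schema.builder().add(schema).removeIfExists("score").build()
  println(trimmed.containsAttribute("score")) // false
}
```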
diff --git a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/model/tuple/Tuple.scala b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/model/tuple/Tuple.scala
deleted file mode 100644
index 37b046f29fc..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/model/tuple/Tuple.scala
+++ /dev/null
@@ -1,247 +0,0 @@
-package edu.uci.ics.amber.engine.common.model.tuple
-
-import com.fasterxml.jackson.annotation.{JsonCreator, JsonIgnore, JsonProperty}
-import com.fasterxml.jackson.databind.JsonNode
-import com.fasterxml.jackson.databind.node.ObjectNode
-import com.google.common.base.Preconditions.checkNotNull
-import Tuple.checkSchemaMatchesFields
-import TupleUtils.document2Tuple
-import edu.uci.ics.amber.engine.common.Utils
-import org.bson.Document
-import org.ehcache.sizeof.SizeOf
-
-import java.util
-import scala.collection.mutable
-
-class TupleBuildingException(errorMessage: String) extends RuntimeException(errorMessage) {}
-
-/**
- * Represents a tuple in a data processing workflow, encapsulating a schema and corresponding field values.
- *
- * A Tuple is a fundamental data structure that holds an ordered collection of elements. Each element can be of any type.
- * The schema defines the structure of the Tuple, including the names and types of fields that the Tuple can hold.
- *
- * @constructor Create a new Tuple with a specified schema and field values.
- * @param schema The schema associated with this tuple, defining the structure and types of fields in the tuple.
- * @param fieldVals A list of values corresponding to the fields defined in the schema. Each value in this list
- * is mapped to a field in the schema, in the same order as the fields are defined.
- *
- * @throws IllegalArgumentException if either schema or fieldVals is null, ensuring that every Tuple has a well-defined structure.
- */
-case class Tuple @JsonCreator() (
- @JsonProperty(value = "schema", required = true) schema: Schema,
- @JsonProperty(value = "fields", required = true) fieldVals: Array[Any]
-) extends SeqTupleLike
- with Serializable {
-
- checkNotNull(schema)
- checkNotNull(fieldVals)
- checkSchemaMatchesFields(schema.getAttributes, fieldVals)
-
- override val inMemSize: Long = SizeOf.newInstance().deepSizeOf(this)
-
- @JsonIgnore def length: Int = fieldVals.length
-
- @JsonIgnore def getSchema: Schema = schema
-
- def getField[T](index: Int): T = {
- fieldVals(index).asInstanceOf[T]
- }
-
- def getField[T](attributeName: String): T = {
- if (!schema.containsAttribute(attributeName)) {
- throw new RuntimeException(s"$attributeName is not in the tuple")
- }
- getField(schema.getIndex(attributeName))
- }
-
- def getField[T](attribute: Attribute): T = getField(attribute.getName)
-
- override def getFields: Array[Any] = fieldVals
-
- override def enforceSchema(schema: Schema): Tuple = {
- assert(
- getSchema == schema,
- s"output tuple schema does not match the expected schema! " +
- s"output schema: $getSchema, " +
- s"expected schema: $schema"
- )
- this
- }
-
- override def hashCode: Int = util.Arrays.deepHashCode(getFields.map(_.asInstanceOf[AnyRef]))
-
- override def equals(obj: Any): Boolean =
- obj match {
- case that: Tuple => (this.getFields sameElements that.getFields) && this.schema == that.schema
- case _ => false
- }
-
- def getPartialTuple(attributeNames: List[String]): Tuple = {
- val partialSchema = schema.getPartialSchema(attributeNames)
- val builder = Tuple.Builder(partialSchema)
- val partialArray = attributeNames.map(getField[Any]).toArray
- builder.addSequentially(partialArray)
- builder.build()
- }
-
- override def toString: String =
- s"Tuple [schema=$schema, fields=${fieldVals.mkString("[", ", ", "]")}]"
-
- def asKeyValuePairJson(): ObjectNode = {
- val objectNode = Utils.objectMapper.createObjectNode()
- this.schema.getAttributeNames.foreach { attrName =>
- val valueNode = Utils.objectMapper.convertValue(this.getField(attrName), classOf[JsonNode])
- objectNode.set[ObjectNode](attrName, valueNode)
- }
- objectNode
- }
-
- def asDocument(): Document = {
- val doc = new Document()
- this.schema.getAttributeNames.foreach { attrName =>
- doc.put(attrName, this.getField(attrName))
- }
- doc
- }
-}
-
-object Tuple {
- val toDocument: Tuple => Document = (tuple: Tuple) => {
- val doc = new Document()
- tuple.schema.getAttributeNames.foreach { attrName =>
- doc.put(attrName, tuple.getField(attrName))
- }
- doc
- }
-
- val fromDocument: Schema => Document => Tuple = (schema: Schema) =>
- (doc: Document) => {
- document2Tuple(doc, schema)
- }
-
- /**
- * Validates that the provided attributes match the provided fields in type and order.
- *
- * @param attributes An iterable of Attributes to be validated against the fields.
- * @param fields An iterable of field values to be validated against the attributes.
- * @throws RuntimeException if the sizes of attributes and fields do not match, or if their types are incompatible.
- */
- private def checkSchemaMatchesFields(
- attributes: Iterable[Attribute],
- fields: Iterable[Any]
- ): Unit = {
- val attributeList = attributes.toList
- val fieldList = fields.toList
-
- if (attributeList.size != fieldList.size) {
- throw new RuntimeException(
- s"Schema size (${attributeList.size}) and field size (${fieldList.size}) are different"
- )
- }
-
- (attributeList zip fieldList).foreach {
- case (attribute, field) =>
- checkAttributeMatchesField(attribute, field)
- }
- }
-
- /**
- * Validates that a single field matches its corresponding attribute in type.
- *
- * @param attribute The attribute to be matched.
- * @param field The field value to be checked.
- * @throws RuntimeException if the field's type does not match the attribute's defined type.
- */
- private def checkAttributeMatchesField(attribute: Attribute, field: Any): Unit = {
- if (
- field != null && attribute.getType != AttributeType.ANY && !field.getClass.equals(
- attribute.getType.getFieldClass
- )
- ) {
- throw new RuntimeException(
- s"Attribute ${attribute.getName}'s type (${attribute.getType}) is different from field's type (${AttributeType
- .getAttributeType(field.getClass)})"
- )
- }
- }
-
- /**
- * Creates a new Tuple builder for a specified schema.
- *
- * @param schema The schema for which the Tuple builder will create Tuples.
- * @return A new instance of Tuple.Builder configured with the specified schema.
- */
- def builder(schema: Schema): Builder = {
- Tuple.Builder(schema)
- }
-
- /**
- * Builder class for constructing Tuple instances in a flexible and controlled manner.
- */
- case class Builder(schema: Schema) {
- private val fieldNameMap = mutable.Map.empty[String, Any]
-
- def add(tuple: Tuple, isStrictSchemaMatch: Boolean = true): Builder = {
- require(tuple != null, "Tuple cannot be null")
-
- tuple.getFields.zipWithIndex.foreach {
- case (field, i) =>
- val attribute = tuple.schema.getAttributes(i)
- if (!isStrictSchemaMatch && !schema.containsAttribute(attribute.getName)) {
- // Skip if not matching in non-strict mode
- } else {
- add(attribute, tuple.getFields(i))
- }
- }
- this
- }
-
- def add(attribute: Attribute, field: Any): Builder = {
- require(attribute != null, "Attribute cannot be null")
- checkAttributeMatchesField(attribute, field)
-
- if (!schema.containsAttribute(attribute.getName)) {
- throw new TupleBuildingException(
- s"${attribute.getName} doesn't exist in the expected schema."
- )
- }
-
- fieldNameMap.put(attribute.getName.toLowerCase, field)
- this
- }
-
- def add(attributeName: String, attributeType: AttributeType, field: Any): Builder = {
- require(
- attributeName != null && attributeType != null,
- "Attribute name and type cannot be null"
- )
- this.add(new Attribute(attributeName, attributeType), field)
- this
- }
-
- def addSequentially(fields: Array[Any]): Builder = {
- require(fields != null, "Fields cannot be null")
- checkSchemaMatchesFields(schema.getAttributes, fields)
- schema.getAttributes.zip(fields).foreach {
- case (attribute, field) =>
- this.add(attribute, field)
- }
- this
- }
-
- def build(): Tuple = {
- val missingAttributes =
- schema.getAttributes.filterNot(attr => fieldNameMap.contains(attr.getName.toLowerCase))
- if (missingAttributes.nonEmpty) {
- throw new TupleBuildingException(
- s"Tuple does not have the same number of attributes as schema. Missing attributes are $missingAttributes"
- )
- }
-
- val fields =
- schema.getAttributes.map(attr => fieldNameMap(attr.getName.toLowerCase)).toArray
- new Tuple(schema, fields)
- }
- }
-}
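For reference, a sketch of the Tuple builder contract described in the comments above (pre-refactor package assumed): field count and Java classes are validated against the schema, and fields can be added sequentially or per attribute.

```scala
import edu.uci.ics.amber.engine.common.model.tuple.{Attribute, AttributeType, Schema, Tuple}

object TupleSketch extends App {
  val schema = Schema
    .builder()
    .add(new Attribute("id", AttributeType.INTEGER))
    .add(new Attribute("name", AttributeType.STRING))
    .build()

  // addSequentially validates count and field classes against the schema.
  val tuple: Tuple = Tuple
    .builder(schema)
    .addSequentially(Array(Int.box(1), "alice"))
    .build()

  println(tuple.getField[String]("name")) // alice
  println(tuple.getField[Integer](0))     // 1

  // getPartialTuple projects onto a subset of attributes.
  println(tuple.getPartialTuple(List("name"))) // Tuple [schema=..., fields=[alice]]
}
```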
diff --git a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/model/tuple/TupleLike.scala b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/model/tuple/TupleLike.scala
deleted file mode 100644
index 7c00264cf6e..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/model/tuple/TupleLike.scala
+++ /dev/null
@@ -1,139 +0,0 @@
-package edu.uci.ics.amber.engine.common.model.tuple
-
-import edu.uci.ics.amber.engine.common.workflow.PortIdentity
-
-import scala.jdk.CollectionConverters.CollectionHasAsScala
-
-sealed trait FieldArray {
- def getFields: Array[Any]
-}
-
-sealed trait TupleLike extends FieldArray {
- def inMemSize: Long = 0L
-}
-
-trait SchemaEnforceable {
- def enforceSchema(schema: Schema): Tuple
-}
-
-trait InternalMarker extends TupleLike {
- override def getFields: Array[Any] = Array.empty
-}
-
-final case class FinalizePort(portId: PortIdentity, input: Boolean) extends InternalMarker
-final case class FinalizeExecutor() extends InternalMarker
-
-trait SeqTupleLike extends TupleLike with SchemaEnforceable {
- override def inMemSize: Long = ???
-
- /**
- * Constructs a Tuple object from a sequence of field values
- * according to the specified schema. It asserts that the number
- * of provided fields matches the schema's requirement; every
- * field must also satisfy its attribute's type.
- *
- * @param schema Schema for Tuple construction.
- * @return Tuple constructed according to the schema.
- */
- override def enforceSchema(schema: Schema): Tuple = {
- val attributes = schema.getAttributes
- val builder = Tuple.builder(schema)
- getFields.zipWithIndex.foreach {
- case (value, i) =>
- builder.add(attributes(i), value)
- }
- builder.build()
- }
-
-}
-
-trait MapTupleLike extends SeqTupleLike with SchemaEnforceable {
- override def inMemSize: Long = ???
- def fieldMappings: Map[String, Any]
- override def getFields: Array[Any] = fieldMappings.values.toArray
-
- /**
- * Constructs a `Tuple` based on the provided schema and `tupleLike` object.
- *
- * For each attribute in the schema, the function attempts to find a corresponding value
- * in the tuple-like object's field mappings. If a mapping is found, that value is used;
- * otherwise, `null` is used as the attribute value in the built tuple.
- *
- * @param schema The schema defining the attributes and their types for the tuple.
- * @return A new `Tuple` instance built according to the schema and the data provided
- * by the `tupleLike` object.
- */
- override def enforceSchema(schema: Schema): Tuple = {
- val builder = Tuple.builder(schema)
- schema.getAttributes.foreach { attribute =>
- val value = fieldMappings.getOrElse(attribute.getName, null)
- builder.add(attribute, value)
- }
- builder.build()
- }
-}
-
-object TupleLike {
-
- // Implicit evidence markers for different types
- trait NotAnIterable[A]
-
- // Provide a low-priority implicit evidence for all types that are not Iterable
- trait LowPriorityNotAnIterableImplicits {
- implicit def defaultNotAnIterable[A]: NotAnIterable[A] = new NotAnIterable[A] {}
- }
-
- // Object to hold the implicits
- object NotAnIterable extends LowPriorityNotAnIterableImplicits {
- // Prioritize this implicit for Strings, allowing them explicitly
- implicit object StringIsNotAnIterable extends NotAnIterable[String]
-
- // Ensure Iterable types do not have an implicit NotAnIterable available
- // This is a way to "exclude" Iterable types by not providing an implicit instance for them
- implicit def iterableIsNotAnIterable[C[_] <: Iterable[A], A]: NotAnIterable[C[A]] =
- throw new RuntimeException("Iterable types are not allowed")
- }
-
- def apply(mappings: Map[String, Any]): MapTupleLike = {
- new MapTupleLike {
- override val fieldMappings: Map[String, Any] = mappings
- }
- }
-
- def apply(mappings: Iterable[(String, Any)]): MapTupleLike = {
- new MapTupleLike {
- override val fieldMappings: Map[String, Any] = mappings.toMap
- }
- }
-
- def apply(mappings: (String, Any)*): MapTupleLike = {
- new MapTupleLike {
- override val fieldMappings: Map[String, Any] = mappings.toMap
- }
- }
-
- def apply(fieldList: java.util.List[Any]): SeqTupleLike = {
- new SeqTupleLike {
- override val getFields: Array[Any] = fieldList.asScala.toArray
- }
- }
-
- def apply[T: NotAnIterable](fieldSeq: T*)(implicit ev: NotAnIterable[_] = null): SeqTupleLike = {
- new SeqTupleLike {
- override val getFields: Array[Any] = fieldSeq.toArray
- }
- }
-
- def apply[T <: Any](fieldIter: Iterable[T]): SeqTupleLike = {
- new SeqTupleLike {
- override val getFields: Array[Any] = fieldIter.toArray
- }
- }
-
- def apply(array: Array[Any]): SeqTupleLike = {
- new SeqTupleLike {
- override val getFields: Array[Any] = array
- }
- }
-
-}
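A short sketch of the enforceSchema behavior described above for map-backed TupleLike values (pre-refactor package assumed): attributes present in the mapping are copied by name, attributes absent from it become null.

```scala
import edu.uci.ics.amber.engine.common.model.tuple.{Attribute, AttributeType, Schema, TupleLike}

object TupleLikeSketch extends App {
  val schema = Schema
    .builder()
    .add(new Attribute("city", AttributeType.STRING))
    .add(new Attribute("population", AttributeType.LONG))
    .build()

  // All attributes present in the mapping: values are matched by name.
  val complete = TupleLike(Map[String, Any]("city" -> "Irvine", "population" -> Long.box(314000L)))
  println(complete.enforceSchema(schema)) // fields=[Irvine, 314000]

  // Missing attributes are filled with null by MapTupleLike.enforceSchema.
  val partial = TupleLike(Map[String, Any]("city" -> "Tokyo"))
  println(partial.enforceSchema(schema))  // fields=[Tokyo, null]
}
```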
diff --git a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/model/tuple/TupleUtils.scala b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/model/tuple/TupleUtils.scala
deleted file mode 100644
index 4ff9ae2effa..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/model/tuple/TupleUtils.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-package edu.uci.ics.amber.engine.common.model.tuple
-
-import com.fasterxml.jackson.databind.JsonNode
-import AttributeType._
-import AttributeTypeUtils.{inferSchemaFromRows, parseField}
-import edu.uci.ics.amber.engine.common.Utils.objectMapper
-import edu.uci.ics.texera.workflow.operators.source.scan.json.JSONUtil.JSONToMap
-import org.bson.Document
-import org.bson.types.Binary
-
-import scala.collection.mutable.ArrayBuffer
-
-object TupleUtils {
-
- def tuple2json(tuple: Tuple): String = {
- tuple.asKeyValuePairJson().toString
- }
-
- def json2tuple(json: String): Tuple = {
- var fieldNames = Set[String]()
-
- val allFields: ArrayBuffer[Map[String, String]] = ArrayBuffer()
-
- val root: JsonNode = objectMapper.readTree(json)
- if (root.isObject) {
- val fields: Map[String, String] = JSONToMap(root)
- fieldNames = fieldNames.++(fields.keySet)
- allFields += fields
- }
-
- val sortedFieldNames = fieldNames.toList
-
- val attributeTypes = inferSchemaFromRows(allFields.iterator.map(fields => {
- val result = ArrayBuffer[Object]()
- for (fieldName <- sortedFieldNames) {
- if (fields.contains(fieldName)) {
- result += fields(fieldName)
- } else {
- result += null
- }
- }
- result.toArray
- }))
-
- val schema = Schema
- .builder()
- .add(
- sortedFieldNames.indices
- .map(i => new Attribute(sortedFieldNames(i), attributeTypes(i)))
- )
- .build()
-
- try {
- val fields = scala.collection.mutable.ArrayBuffer.empty[Any]
- val data = JSONToMap(objectMapper.readTree(json))
-
- for (fieldName <- schema.getAttributeNames) {
- if (data.contains(fieldName)) {
- fields += parseField(data(fieldName), schema.getAttribute(fieldName).getType)
- } else {
- fields += null
- }
- }
- Tuple.builder(schema).addSequentially(fields.toArray).build()
- } catch {
- case e: Exception => throw e
- }
- }
-
- def document2Tuple(doc: Document, schema: Schema): Tuple = {
- val builder = Tuple.builder(schema)
- schema.getAttributes.foreach(attr =>
- if (attr.getType == BINARY) {
- // special care for converting MongoDB's binary type to byte[] in our schema
- builder.add(attr, doc.get(attr.getName).asInstanceOf[Binary].getData)
- } else {
- builder.add(attr, parseField(doc.get(attr.getName), attr.getType))
- }
- )
- builder.build()
- }
-
-}
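A sketch of the JSON round trip the deleted helpers provide (pre-refactor package assumed): json2tuple flattens the JSON object, infers an AttributeType per field, and parses values accordingly; tuple2json emits an object keyed by attribute name.

```scala
import edu.uci.ics.amber.engine.common.model.tuple.TupleUtils.{json2tuple, tuple2json}

object JsonTupleSketch extends App {
  // Types are inferred from the values; here "29" is likely inferred as INTEGER
  // and "true" as BOOLEAN, while "alice" stays a STRING.
  val tuple = json2tuple("""{"name": "alice", "age": "29", "active": "true"}""")
  println(tuple.getSchema)

  // Round-trip back to a JSON object keyed by attribute name.
  println(tuple2json(tuple))
}
```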
diff --git a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/rpc/AsyncRPCClient.scala b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/rpc/AsyncRPCClient.scala
index 22657bd106b..b7f8454e317 100644
--- a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/rpc/AsyncRPCClient.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/rpc/AsyncRPCClient.scala
@@ -3,21 +3,7 @@ package edu.uci.ics.amber.engine.common.rpc
import com.twitter.util.{Future, Promise}
import edu.uci.ics.amber.engine.architecture.controller.ClientEvent
import edu.uci.ics.amber.engine.architecture.messaginglayer.NetworkOutputGateway
-import edu.uci.ics.amber.engine.common.AmberLogging
-import edu.uci.ics.amber.engine.common.virtualidentity.{
- ActorVirtualIdentity,
- ChannelIdentity,
- ChannelMarkerIdentity
-}
-import edu.uci.ics.amber.engine.common.virtualidentity.util.CLIENT
-import io.grpc.MethodDescriptor
-import edu.uci.ics.amber.engine.architecture.rpc.controlcommands.{
- AsyncRPCContext,
- ChannelMarkerPayload,
- ChannelMarkerType,
- ControlInvocation,
- ControlRequest
-}
+import edu.uci.ics.amber.engine.architecture.rpc.controlcommands._
import edu.uci.ics.amber.engine.architecture.rpc.controllerservice.ControllerServiceFs2Grpc
import edu.uci.ics.amber.engine.architecture.rpc.controlreturns.{
ControlError,
@@ -26,8 +12,16 @@ import edu.uci.ics.amber.engine.architecture.rpc.controlreturns.{
WorkerMetricsResponse
}
import edu.uci.ics.amber.engine.architecture.rpc.workerservice.WorkerServiceFs2Grpc
+import edu.uci.ics.amber.engine.common.AmberLogging
import edu.uci.ics.amber.engine.common.rpc.AsyncRPCClient.createProxy
+import edu.uci.ics.amber.engine.common.virtualidentity.util.CLIENT
import edu.uci.ics.amber.error.ErrorUtils.reconstructThrowable
+import edu.uci.ics.amber.core.virtualidentity.{
+ ActorVirtualIdentity,
+ ChannelIdentity,
+ ChannelMarkerIdentity
+}
+import io.grpc.MethodDescriptor
import java.lang.reflect.{InvocationHandler, Method, Proxy}
import scala.collection.mutable
diff --git a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/rpc/AsyncRPCHandlerInitializer.scala b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/rpc/AsyncRPCHandlerInitializer.scala
index 149c1e423ad..4205027fda4 100644
--- a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/rpc/AsyncRPCHandlerInitializer.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/rpc/AsyncRPCHandlerInitializer.scala
@@ -6,7 +6,7 @@ import edu.uci.ics.amber.engine.architecture.rpc.controlcommands._
import edu.uci.ics.amber.engine.architecture.rpc.controllerservice.ControllerServiceFs2Grpc
import edu.uci.ics.amber.engine.architecture.rpc.controlreturns._
import edu.uci.ics.amber.engine.architecture.rpc.workerservice.WorkerServiceFs2Grpc
-import edu.uci.ics.amber.engine.common.virtualidentity.{
+import edu.uci.ics.amber.core.virtualidentity.{
ActorVirtualIdentity,
ChannelIdentity,
ChannelMarkerIdentity
@@ -19,8 +19,11 @@ class AsyncRPCHandlerInitializer(
ctrlReceiver: AsyncRPCServer
) {
implicit def returnAsFuture[R](ret: R): Future[R] = Future[R](ret)
+
implicit def actorIdAsContext(to: ActorVirtualIdentity): AsyncRPCContext = mkContext(to)
+
implicit def stringToResponse(s: String): StringResponse = StringResponse(s)
+
implicit def intToResponse(i: Int): IntResponse = IntResponse(i)
// register all handlers
@@ -28,7 +31,9 @@ class AsyncRPCHandlerInitializer(
def controllerInterface: ControllerServiceFs2Grpc[Future, AsyncRPCContext] =
ctrlSource.controllerInterface
+
def workerInterface: WorkerServiceFs2Grpc[Future, AsyncRPCContext] = ctrlSource.workerInterface
+
def mkContext(to: ActorVirtualIdentity): AsyncRPCContext = ctrlSource.mkContext(to)
def sendChannelMarker(
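The implicit conversions kept in this hunk are what let RPC handlers return plain values. A standalone sketch of the same pattern, with StringResponse as a stand-in for the generated message type (only Twitter Futures are assumed):

```scala
import com.twitter.util.{Await, Future}
import scala.language.implicitConversions

// Stand-in for the generated controlreturns.StringResponse message.
final case class StringResponse(value: String)

object ImplicitLiftSketch {
  // Mirrors AsyncRPCHandlerInitializer: lift a plain return value into a Future,
  // and wrap a bare String into the response message type.
  implicit def returnAsFuture[R](ret: R): Future[R] = Future(ret)
  implicit def stringToResponse(s: String): StringResponse = StringResponse(s)

  // The body ends with a StringResponse; returnAsFuture lifts it to Future[StringResponse].
  def echoHandler(msg: String): Future[StringResponse] = StringResponse(msg)

  // A bare String can be used where a StringResponse is expected.
  val greeting: StringResponse = "hello"

  def main(args: Array[String]): Unit =
    println(Await.result(echoHandler(greeting.value))) // StringResponse(hello)
}
```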
diff --git a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/rpc/AsyncRPCServer.scala b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/rpc/AsyncRPCServer.scala
index 326537b72f9..1977bc9764d 100644
--- a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/rpc/AsyncRPCServer.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/rpc/AsyncRPCServer.scala
@@ -9,8 +9,8 @@ import edu.uci.ics.amber.engine.architecture.rpc.controlcommands.{
}
import edu.uci.ics.amber.engine.architecture.rpc.controlreturns.{ControlReturn, ReturnInvocation}
import edu.uci.ics.amber.engine.common.AmberLogging
-import edu.uci.ics.amber.engine.common.virtualidentity.ActorVirtualIdentity
import edu.uci.ics.amber.error.ErrorUtils.mkControlError
+import edu.uci.ics.amber.core.virtualidentity.ActorVirtualIdentity
import java.lang.reflect.Method
import scala.collection.mutable
@@ -59,7 +59,13 @@ class AsyncRPCServer(
): Unit = {
try {
val result =
- method.invoke(handler, requestArg, contextArg)
+ try {
+ method.invoke(handler, requestArg, contextArg)
+ } catch {
+ case e: java.lang.reflect.InvocationTargetException =>
+ throw Option(e.getCause).getOrElse(e)
+ case e: Throwable => throw e
+ }
result
.asInstanceOf[Future[ControlReturn]]
.onSuccess { ret =>
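The new try/catch above unwraps InvocationTargetException so the handler's original failure is what gets propagated to the error-reporting path. A self-contained sketch of why that matters:

```scala
import java.lang.reflect.InvocationTargetException

object UnwrapSketch {
  class Handler {
    def fail(): Unit = throw new IllegalStateException("boom")
  }

  def main(args: Array[String]): Unit = {
    val handler = new Handler
    val method = classOf[Handler].getMethod("fail")
    try {
      // Method.invoke wraps anything thrown by the target in InvocationTargetException,
      // so without unwrapping, callers would only ever see the reflection wrapper.
      try method.invoke(handler)
      catch {
        case e: InvocationTargetException => throw Option(e.getCause).getOrElse(e)
      }
    } catch {
      case e: IllegalStateException => println(s"caught original error: ${e.getMessage}")
    }
  }
}
```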
diff --git a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/statetransition/StateManager.scala b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/statetransition/StateManager.scala
index 39291b5c9d3..d346497ec39 100644
--- a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/statetransition/StateManager.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/statetransition/StateManager.scala
@@ -1,11 +1,11 @@
package edu.uci.ics.amber.engine.common.statetransition
-import edu.uci.ics.amber.engine.common.amberexception.WorkflowRuntimeException
+import edu.uci.ics.amber.core.WorkflowRuntimeException
import edu.uci.ics.amber.engine.common.statetransition.StateManager.{
InvalidStateException,
InvalidTransitionException
}
-import edu.uci.ics.amber.engine.common.virtualidentity.ActorVirtualIdentity
+import edu.uci.ics.amber.core.virtualidentity.ActorVirtualIdentity
object StateManager {
diff --git a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/statetransition/WorkerStateManager.scala b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/statetransition/WorkerStateManager.scala
index a931e77f087..199925d91ef 100644
--- a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/statetransition/WorkerStateManager.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/statetransition/WorkerStateManager.scala
@@ -2,7 +2,7 @@ package edu.uci.ics.amber.engine.common.statetransition
import edu.uci.ics.amber.engine.architecture.worker.statistics.WorkerState
import edu.uci.ics.amber.engine.architecture.worker.statistics.WorkerState._
-import edu.uci.ics.amber.engine.common.virtualidentity.ActorVirtualIdentity
+import edu.uci.ics.amber.core.virtualidentity.ActorVirtualIdentity
// The following pattern is a good practice of enum in scala
// We've always used this pattern in the codebase
diff --git a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/storage/BufferedItemWriter.scala b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/storage/BufferedItemWriter.scala
deleted file mode 100644
index 76bac137b09..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/storage/BufferedItemWriter.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-package edu.uci.ics.amber.engine.common.storage
-
-/**
- * BufferedItemWriter provides an interface for writing items to a buffer and performing I/O operations.
- * The items are buffered before being written to the underlying storage to optimize performance.
- * @tparam T the type of data items to be written.
- */
-trait BufferedItemWriter[T] {
-
- /**
- * The size of the buffer.
- * @return the buffer size.
- */
- val bufferSize: Int
-
- /**
- * Open the writer, initializing any necessary resources.
- * This method should be called before any write operations.
- */
- def open(): Unit
-
- /**
- * Close the writer, flushing any remaining items in the buffer
- * to the underlying storage and releasing any held resources.
- */
- def close(): Unit
-
- /**
- * Put one item into the buffer. If the buffer is full, it should be flushed to the underlying storage.
- * @param item the data item to be written.
- */
- def putOne(item: T): Unit
-
- /**
- * Remove one item from the buffer. If the item is not found in the buffer, an appropriate action should be taken,
- * such as throwing an exception or ignoring the request.
- * @param item the data item to be removed.
- */
- def removeOne(item: T): Unit
-}
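To illustrate the contract the deleted trait describes (buffer until bufferSize is reached, then flush; close flushes the remainder), a minimal in-memory implementation; the trait's package is the pre-refactor one shown in this diff.

```scala
import edu.uci.ics.amber.engine.common.storage.BufferedItemWriter
import scala.collection.mutable.ArrayBuffer

// Buffers items and "flushes" them to an in-memory sink once bufferSize is reached.
class InMemoryItemWriter[T](override val bufferSize: Int = 2) extends BufferedItemWriter[T] {
  private val buffer = new ArrayBuffer[T]()
  val sink = new ArrayBuffer[T]()

  override def open(): Unit = buffer.clear()

  override def putOne(item: T): Unit = {
    buffer += item
    if (buffer.size >= bufferSize) flush()
  }

  // Items still sitting in the buffer can be withdrawn before they are flushed.
  override def removeOne(item: T): Unit = buffer -= item

  override def close(): Unit = flush()

  private def flush(): Unit = {
    sink ++= buffer
    buffer.clear()
  }
}
```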
diff --git a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/storage/DatasetFileDocument.scala b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/storage/DatasetFileDocument.scala
deleted file mode 100644
index fa4d740b5c6..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/storage/DatasetFileDocument.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-package edu.uci.ics.amber.engine.common.storage
-
-import edu.uci.ics.texera.web.resource.dashboard.user.dataset.service.GitVersionControlLocalFileStorage
-import edu.uci.ics.texera.web.resource.dashboard.user.dataset.utils.PathUtils
-import org.jooq.types.UInteger
-
-import java.io.{File, FileOutputStream, InputStream}
-import java.net.{URI, URLDecoder}
-import java.nio.charset.StandardCharsets
-import java.nio.file.{Files, Path, Paths}
-import scala.jdk.CollectionConverters.IteratorHasAsScala
-
-class DatasetFileDocument(uri: URI) extends VirtualDocument[Nothing] {
- // Utility function to parse and decode URI segments into individual components
- private def parseUri(uri: URI): (Int, String, Path) = {
- val segments = Paths.get(uri.getPath).iterator().asScala.map(_.toString).toArray
- if (segments.length < 3)
- throw new IllegalArgumentException("URI format is incorrect")
-
- val did = segments(0).toInt
- val datasetVersionHash = URLDecoder.decode(segments(1), StandardCharsets.UTF_8)
- val decodedRelativeSegments =
- segments.drop(2).map(part => URLDecoder.decode(part, StandardCharsets.UTF_8))
- val fileRelativePath = Paths.get(decodedRelativeSegments.head, decodedRelativeSegments.tail: _*)
-
- (did, datasetVersionHash, fileRelativePath)
- }
-
- // Extract components from URI using the utility function
- private val (did, datasetVersionHash, fileRelativePath) = parseUri(uri)
-
- private var tempFile: Option[File] = None
-
- override def getURI: URI = uri
-
- override def asInputStream(): InputStream = {
- val datasetAbsolutePath = PathUtils.getDatasetPath(UInteger.valueOf(did))
- GitVersionControlLocalFileStorage
- .retrieveFileContentOfVersionAsInputStream(
- datasetAbsolutePath,
- datasetVersionHash,
- datasetAbsolutePath.resolve(fileRelativePath)
- )
- }
-
- override def asFile(): File = {
- tempFile match {
- case Some(file) => file
- case None =>
- val tempFilePath = Files.createTempFile("versionedFile", ".tmp")
- val tempFileStream = new FileOutputStream(tempFilePath.toFile)
- val inputStream = asInputStream()
-
- val buffer = new Array[Byte](1024)
-
- // Create an iterator to repeatedly call inputStream.read, and direct buffered data to file
- Iterator
- .continually(inputStream.read(buffer))
- .takeWhile(_ != -1)
- .foreach(tempFileStream.write(buffer, 0, _))
-
- inputStream.close()
- tempFileStream.close()
-
- val file = tempFilePath.toFile
- tempFile = Some(file)
- file
- }
- }
-
- override def remove(): Unit = {
- // first remove the temporary file
- tempFile match {
- case Some(file) => Files.delete(file.toPath)
- case None => // Do nothing
- }
- // then remove the dataset file
- GitVersionControlLocalFileStorage.removeFileFromRepo(
- PathUtils.getDatasetPath(UInteger.valueOf(did)),
- PathUtils.getDatasetPath(UInteger.valueOf(did)).resolve(fileRelativePath)
- )
- }
-}
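The URI layout parseUri expects is {did}/{versionHash}/{URL-encoded relative path}. A standalone sketch of that parsing (the "dataset" scheme below is a placeholder; the real scheme constant lives in FileResolver):

```scala
import java.net.{URI, URLDecoder}
import java.nio.charset.StandardCharsets
import java.nio.file.{Path, Paths}
import scala.jdk.CollectionConverters.IteratorHasAsScala

object DatasetUriSketch {
  // Mirrors DatasetFileDocument.parseUri: /{dataset id}/{version hash}/{relative file path...}
  def parseUri(uri: URI): (Int, String, Path) = {
    val segments = Paths.get(uri.getPath).iterator().asScala.map(_.toString).toArray
    require(segments.length >= 3, "URI format is incorrect")
    val did = segments(0).toInt
    val versionHash = URLDecoder.decode(segments(1), StandardCharsets.UTF_8)
    val rel = segments.drop(2).map(URLDecoder.decode(_, StandardCharsets.UTF_8))
    (did, versionHash, Paths.get(rel.head, rel.tail: _*))
  }

  def main(args: Array[String]): Unit = {
    val (did, hash, path) = parseUri(new URI("dataset:///15/abc123/folder/data.csv"))
    println(s"did=$did version=$hash file=$path") // did=15 version=abc123 file=folder/data.csv
  }
}
```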
diff --git a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/storage/DocumentFactory.scala b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/storage/DocumentFactory.scala
deleted file mode 100644
index f216504d096..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/storage/DocumentFactory.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-package edu.uci.ics.amber.engine.common.storage
-
-import edu.uci.ics.texera.workflow.common.storage.FileResolver.DATASET_FILE_URI_SCHEME
-
-import java.net.URI
-
-object DocumentFactory {
- def newReadonlyDocument(fileUri: URI): ReadonlyVirtualDocument[_] = {
- fileUri.getScheme match {
- case DATASET_FILE_URI_SCHEME =>
- new DatasetFileDocument(fileUri)
-
- case "file" =>
- // For local files, create a ReadonlyLocalFileDocument
- new ReadonlyLocalFileDocument(fileUri)
-
- case _ =>
- throw new UnsupportedOperationException(s"Unsupported URI scheme: ${fileUri.getScheme}")
- }
- }
-}
diff --git a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/storage/FileDocument.scala b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/storage/FileDocument.scala
deleted file mode 100644
index b6d191e6dc3..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/storage/FileDocument.scala
+++ /dev/null
@@ -1,167 +0,0 @@
-package edu.uci.ics.amber.engine.common.storage
-import com.twitter.chill.{KryoPool, ScalaKryoInstantiator}
-import edu.uci.ics.amber.engine.common.storage.FileDocument.kryoPool
-import org.apache.commons.vfs2.{FileObject, VFS}
-
-import java.io.InputStream
-import java.net.URI
-import java.util.concurrent.locks.ReentrantReadWriteLock
-
-object FileDocument {
- // Initialize KryoPool as a static object
- private val kryoPool = KryoPool.withByteArrayOutputStream(10, new ScalaKryoInstantiator)
-}
-
-/**
- * FileDocument provides methods to read/write a file located on filesystem.
- * All methods are THREAD-SAFE implemented using read-write lock:
- * - 1 writer at a time: only 1 thread of the current JVM can acquire the write lock
- * - n readers at a time: multiple threads of the current JVM can acquire the read lock
- *
- * The type parameter T is used to specify the iterable data item stored in the File. FileDocument provides easy ways of setting/iterating these data items
- *
- * @param uri the identifier of the file. If the file doesn't physically exist, FileDocument will create it during construction.
- */
-class FileDocument[T >: Null <: AnyRef](val uri: URI) extends VirtualDocument[T] {
- val file: FileObject = VFS.getManager.resolveFile(uri.toString)
- val lock = new ReentrantReadWriteLock()
-
- // Utility function to wrap code block with read lock
- private def withReadLock[M](block: => M): M = {
- lock.readLock().lock()
- try {
- block
- } finally {
- lock.readLock().unlock()
- }
- }
-
- // Utility function to wrap code block with write lock
- private def withWriteLock(block: => Unit): Unit = {
- lock.writeLock().lock()
- try {
- block
- } finally {
- lock.writeLock().unlock()
- }
- }
-
- // Check and create the file if it does not exist
- withWriteLock {
- if (!file.exists()) {
- val parentDir = file.getParent
- if (parentDir != null && !parentDir.exists()) {
- parentDir.createFolder() // Create all necessary parent directories
- }
- file.createFile() // Create the file if it does not exist
- }
- }
-
- /**
- * Append the content in the inputStream to the FileDocument. This method is THREAD-SAFE
- * This method does NOT do any serialization, so it is invalid to use getItem or iterator to get T from the document.
- * @param inputStream the data source input stream
- */
- override def appendStream(inputStream: InputStream): Unit =
- withWriteLock {
- val outStream = file.getContent.getOutputStream(true)
- try {
- // create a buffer for reading from inputStream
- val buffer = new Array[Byte](1024)
- // create an Iterator to repeatedly call inputStream.read, and direct buffered data to file
- Iterator
- .continually(inputStream.read(buffer))
- .takeWhile(_ != -1)
- .foreach(outStream.write(buffer, 0, _))
- } finally {
- outStream.close()
- }
- }
-
- /**
- * Append the content in the given object to the FileDocument. This method is THREAD-SAFE
- * Each record will be stored as
* See
+ * from-httpservletrequest-in-a-web-socket-serverendpoint">
*
*/
class ServletAwareConfigurator extends ServerEndpointConfig.Configurator with LazyLogging {
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/SqlServer.java b/core/amber/src/main/scala/edu/uci/ics/texera/web/SqlServer.java
deleted file mode 100644
index d5f7bd1b2ef..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/SqlServer.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package edu.uci.ics.texera.web;
-
-import com.mysql.cj.jdbc.MysqlDataSource;
-import edu.uci.ics.amber.engine.common.AmberConfig;
-import org.jooq.DSLContext;
-import org.jooq.SQLDialect;
-import org.jooq.impl.DSL;
-
-public final class SqlServer {
- public static final SQLDialect SQL_DIALECT = SQLDialect.MYSQL;
- private static final MysqlDataSource dataSource;
- public static DSLContext context;
-
- static {
- dataSource = new MysqlDataSource();
- dataSource.setUrl(AmberConfig.jdbcConfig().getString("url"));
- dataSource.setUser(AmberConfig.jdbcConfig().getString("username"));
- dataSource.setPassword(AmberConfig.jdbcConfig().getString("password"));
- context = DSL.using(dataSource, SQL_DIALECT);
- }
-
- public static DSLContext createDSLContext() {
- return context;
- }
-
- public static void replaceDSLContext(DSLContext newContext){
- context = newContext;
- }
-}
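
For context on what is being removed here: the deleted SqlServer class only wrapped a MysqlDataSource in a jOOQ DSLContext. A minimal Scala sketch of that wiring, with placeholder connection values standing in for the AmberConfig.jdbcConfig() entries, looks like this:

    // Minimal sketch of the wiring the deleted class provided; the URL,
    // user, and password below are placeholders, not real configuration.
    import com.mysql.cj.jdbc.MysqlDataSource
    import org.jooq.{DSLContext, SQLDialect}
    import org.jooq.impl.DSL

    object SqlServerSketch {
      private val dataSource = new MysqlDataSource()
      dataSource.setUrl("jdbc:mysql://localhost:3306/texera_db")
      dataSource.setUser("texera")
      dataSource.setPassword("password")

      val context: DSLContext = DSL.using(dataSource, SQLDialect.MYSQL)
    }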
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/TexeraWebApplication.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/TexeraWebApplication.scala
index 90dfffbec6a..7581b7d4094 100644
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/TexeraWebApplication.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/TexeraWebApplication.scala
@@ -3,62 +3,39 @@ package edu.uci.ics.texera.web
import com.fasterxml.jackson.databind.module.SimpleModule
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.github.dirkraft.dropwizard.fileassets.FileAssetsBundle
-import com.github.toastshaman.dropwizard.auth.jwt.JwtAuthFilter
import com.typesafe.scalalogging.LazyLogging
-import edu.uci.ics.amber.engine.architecture.controller.ControllerConfig
-import edu.uci.ics.amber.engine.architecture.rpc.controlreturns.WorkflowAggregatedState.{
- COMPLETED,
- FAILED
-}
-import edu.uci.ics.amber.engine.common.AmberRuntime.scheduleRecurringCallThroughActorSystem
-import edu.uci.ics.amber.engine.common.{AmberConfig, AmberRuntime, Utils}
-import edu.uci.ics.amber.engine.common.client.AmberClient
-import edu.uci.ics.amber.engine.common.model.{PhysicalPlan, WorkflowContext}
-import edu.uci.ics.amber.engine.common.storage.SequentialRecordStorage
-import edu.uci.ics.amber.engine.common.virtualidentity.ExecutionIdentity
-import Utils.{maptoStatusCode, objectMapper}
-import edu.uci.ics.texera.web.auth.JwtAuth.jwtConsumer
-import edu.uci.ics.texera.web.auth.{
- GuestAuthFilter,
- SessionUser,
- UserAuthenticator,
- UserRoleAuthorizer
-}
-import edu.uci.ics.texera.web.model.jooq.generated.tables.pojos.WorkflowExecutions
-import edu.uci.ics.texera.web.resource.auth.{AuthResource, GoogleAuthResource}
+import edu.uci.ics.amber.core.storage.util.dataset.GitVersionControlLocalFileStorage
+import edu.uci.ics.amber.engine.common.Utils
+import edu.uci.ics.amber.util.PathUtils
+import edu.uci.ics.texera.web.auth.JwtAuth.setupJwtAuth
+import edu.uci.ics.texera.web.auth.SessionUser
import edu.uci.ics.texera.web.resource._
+import edu.uci.ics.texera.web.resource.auth.{AuthResource, GoogleAuthResource}
import edu.uci.ics.texera.web.resource.dashboard.DashboardResource
import edu.uci.ics.texera.web.resource.dashboard.admin.execution.AdminExecutionResource
import edu.uci.ics.texera.web.resource.dashboard.admin.user.AdminUserResource
import edu.uci.ics.texera.web.resource.dashboard.hub.workflow.HubWorkflowResource
-import edu.uci.ics.texera.web.resource.dashboard.user.dataset.{
- DatasetAccessResource,
- DatasetResource
-}
import edu.uci.ics.texera.web.resource.dashboard.user.dataset.`type`.{
DatasetFileNode,
DatasetFileNodeSerializer
}
-import edu.uci.ics.texera.web.resource.dashboard.user.dataset.service.GitVersionControlLocalFileStorage
-import edu.uci.ics.texera.web.resource.dashboard.user.dataset.utils.PathUtils.getAllDatasetDirectories
+import edu.uci.ics.texera.web.resource.dashboard.user.dataset.{
+ DatasetAccessResource,
+ DatasetResource
+}
import edu.uci.ics.texera.web.resource.dashboard.user.project.{
ProjectAccessResource,
ProjectResource,
PublicProjectResource
}
import edu.uci.ics.texera.web.resource.dashboard.user.quota.UserQuotaResource
-import edu.uci.ics.texera.web.resource.dashboard.user.discussion.UserDiscussionResource
import edu.uci.ics.texera.web.resource.dashboard.user.workflow.{
WorkflowAccessResource,
WorkflowExecutionsResource,
WorkflowResource,
WorkflowVersionResource
}
-import edu.uci.ics.texera.web.resource.languageserver.PythonLanguageServerManager
-import edu.uci.ics.texera.web.service.ExecutionsMetadataPersistService
-import edu.uci.ics.texera.web.storage.MongoDatabaseManager
-import edu.uci.ics.texera.workflow.common.storage.OpResultStorage
-import io.dropwizard.auth.{AuthDynamicFeature, AuthValueFactoryProvider}
+import io.dropwizard.auth.AuthValueFactoryProvider
import io.dropwizard.setup.{Bootstrap, Environment}
import io.dropwizard.websockets.WebsocketBundle
import org.eclipse.jetty.server.session.SessionHandler
@@ -68,68 +45,24 @@ import org.glassfish.jersey.media.multipart.MultiPartFeature
import org.glassfish.jersey.server.filter.RolesAllowedDynamicFeature
import java.time.Duration
-import scala.concurrent.duration.DurationInt
-import org.apache.commons.jcs3.access.exception.InvalidArgumentException
-
-import java.net.URI
-import scala.annotation.tailrec
object TexeraWebApplication {
// this method is used to abort uncommitted changes for every dataset
def discardUncommittedChangesOfAllDatasets(): Unit = {
- val datasetPaths = getAllDatasetDirectories()
+ val datasetPaths = PathUtils.getAllDatasetDirectories()
datasetPaths.foreach(path => {
GitVersionControlLocalFileStorage.discardUncommittedChanges(path)
})
}
- def createAmberRuntime(
- workflowContext: WorkflowContext,
- physicalPlan: PhysicalPlan,
- opResultStorage: OpResultStorage,
- conf: ControllerConfig,
- errorHandler: Throwable => Unit
- ): AmberClient = {
- new AmberClient(
- AmberRuntime.actorSystem,
- workflowContext,
- physicalPlan,
- opResultStorage,
- conf,
- errorHandler
- )
- }
-
- type OptionMap = Map[Symbol, Any]
- def parseArgs(args: Array[String]): OptionMap = {
- @tailrec
- def nextOption(map: OptionMap, list: List[String]): OptionMap = {
- list match {
- case Nil => map
- case "--cluster" :: value :: tail =>
- nextOption(map ++ Map(Symbol("cluster") -> value.toBoolean), tail)
- case option :: tail =>
- throw new InvalidArgumentException("unknown command-line arg")
- }
- }
-
- nextOption(Map(), args.toList)
- }
-
def main(args: Array[String]): Unit = {
- val argMap = parseArgs(args)
-
- val clusterMode = argMap.get(Symbol("cluster")).asInstanceOf[Option[Boolean]].getOrElse(false)
    // TODO: figure out a safe way of calling discardUncommittedChangesOfAllDatasets
    // Currently in Kubernetes, multiple pods calling this function can race with one another
// discardUncommittedChangesOfAllDatasets()
- // start actor system master node
- AmberRuntime.startActorMaster(clusterMode)
-
// start web server
new TexeraWebApplication().run(
"server",
@@ -140,7 +73,6 @@ object TexeraWebApplication {
.resolve("web-config.yml")
.toString
)
- PythonLanguageServerManager.startLanguageServer()
}
}
@@ -152,7 +84,6 @@ class TexeraWebApplication
// serve static frontend GUI files
bootstrap.addBundle(new FileAssetsBundle("../gui/dist", "/", "index.html"))
// add websocket bundle
- bootstrap.addBundle(new WebsocketBundle(classOf[WorkflowWebsocketResource]))
bootstrap.addBundle(new WebsocketBundle(classOf[CollaborationResource]))
// register scala module to dropwizard default object mapper
bootstrap.getObjectMapper.registerModule(DefaultScalaModule)
@@ -161,32 +92,6 @@ class TexeraWebApplication
val customSerializerModule = new SimpleModule("CustomSerializers")
customSerializerModule.addSerializer(classOf[DatasetFileNode], new DatasetFileNodeSerializer())
bootstrap.getObjectMapper.registerModule(customSerializerModule)
-
- if (AmberConfig.isUserSystemEnabled) {
- val timeToLive: Int = AmberConfig.sinkStorageTTLInSecs
- if (AmberConfig.cleanupAllExecutionResults) {
- // do one time cleanup of collections that were not closed gracefully before restart/crash
- // retrieve all executions that were executing before the reboot.
- val allExecutionsBeforeRestart: List[WorkflowExecutions] =
- WorkflowExecutionsResource.getExpiredExecutionsWithResultOrLog(-1)
- cleanExecutions(
- allExecutionsBeforeRestart,
- statusByte => {
- if (statusByte != maptoStatusCode(COMPLETED)) {
- maptoStatusCode(FAILED) // for incomplete executions, mark them as failed.
- } else {
- statusByte
- }
- }
- )
- }
- scheduleRecurringCallThroughActorSystem(
- 2.seconds,
- AmberConfig.sinkStorageCleanUpCheckIntervalInSecs.seconds
- ) {
- recurringCheckExpiredResults(timeToLive)
- }
- }
}
override def run(configuration: TexeraWebConfiguration, environment: Environment): Unit = {
@@ -215,29 +120,8 @@ class TexeraWebApplication
environment.jersey.register(classOf[SystemMetadataResource])
// environment.jersey().register(classOf[MockKillWorkerResource])
- environment.jersey.register(classOf[SchemaPropagationResource])
- if (AmberConfig.isUserSystemEnabled) {
- // register JWT Auth layer
- environment.jersey.register(
- new AuthDynamicFeature(
- new JwtAuthFilter.Builder[SessionUser]()
- .setJwtConsumer(jwtConsumer)
- .setRealm("realm")
- .setPrefix("Bearer")
- .setAuthenticator(UserAuthenticator)
- .setAuthorizer(UserRoleAuthorizer)
- .buildAuthFilter()
- )
- )
- } else {
- // register Guest Auth layer
- environment.jersey.register(
- new AuthDynamicFeature(
- new GuestAuthFilter.Builder().setAuthorizer(UserRoleAuthorizer).buildAuthFilter()
- )
- )
- }
+ setupJwtAuth(environment)
environment.jersey.register(
new AuthValueFactoryProvider.Binder[SessionUser](classOf[SessionUser])
@@ -262,83 +146,6 @@ class TexeraWebApplication
environment.jersey.register(classOf[GmailResource])
environment.jersey.register(classOf[AdminExecutionResource])
environment.jersey.register(classOf[UserQuotaResource])
- environment.jersey.register(classOf[UserDiscussionResource])
environment.jersey.register(classOf[AIAssistantResource])
}
-
- /**
- * This function drops the collections.
- * MongoDB does not provide an API to drop collections whose names come from a subquery, so the implementation retrieves
- * the entire list of expired documents and then loops over it to drop the collections one by one
- */
- private def cleanExecutions(
- executions: List[WorkflowExecutions],
- statusChangeFunc: Byte => Byte
- ): Unit = {
- // drop the collection and update the status to ABORTED
- executions.foreach(execEntry => {
- dropCollections(execEntry.getResult)
- deleteReplayLog(execEntry.getLogLocation)
- // then delete the pointer from mySQL
- val executionIdentity = ExecutionIdentity(execEntry.getEid.longValue())
- ExecutionsMetadataPersistService.tryUpdateExistingExecution(executionIdentity) { execution =>
- execution.setResult("")
- execution.setLogLocation(null)
- execution.setStatus(statusChangeFunc(execution.getStatus))
- }
- })
- }
-
- def dropCollections(result: String): Unit = {
- if (result == null || result.isEmpty) {
- return
- }
- // TODO: merge this logic to the server-side in-mem cleanup
- // parse the JSON
- try {
- val node = objectMapper.readTree(result)
- val collectionEntries = node.get("results")
- // loop every collection and drop it
- collectionEntries.forEach(collection => {
- val storageType = collection.get("storageType").asText()
- val collectionName = collection.get("storageKey").asText()
- storageType match {
- case OpResultStorage.MEMORY =>
- // rely on the server-side result cleanup logic.
- case OpResultStorage.MONGODB =>
- MongoDatabaseManager.dropCollection(collectionName)
- }
- })
- } catch {
- case e: Throwable =>
- logger.warn("result collection cleanup failed.", e)
- }
- }
-
- def deleteReplayLog(logLocation: String): Unit = {
- if (logLocation == null || logLocation.isEmpty) {
- return
- }
- val uri = new URI(logLocation)
- try {
- val storage = SequentialRecordStorage.getStorage(Some(uri))
- storage.deleteStorage()
- } catch {
- case throwable: Throwable =>
- logger.warn(s"failed to delete log at $logLocation", throwable)
- }
- }
-
- /**
- * This function is called periodically; it checks for expired collections and deletes them
- */
- def recurringCheckExpiredResults(
- timeToLive: Int
- ): Unit = {
- // retrieve all executions that are completed and their last update time goes beyond the ttl
- val expiredResults: List[WorkflowExecutions] =
- WorkflowExecutionsResource.getExpiredExecutionsWithResultOrLog(timeToLive)
- // drop the collections and clean the logs
- cleanExecutions(expiredResults, statusByte => statusByte)
- }
}
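
To keep the removed result-cleanup path readable in one place, here is a condensed Scala sketch assembled only from the deleted lines above (the 2-second initial delay and the config keys are taken from them verbatim; the cleanOne helper is a placeholder standing in for the removed dropCollections/deleteReplayLog logic):

    import edu.uci.ics.amber.engine.common.AmberConfig
    import edu.uci.ics.amber.engine.common.AmberRuntime.scheduleRecurringCallThroughActorSystem
    import edu.uci.ics.texera.web.model.jooq.generated.tables.pojos.WorkflowExecutions
    import edu.uci.ics.texera.web.resource.dashboard.user.workflow.WorkflowExecutionsResource
    import scala.concurrent.duration.DurationInt

    object RemovedCleanupSketch {
      // Placeholder for the removed dropCollections/deleteReplayLog calls.
      private def cleanOne(exec: WorkflowExecutions): Unit =
        println(s"would clean execution ${exec.getEid}")

      def scheduleResultCleanup(): Unit =
        scheduleRecurringCallThroughActorSystem(
          2.seconds,
          AmberConfig.sinkStorageCleanUpCheckIntervalInSecs.seconds
        ) {
          // MongoDB cannot drop collections selected by a subquery, so expired
          // executions are fetched first and cleaned one by one.
          val expired: List[WorkflowExecutions] =
            WorkflowExecutionsResource.getExpiredExecutionsWithResultOrLog(
              AmberConfig.sinkStorageTTLInSecs
            )
          expired.foreach(cleanOne)
        }
    }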
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/TexeraWorkflowCompilingService.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/TexeraWorkflowCompilingService.scala
deleted file mode 100644
index ab3fdb4dacb..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/TexeraWorkflowCompilingService.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-package edu.uci.ics.texera.web
-
-import com.fasterxml.jackson.module.scala.DefaultScalaModule
-import com.github.toastshaman.dropwizard.auth.jwt.JwtAuthFilter
-import com.typesafe.scalalogging.LazyLogging
-import edu.uci.ics.amber.engine.common.{AmberConfig, Utils}
-import edu.uci.ics.texera.web.auth.JwtAuth.jwtConsumer
-import edu.uci.ics.texera.web.auth.{
- GuestAuthFilter,
- SessionUser,
- UserAuthenticator,
- UserRoleAuthorizer
-}
-import edu.uci.ics.texera.web.resource.WorkflowCompilationResource
-import io.dropwizard.auth.{AuthDynamicFeature, AuthValueFactoryProvider}
-import io.dropwizard.setup.{Bootstrap, Environment}
-import org.glassfish.jersey.server.filter.RolesAllowedDynamicFeature
-
-object TexeraWorkflowCompilingService {
- def main(args: Array[String]): Unit = {
-
- new TexeraWorkflowCompilingService().run(
- "server",
- Utils.amberHomePath
- .resolve("src")
- .resolve("main")
- .resolve("resources")
- .resolve("texera-compiling-service-web-config.yml")
- .toString
- )
- }
-}
-
-class TexeraWorkflowCompilingService
- extends io.dropwizard.Application[TexeraWorkflowCompilingServiceConfiguration]
- with LazyLogging {
- override def initialize(
- bootstrap: Bootstrap[TexeraWorkflowCompilingServiceConfiguration]
- ): Unit = {
- // register scala module to dropwizard default object mapper
- bootstrap.getObjectMapper.registerModule(DefaultScalaModule)
- }
-
- override def run(
- configuration: TexeraWorkflowCompilingServiceConfiguration,
- environment: Environment
- ): Unit = {
- // serve backend at /api/texera
- environment.jersey.setUrlPattern("/api/texera/*")
-
- // register the compilation endpoint
- environment.jersey.register(classOf[WorkflowCompilationResource])
-
- // Add JWT Auth layer (without session)
- if (AmberConfig.isUserSystemEnabled) {
- environment.jersey.register(
- new AuthDynamicFeature(
- new JwtAuthFilter.Builder[SessionUser]()
- .setJwtConsumer(jwtConsumer)
- .setRealm("realm")
- .setPrefix("Bearer")
- .setAuthenticator(UserAuthenticator)
- .setAuthorizer(UserRoleAuthorizer)
- .buildAuthFilter()
- )
- )
- } else {
- environment.jersey.register(
- new AuthDynamicFeature(
- new GuestAuthFilter.Builder().setAuthorizer(UserRoleAuthorizer).buildAuthFilter()
- )
- )
- }
-
- environment.jersey.register(
- new AuthValueFactoryProvider.Binder[SessionUser](classOf[SessionUser])
- )
- environment.jersey.register(classOf[RolesAllowedDynamicFeature])
- }
-}
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/TexeraWorkflowCompilingServiceConfiguration.java b/core/amber/src/main/scala/edu/uci/ics/texera/web/TexeraWorkflowCompilingServiceConfiguration.java
deleted file mode 100644
index 65f3a60fcaf..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/TexeraWorkflowCompilingServiceConfiguration.java
+++ /dev/null
@@ -1,6 +0,0 @@
-package edu.uci.ics.texera.web;
-
-import io.dropwizard.Configuration;
-
-public class TexeraWorkflowCompilingServiceConfiguration extends Configuration {
-}
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/WebsocketInput.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/WebsocketInput.scala
index c85bcf7d570..bc4a3eddba4 100644
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/WebsocketInput.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/WebsocketInput.scala
@@ -9,6 +9,7 @@ import scala.reflect.{ClassTag, classTag}
class WebsocketInput(errorHandler: Throwable => Unit) {
private val wsInput = PublishSubject.create[(TexeraWebSocketRequest, Option[UInteger])]()
+
def subscribe[T <: TexeraWebSocketRequest: ClassTag](
callback: (T, Option[UInteger]) => Unit
): Disposable = {
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/WorkflowLifecycleManager.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/WorkflowLifecycleManager.scala
index 5d38687b242..e128490040d 100644
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/WorkflowLifecycleManager.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/WorkflowLifecycleManager.scala
@@ -1,14 +1,14 @@
package edu.uci.ics.texera.web
-import java.time.{LocalDateTime, Duration => JDuration}
import akka.actor.Cancellable
import com.typesafe.scalalogging.LazyLogging
import edu.uci.ics.amber.engine.architecture.rpc.controlreturns.WorkflowAggregatedState
import edu.uci.ics.amber.engine.architecture.rpc.controlreturns.WorkflowAggregatedState.RUNNING
import edu.uci.ics.amber.engine.common.AmberRuntime
+import edu.uci.ics.amber.engine.common.executionruntimestate.ExecutionMetadataStore
import edu.uci.ics.texera.web.storage.ExecutionStateStore
-import edu.uci.ics.amber.engine.common.workflowruntimestate.ExecutionMetadataStore
+import java.time.{LocalDateTime, Duration => JDuration}
import scala.concurrent.duration.DurationInt
class WorkflowLifecycleManager(id: String, cleanUpTimeout: Int, cleanUpCallback: () => Unit)
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/GuestAuthFilter.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/GuestAuthFilter.scala
index a23252e07a6..318a12c8f2d 100644
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/GuestAuthFilter.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/GuestAuthFilter.scala
@@ -1,24 +1,23 @@
package edu.uci.ics.texera.web.auth
import edu.uci.ics.texera.web.auth.GuestAuthFilter.GUEST
-import edu.uci.ics.texera.web.model.jooq.generated.enums.UserRole
-import edu.uci.ics.texera.web.model.jooq.generated.tables.pojos.User
+import edu.uci.ics.texera.dao.jooq.generated.enums.UserRole
+import edu.uci.ics.texera.dao.jooq.generated.tables.pojos.User
import io.dropwizard.auth.AuthFilter
-import javax.annotation.Nullable
-import javax.annotation.Priority
-import javax.ws.rs.Priorities
-import javax.ws.rs.container.ContainerRequestContext
-import javax.ws.rs.container.PreMatching
-import javax.ws.rs.core.SecurityContext
import java.io.IOException
import java.util.Optional
+import javax.annotation.{Nullable, Priority}
+import javax.ws.rs.Priorities
+import javax.ws.rs.container.{ContainerRequestContext, PreMatching}
+import javax.ws.rs.core.SecurityContext
@PreMatching
@Priority(Priorities.AUTHENTICATION) object GuestAuthFilter {
class Builder extends AuthFilter.AuthFilterBuilder[String, SessionUser, GuestAuthFilter] {
override protected def newInstance = new GuestAuthFilter
}
+
val GUEST: User = new User(null, "guest", null, null, null, UserRole.REGULAR, null)
}
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/JwtAuth.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/JwtAuth.scala
index 42170b00ae9..de67708c794 100644
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/JwtAuth.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/JwtAuth.scala
@@ -1,8 +1,11 @@
package edu.uci.ics.texera.web.auth
+import com.github.toastshaman.dropwizard.auth.jwt.JwtAuthFilter
import com.typesafe.config.Config
import edu.uci.ics.amber.engine.common.AmberConfig
-import edu.uci.ics.texera.web.model.jooq.generated.tables.pojos.User
+import edu.uci.ics.texera.dao.jooq.generated.tables.pojos.User
+import io.dropwizard.auth.AuthDynamicFeature
+import io.dropwizard.setup.Environment
import org.jose4j.jws.AlgorithmIdentifiers.HMAC_SHA256
import org.jose4j.jws.JsonWebSignature
import org.jose4j.jwt.JwtClaims
@@ -28,6 +31,31 @@ object JwtAuth {
.setRelaxVerificationKeyValidation()
.build
+ def setupJwtAuth(environment: Environment): Unit = {
+ if (AmberConfig.isUserSystemEnabled) {
+ // register JWT Auth layer
+ environment.jersey.register(
+ new AuthDynamicFeature(
+ new JwtAuthFilter.Builder[SessionUser]()
+ .setJwtConsumer(jwtConsumer)
+ .setRealm("realm")
+ .setPrefix("Bearer")
+ .setAuthenticator(UserAuthenticator)
+ .setAuthorizer(UserRoleAuthorizer)
+ .buildAuthFilter()
+ )
+ )
+ } else {
+ // register Guest Auth layer
+ environment.jersey.register(
+ new AuthDynamicFeature(
+ new GuestAuthFilter.Builder().setAuthorizer(UserRoleAuthorizer).buildAuthFilter()
+ )
+ )
+ }
+
+ }
+
def jwtToken(claims: JwtClaims): String = {
val jws = new JsonWebSignature()
jws.setPayload(claims.toJson)
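
A usage note, purely illustrative and not part of this diff: once setupJwtAuth has registered either the JWT or the guest auth filter, a Jersey resource obtains the authenticated principal through Dropwizard's @Auth injection. The resource path and method below are made up:

    // Hypothetical resource consuming the auth layer registered by setupJwtAuth.
    import edu.uci.ics.texera.web.auth.SessionUser
    import io.dropwizard.auth.Auth
    import javax.ws.rs.core.MediaType
    import javax.ws.rs.{GET, Path, Produces}

    @Path("/example")
    @Produces(Array(MediaType.TEXT_PLAIN))
    class ExampleResource {
      @GET
      def whoAmI(@Auth user: SessionUser): String = user.getName
    }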
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/SessionUser.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/SessionUser.scala
index 62445306b8c..baee51b24fb 100644
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/SessionUser.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/SessionUser.scala
@@ -1,7 +1,7 @@
package edu.uci.ics.texera.web.auth
-import edu.uci.ics.texera.web.model.jooq.generated.enums.UserRole
-import edu.uci.ics.texera.web.model.jooq.generated.tables.pojos.User
+import edu.uci.ics.texera.dao.jooq.generated.enums.UserRole
+import edu.uci.ics.texera.dao.jooq.generated.tables.pojos.User
import org.jooq.types.UInteger
import java.security.Principal
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/UserAuthenticator.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/UserAuthenticator.scala
index 7d2a29f2fd3..2a4947dc2ce 100644
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/UserAuthenticator.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/UserAuthenticator.scala
@@ -1,7 +1,8 @@
package edu.uci.ics.texera.web.auth
+
import com.typesafe.scalalogging.LazyLogging
-import edu.uci.ics.texera.web.model.jooq.generated.enums.UserRole
-import edu.uci.ics.texera.web.model.jooq.generated.tables.pojos.User
+import edu.uci.ics.texera.dao.jooq.generated.enums.UserRole
+import edu.uci.ics.texera.dao.jooq.generated.tables.pojos.User
import io.dropwizard.auth.Authenticator
import org.jooq.types.UInteger
import org.jose4j.jwt.consumer.JwtContext
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/UserRoleAuthorizer.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/UserRoleAuthorizer.scala
index bd2583bcb1c..67ceade63ab 100644
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/UserRoleAuthorizer.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/auth/UserRoleAuthorizer.scala
@@ -1,6 +1,6 @@
package edu.uci.ics.texera.web.auth
-import edu.uci.ics.texera.web.model.jooq.generated.enums.UserRole
+import edu.uci.ics.texera.dao.jooq.generated.enums.UserRole
import io.dropwizard.auth.Authorizer
object UserRoleAuthorizer extends Authorizer[SessionUser] {
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/collab/event/CollabWebSocketEvent.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/collab/event/CollabWebSocketEvent.scala
index d1729bf6212..4ed43b09892 100644
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/collab/event/CollabWebSocketEvent.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/collab/event/CollabWebSocketEvent.scala
@@ -1,4 +1,5 @@
package edu.uci.ics.texera.web.model.collab.event
+
import com.fasterxml.jackson.annotation.JsonSubTypes.Type
import com.fasterxml.jackson.annotation.{JsonSubTypes, JsonTypeInfo}
import edu.uci.ics.texera.web.model.collab.response.HeartBeatResponse
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/collab/request/CollabWebSocketRequest.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/collab/request/CollabWebSocketRequest.scala
index 330262d5d3c..f5620692993 100644
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/collab/request/CollabWebSocketRequest.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/collab/request/CollabWebSocketRequest.scala
@@ -1,4 +1,5 @@
package edu.uci.ics.texera.web.model.collab.request
+
import com.fasterxml.jackson.annotation.JsonSubTypes.Type
import com.fasterxml.jackson.annotation.{JsonSubTypes, JsonTypeInfo}
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/collab/response/HeartBeatResponse.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/collab/response/HeartBeatResponse.scala
index 5324e09d05b..754e78bb7d6 100644
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/collab/response/HeartBeatResponse.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/collab/response/HeartBeatResponse.scala
@@ -1,4 +1,5 @@
package edu.uci.ics.texera.web.model.collab.response
+
import edu.uci.ics.texera.web.model.collab.event.CollabWebSocketEvent
case class HeartBeatResponse() extends CollabWebSocketEvent
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/common/AccessEntry.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/common/AccessEntry.scala
index 1c16296c6d2..a2f9db9dcd3 100644
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/common/AccessEntry.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/common/AccessEntry.scala
@@ -1,4 +1,5 @@
package edu.uci.ics.texera.web.model.common
+
import org.jooq.EnumType
case class AccessEntry(email: String, name: String, privilege: EnumType) {}
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/http/response/SchemaPropagationResponse.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/http/response/SchemaPropagationResponse.scala
index c017ce54ead..8bdb7b0563d 100644
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/http/response/SchemaPropagationResponse.scala
+++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/http/response/SchemaPropagationResponse.scala
@@ -1,6 +1,6 @@
package edu.uci.ics.texera.web.model.http.response
-import edu.uci.ics.amber.engine.common.model.tuple.Attribute
+import edu.uci.ics.amber.core.tuple.Attribute
case class SchemaPropagationResponse(
code: Int,
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/DefaultCatalog.java b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/DefaultCatalog.java
deleted file mode 100644
index b4d40f20a5d..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/DefaultCatalog.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * This file is generated by jOOQ.
- */
-package edu.uci.ics.texera.web.model.jooq.generated;
-
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import org.jooq.Schema;
-import org.jooq.impl.CatalogImpl;
-
-
-/**
- * This class is generated by jOOQ.
- */
-@SuppressWarnings({ "all", "unchecked", "rawtypes" })
-public class DefaultCatalog extends CatalogImpl {
-
- private static final long serialVersionUID = -625238328;
-
- /**
- * The reference instance of
- */
- public static final DefaultCatalog DEFAULT_CATALOG = new DefaultCatalog();
-
- /**
- * The schema texera_db.
- */
- public final TexeraDb TEXERA_DB = edu.uci.ics.texera.web.model.jooq.generated.TexeraDb.TEXERA_DB;
-
- /**
- * No further instances allowed
- */
- private DefaultCatalog() {
- super("");
- }
-
- @Override
- public final List<Schema> getSchemas() {
- /**
- * A class modelling indexes of tables of the texera_db schema.
- */
-@SuppressWarnings({ "all", "unchecked", "rawtypes" })
-public class Indexes {
-
- // -------------------------------------------------------------------------
- // INDEX definitions
- // -------------------------------------------------------------------------
-
- public static final Index DATASET_IDX_DATASET_NAME_DESCRIPTION = Indexes0.DATASET_IDX_DATASET_NAME_DESCRIPTION;
- public static final Index DATASET_OWNER_UID = Indexes0.DATASET_OWNER_UID;
- public static final Index DATASET_PRIMARY = Indexes0.DATASET_PRIMARY;
- public static final Index DATASET_USER_ACCESS_PRIMARY = Indexes0.DATASET_USER_ACCESS_PRIMARY;
- public static final Index DATASET_USER_ACCESS_UID = Indexes0.DATASET_USER_ACCESS_UID;
- public static final Index DATASET_VERSION_DID = Indexes0.DATASET_VERSION_DID;
- public static final Index DATASET_VERSION_IDX_DATASET_VERSION_NAME = Indexes0.DATASET_VERSION_IDX_DATASET_VERSION_NAME;
- public static final Index DATASET_VERSION_PRIMARY = Indexes0.DATASET_VERSION_PRIMARY;
- public static final Index PROJECT_IDX_USER_PROJECT_NAME_DESCRIPTION = Indexes0.PROJECT_IDX_USER_PROJECT_NAME_DESCRIPTION;
- public static final Index PROJECT_OWNER_ID = Indexes0.PROJECT_OWNER_ID;
- public static final Index PROJECT_PRIMARY = Indexes0.PROJECT_PRIMARY;
- public static final Index PROJECT_USER_ACCESS_PID = Indexes0.PROJECT_USER_ACCESS_PID;
- public static final Index PROJECT_USER_ACCESS_PRIMARY = Indexes0.PROJECT_USER_ACCESS_PRIMARY;
- public static final Index PUBLIC_PROJECT_PRIMARY = Indexes0.PUBLIC_PROJECT_PRIMARY;
- public static final Index USER_EMAIL = Indexes0.USER_EMAIL;
- public static final Index USER_GOOGLE_ID = Indexes0.USER_GOOGLE_ID;
- public static final Index USER_IDX_USER_NAME = Indexes0.USER_IDX_USER_NAME;
- public static final Index USER_PRIMARY = Indexes0.USER_PRIMARY;
- public static final Index USER_CONFIG_PRIMARY = Indexes0.USER_CONFIG_PRIMARY;
- public static final Index WORKFLOW_IDX_WORKFLOW_NAME_DESCRIPTION_CONTENT = Indexes0.WORKFLOW_IDX_WORKFLOW_NAME_DESCRIPTION_CONTENT;
- public static final Index WORKFLOW_PRIMARY = Indexes0.WORKFLOW_PRIMARY;
- public static final Index WORKFLOW_EXECUTIONS_PRIMARY = Indexes0.WORKFLOW_EXECUTIONS_PRIMARY;
- public static final Index WORKFLOW_EXECUTIONS_UID = Indexes0.WORKFLOW_EXECUTIONS_UID;
- public static final Index WORKFLOW_EXECUTIONS_VID = Indexes0.WORKFLOW_EXECUTIONS_VID;
- public static final Index WORKFLOW_OF_PROJECT_PID = Indexes0.WORKFLOW_OF_PROJECT_PID;
- public static final Index WORKFLOW_OF_PROJECT_PRIMARY = Indexes0.WORKFLOW_OF_PROJECT_PRIMARY;
- public static final Index WORKFLOW_OF_USER_PRIMARY = Indexes0.WORKFLOW_OF_USER_PRIMARY;
- public static final Index WORKFLOW_OF_USER_WID = Indexes0.WORKFLOW_OF_USER_WID;
- public static final Index WORKFLOW_RUNTIME_STATISTICS_EXECUTION_ID = Indexes0.WORKFLOW_RUNTIME_STATISTICS_EXECUTION_ID;
- public static final Index WORKFLOW_RUNTIME_STATISTICS_PRIMARY = Indexes0.WORKFLOW_RUNTIME_STATISTICS_PRIMARY;
- public static final Index WORKFLOW_USER_ACCESS_PRIMARY = Indexes0.WORKFLOW_USER_ACCESS_PRIMARY;
- public static final Index WORKFLOW_USER_ACCESS_WID = Indexes0.WORKFLOW_USER_ACCESS_WID;
- public static final Index WORKFLOW_USER_CLONES_PRIMARY = Indexes0.WORKFLOW_USER_CLONES_PRIMARY;
- public static final Index WORKFLOW_USER_CLONES_WID = Indexes0.WORKFLOW_USER_CLONES_WID;
- public static final Index WORKFLOW_USER_LIKES_PRIMARY = Indexes0.WORKFLOW_USER_LIKES_PRIMARY;
- public static final Index WORKFLOW_USER_LIKES_WID = Indexes0.WORKFLOW_USER_LIKES_WID;
- public static final Index WORKFLOW_VERSION_PRIMARY = Indexes0.WORKFLOW_VERSION_PRIMARY;
- public static final Index WORKFLOW_VERSION_WID = Indexes0.WORKFLOW_VERSION_WID;
- public static final Index WORKFLOW_VIEW_COUNT_PRIMARY = Indexes0.WORKFLOW_VIEW_COUNT_PRIMARY;
-
- // -------------------------------------------------------------------------
- // [#1459] distribute members to avoid static initialisers > 64kb
- // -------------------------------------------------------------------------
-
- private static class Indexes0 {
- public static Index DATASET_IDX_DATASET_NAME_DESCRIPTION = Internal.createIndex("idx_dataset_name_description", Dataset.DATASET, new OrderField[] { Dataset.DATASET.NAME, Dataset.DATASET.DESCRIPTION }, false);
- public static Index DATASET_OWNER_UID = Internal.createIndex("owner_uid", Dataset.DATASET, new OrderField[] { Dataset.DATASET.OWNER_UID }, false);
- public static Index DATASET_PRIMARY = Internal.createIndex("PRIMARY", Dataset.DATASET, new OrderField[] { Dataset.DATASET.DID }, true);
- public static Index DATASET_USER_ACCESS_PRIMARY = Internal.createIndex("PRIMARY", DatasetUserAccess.DATASET_USER_ACCESS, new OrderField[] { DatasetUserAccess.DATASET_USER_ACCESS.DID, DatasetUserAccess.DATASET_USER_ACCESS.UID }, true);
- public static Index DATASET_USER_ACCESS_UID = Internal.createIndex("uid", DatasetUserAccess.DATASET_USER_ACCESS, new OrderField[] { DatasetUserAccess.DATASET_USER_ACCESS.UID }, false);
- public static Index DATASET_VERSION_DID = Internal.createIndex("did", DatasetVersion.DATASET_VERSION, new OrderField[] { DatasetVersion.DATASET_VERSION.DID }, false);
- public static Index DATASET_VERSION_IDX_DATASET_VERSION_NAME = Internal.createIndex("idx_dataset_version_name", DatasetVersion.DATASET_VERSION, new OrderField[] { DatasetVersion.DATASET_VERSION.NAME }, false);
- public static Index DATASET_VERSION_PRIMARY = Internal.createIndex("PRIMARY", DatasetVersion.DATASET_VERSION, new OrderField[] { DatasetVersion.DATASET_VERSION.DVID }, true);
- public static Index PROJECT_IDX_USER_PROJECT_NAME_DESCRIPTION = Internal.createIndex("idx_user_project_name_description", Project.PROJECT, new OrderField[] { Project.PROJECT.NAME, Project.PROJECT.DESCRIPTION }, false);
- public static Index PROJECT_OWNER_ID = Internal.createIndex("owner_id", Project.PROJECT, new OrderField[] { Project.PROJECT.OWNER_ID, Project.PROJECT.NAME }, true);
- public static Index PROJECT_PRIMARY = Internal.createIndex("PRIMARY", Project.PROJECT, new OrderField[] { Project.PROJECT.PID }, true);
- public static Index PROJECT_USER_ACCESS_PID = Internal.createIndex("pid", ProjectUserAccess.PROJECT_USER_ACCESS, new OrderField[] { ProjectUserAccess.PROJECT_USER_ACCESS.PID }, false);
- public static Index PROJECT_USER_ACCESS_PRIMARY = Internal.createIndex("PRIMARY", ProjectUserAccess.PROJECT_USER_ACCESS, new OrderField[] { ProjectUserAccess.PROJECT_USER_ACCESS.UID, ProjectUserAccess.PROJECT_USER_ACCESS.PID }, true);
- public static Index PUBLIC_PROJECT_PRIMARY = Internal.createIndex("PRIMARY", PublicProject.PUBLIC_PROJECT, new OrderField[] { PublicProject.PUBLIC_PROJECT.PID }, true);
- public static Index USER_EMAIL = Internal.createIndex("email", User.USER, new OrderField[] { User.USER.EMAIL }, true);
- public static Index USER_GOOGLE_ID = Internal.createIndex("google_id", User.USER, new OrderField[] { User.USER.GOOGLE_ID }, true);
- public static Index USER_IDX_USER_NAME = Internal.createIndex("idx_user_name", User.USER, new OrderField[] { User.USER.NAME }, false);
- public static Index USER_PRIMARY = Internal.createIndex("PRIMARY", User.USER, new OrderField[] { User.USER.UID }, true);
- public static Index USER_CONFIG_PRIMARY = Internal.createIndex("PRIMARY", UserConfig.USER_CONFIG, new OrderField[] { UserConfig.USER_CONFIG.UID, UserConfig.USER_CONFIG.KEY }, true);
- public static Index WORKFLOW_IDX_WORKFLOW_NAME_DESCRIPTION_CONTENT = Internal.createIndex("idx_workflow_name_description_content", Workflow.WORKFLOW, new OrderField[] { Workflow.WORKFLOW.NAME, Workflow.WORKFLOW.DESCRIPTION, Workflow.WORKFLOW.CONTENT }, false);
- public static Index WORKFLOW_PRIMARY = Internal.createIndex("PRIMARY", Workflow.WORKFLOW, new OrderField[] { Workflow.WORKFLOW.WID }, true);
- public static Index WORKFLOW_EXECUTIONS_PRIMARY = Internal.createIndex("PRIMARY", WorkflowExecutions.WORKFLOW_EXECUTIONS, new OrderField[] { WorkflowExecutions.WORKFLOW_EXECUTIONS.EID }, true);
- public static Index WORKFLOW_EXECUTIONS_UID = Internal.createIndex("uid", WorkflowExecutions.WORKFLOW_EXECUTIONS, new OrderField[] { WorkflowExecutions.WORKFLOW_EXECUTIONS.UID }, false);
- public static Index WORKFLOW_EXECUTIONS_VID = Internal.createIndex("vid", WorkflowExecutions.WORKFLOW_EXECUTIONS, new OrderField[] { WorkflowExecutions.WORKFLOW_EXECUTIONS.VID }, false);
- public static Index WORKFLOW_OF_PROJECT_PID = Internal.createIndex("pid", WorkflowOfProject.WORKFLOW_OF_PROJECT, new OrderField[] { WorkflowOfProject.WORKFLOW_OF_PROJECT.PID }, false);
- public static Index WORKFLOW_OF_PROJECT_PRIMARY = Internal.createIndex("PRIMARY", WorkflowOfProject.WORKFLOW_OF_PROJECT, new OrderField[] { WorkflowOfProject.WORKFLOW_OF_PROJECT.WID, WorkflowOfProject.WORKFLOW_OF_PROJECT.PID }, true);
- public static Index WORKFLOW_OF_USER_PRIMARY = Internal.createIndex("PRIMARY", WorkflowOfUser.WORKFLOW_OF_USER, new OrderField[] { WorkflowOfUser.WORKFLOW_OF_USER.UID, WorkflowOfUser.WORKFLOW_OF_USER.WID }, true);
- public static Index WORKFLOW_OF_USER_WID = Internal.createIndex("wid", WorkflowOfUser.WORKFLOW_OF_USER, new OrderField[] { WorkflowOfUser.WORKFLOW_OF_USER.WID }, false);
- public static Index WORKFLOW_RUNTIME_STATISTICS_EXECUTION_ID = Internal.createIndex("execution_id", WorkflowRuntimeStatistics.WORKFLOW_RUNTIME_STATISTICS, new OrderField[] { WorkflowRuntimeStatistics.WORKFLOW_RUNTIME_STATISTICS.EXECUTION_ID }, false);
- public static Index WORKFLOW_RUNTIME_STATISTICS_PRIMARY = Internal.createIndex("PRIMARY", WorkflowRuntimeStatistics.WORKFLOW_RUNTIME_STATISTICS, new OrderField[] { WorkflowRuntimeStatistics.WORKFLOW_RUNTIME_STATISTICS.WORKFLOW_ID, WorkflowRuntimeStatistics.WORKFLOW_RUNTIME_STATISTICS.EXECUTION_ID, WorkflowRuntimeStatistics.WORKFLOW_RUNTIME_STATISTICS.OPERATOR_ID, WorkflowRuntimeStatistics.WORKFLOW_RUNTIME_STATISTICS.TIME }, true);
- public static Index WORKFLOW_USER_ACCESS_PRIMARY = Internal.createIndex("PRIMARY", WorkflowUserAccess.WORKFLOW_USER_ACCESS, new OrderField[] { WorkflowUserAccess.WORKFLOW_USER_ACCESS.UID, WorkflowUserAccess.WORKFLOW_USER_ACCESS.WID }, true);
- public static Index WORKFLOW_USER_ACCESS_WID = Internal.createIndex("wid", WorkflowUserAccess.WORKFLOW_USER_ACCESS, new OrderField[] { WorkflowUserAccess.WORKFLOW_USER_ACCESS.WID }, false);
- public static Index WORKFLOW_USER_CLONES_PRIMARY = Internal.createIndex("PRIMARY", WorkflowUserClones.WORKFLOW_USER_CLONES, new OrderField[] { WorkflowUserClones.WORKFLOW_USER_CLONES.UID, WorkflowUserClones.WORKFLOW_USER_CLONES.WID }, true);
- public static Index WORKFLOW_USER_CLONES_WID = Internal.createIndex("wid", WorkflowUserClones.WORKFLOW_USER_CLONES, new OrderField[] { WorkflowUserClones.WORKFLOW_USER_CLONES.WID }, false);
- public static Index WORKFLOW_USER_LIKES_PRIMARY = Internal.createIndex("PRIMARY", WorkflowUserLikes.WORKFLOW_USER_LIKES, new OrderField[] { WorkflowUserLikes.WORKFLOW_USER_LIKES.UID, WorkflowUserLikes.WORKFLOW_USER_LIKES.WID }, true);
- public static Index WORKFLOW_USER_LIKES_WID = Internal.createIndex("wid", WorkflowUserLikes.WORKFLOW_USER_LIKES, new OrderField[] { WorkflowUserLikes.WORKFLOW_USER_LIKES.WID }, false);
- public static Index WORKFLOW_VERSION_PRIMARY = Internal.createIndex("PRIMARY", WorkflowVersion.WORKFLOW_VERSION, new OrderField[] { WorkflowVersion.WORKFLOW_VERSION.VID }, true);
- public static Index WORKFLOW_VERSION_WID = Internal.createIndex("wid", WorkflowVersion.WORKFLOW_VERSION, new OrderField[] { WorkflowVersion.WORKFLOW_VERSION.WID }, false);
- public static Index WORKFLOW_VIEW_COUNT_PRIMARY = Internal.createIndex("PRIMARY", WorkflowViewCount.WORKFLOW_VIEW_COUNT, new OrderField[] { WorkflowViewCount.WORKFLOW_VIEW_COUNT.WID }, true);
- }
-}
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/Keys.java b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/Keys.java
deleted file mode 100644
index 7fc1003ecda..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/Keys.java
+++ /dev/null
@@ -1,191 +0,0 @@
-/*
- * This file is generated by jOOQ.
- */
-package edu.uci.ics.texera.web.model.jooq.generated;
-
-
-import edu.uci.ics.texera.web.model.jooq.generated.tables.Dataset;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.DatasetUserAccess;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.DatasetVersion;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.Project;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.ProjectUserAccess;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.PublicProject;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.User;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.UserConfig;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.Workflow;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowExecutions;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowOfProject;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowOfUser;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowRuntimeStatistics;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowUserAccess;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowUserClones;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowUserLikes;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowVersion;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowViewCount;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.DatasetRecord;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.DatasetUserAccessRecord;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.DatasetVersionRecord;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.ProjectRecord;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.ProjectUserAccessRecord;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.PublicProjectRecord;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.UserConfigRecord;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.UserRecord;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.WorkflowExecutionsRecord;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.WorkflowOfProjectRecord;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.WorkflowOfUserRecord;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.WorkflowRecord;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.WorkflowRuntimeStatisticsRecord;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.WorkflowUserAccessRecord;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.WorkflowUserClonesRecord;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.WorkflowUserLikesRecord;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.WorkflowVersionRecord;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.WorkflowViewCountRecord;
-
-import org.jooq.ForeignKey;
-import org.jooq.Identity;
-import org.jooq.UniqueKey;
-import org.jooq.impl.Internal;
-import org.jooq.types.UInteger;
-
-
-/**
- * A class modelling foreign key relationships and constraints of tables of
- * the texera_db schema.
- */
-@SuppressWarnings({ "all", "unchecked", "rawtypes" })
-public class Keys {
-
- // -------------------------------------------------------------------------
- // IDENTITY definitions
- // -------------------------------------------------------------------------
-
- public static final Identity
- /**
- * The table texera_db.dataset.
- */
- public static final Dataset DATASET = Dataset.DATASET;
-
- /**
- * The table texera_db.dataset_user_access.
- */
- public static final DatasetUserAccess DATASET_USER_ACCESS = DatasetUserAccess.DATASET_USER_ACCESS;
-
- /**
- * The table texera_db.dataset_version.
- */
- public static final DatasetVersion DATASET_VERSION = DatasetVersion.DATASET_VERSION;
-
- /**
- * The table texera_db.project.
- */
- public static final Project PROJECT = Project.PROJECT;
-
- /**
- * The table texera_db.project_user_access.
- */
- public static final ProjectUserAccess PROJECT_USER_ACCESS = ProjectUserAccess.PROJECT_USER_ACCESS;
-
- /**
- * The table texera_db.public_project.
- */
- public static final PublicProject PUBLIC_PROJECT = PublicProject.PUBLIC_PROJECT;
-
- /**
- * The table texera_db.user.
- */
- public static final User USER = User.USER;
-
- /**
- * The table texera_db.user_config.
- */
- public static final UserConfig USER_CONFIG = UserConfig.USER_CONFIG;
-
- /**
- * The table texera_db.workflow.
- */
- public static final Workflow WORKFLOW = Workflow.WORKFLOW;
-
- /**
- * The table texera_db.workflow_executions.
- */
- public static final WorkflowExecutions WORKFLOW_EXECUTIONS = WorkflowExecutions.WORKFLOW_EXECUTIONS;
-
- /**
- * The table texera_db.workflow_of_project.
- */
- public static final WorkflowOfProject WORKFLOW_OF_PROJECT = WorkflowOfProject.WORKFLOW_OF_PROJECT;
-
- /**
- * The table texera_db.workflow_of_user.
- */
- public static final WorkflowOfUser WORKFLOW_OF_USER = WorkflowOfUser.WORKFLOW_OF_USER;
-
- /**
- * The table texera_db.workflow_runtime_statistics.
- */
- public static final WorkflowRuntimeStatistics WORKFLOW_RUNTIME_STATISTICS = WorkflowRuntimeStatistics.WORKFLOW_RUNTIME_STATISTICS;
-
- /**
- * The table texera_db.workflow_user_access.
- */
- public static final WorkflowUserAccess WORKFLOW_USER_ACCESS = WorkflowUserAccess.WORKFLOW_USER_ACCESS;
-
- /**
- * The table texera_db.workflow_user_activity.
- */
- public static final WorkflowUserActivity WORKFLOW_USER_ACTIVITY = WorkflowUserActivity.WORKFLOW_USER_ACTIVITY;
-
- /**
- * The table texera_db.workflow_user_clones.
- */
- public static final WorkflowUserClones WORKFLOW_USER_CLONES = WorkflowUserClones.WORKFLOW_USER_CLONES;
-
- /**
- * The table texera_db.workflow_user_likes.
- */
- public static final WorkflowUserLikes WORKFLOW_USER_LIKES = WorkflowUserLikes.WORKFLOW_USER_LIKES;
-
- /**
- * The table texera_db.workflow_version.
- */
- public static final WorkflowVersion WORKFLOW_VERSION = WorkflowVersion.WORKFLOW_VERSION;
-
- /**
- * The table texera_db.workflow_view_count.
- */
- public static final WorkflowViewCount WORKFLOW_VIEW_COUNT = WorkflowViewCount.WORKFLOW_VIEW_COUNT;
-}
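
For orientation, the deleted file above is jOOQ's generated holder of static table references (its own diff header is missing from this hunk). A hypothetical query against the pre-change layout, using the SqlServer entry point that is also deleted in this diff, would look roughly like:

    // Illustrative only: assumes jOOQ's conventional generated Tables holder
    // class and the pre-change package layout shown in the deleted files;
    // the uid value is a placeholder.
    import edu.uci.ics.texera.web.SqlServer
    import edu.uci.ics.texera.web.model.jooq.generated.Tables.USER
    import org.jooq.types.UInteger

    object GeneratedTablesSketch {
      def lookupUserName(uid: Int): String =
        SqlServer
          .createDSLContext()
          .select(USER.NAME)
          .from(USER)
          .where(USER.UID.eq(UInteger.valueOf(uid)))
          .fetchOneInto(classOf[String])
    }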
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/TexeraDb.java b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/TexeraDb.java
deleted file mode 100644
index b9b180ad904..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/TexeraDb.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * This file is generated by jOOQ.
- */
-package edu.uci.ics.texera.web.model.jooq.generated;
-
-
-import edu.uci.ics.texera.web.model.jooq.generated.tables.Dataset;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.DatasetUserAccess;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.DatasetVersion;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.Project;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.ProjectUserAccess;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.PublicProject;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.User;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.UserConfig;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.Workflow;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowExecutions;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowOfProject;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowOfUser;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowRuntimeStatistics;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowUserAccess;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowUserActivity;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowUserClones;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowUserLikes;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowVersion;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowViewCount;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import org.jooq.Catalog;
-import org.jooq.Table;
-import org.jooq.impl.SchemaImpl;
-
-
-/**
- * This class is generated by jOOQ.
- */
-@SuppressWarnings({ "all", "unchecked", "rawtypes" })
-public class TexeraDb extends SchemaImpl {
-
- private static final long serialVersionUID = 2026314588;
-
- /**
- * The reference instance of texera_db
- */
- public static final TexeraDb TEXERA_DB = new TexeraDb();
-
- /**
- * The table texera_db.dataset.
- */
- public final Dataset DATASET = edu.uci.ics.texera.web.model.jooq.generated.tables.Dataset.DATASET;
-
- /**
- * The table texera_db.dataset_user_access.
- */
- public final DatasetUserAccess DATASET_USER_ACCESS = edu.uci.ics.texera.web.model.jooq.generated.tables.DatasetUserAccess.DATASET_USER_ACCESS;
-
- /**
- * The table texera_db.dataset_version.
- */
- public final DatasetVersion DATASET_VERSION = edu.uci.ics.texera.web.model.jooq.generated.tables.DatasetVersion.DATASET_VERSION;
-
- /**
- * The table texera_db.project.
- */
- public final Project PROJECT = edu.uci.ics.texera.web.model.jooq.generated.tables.Project.PROJECT;
-
- /**
- * The table texera_db.project_user_access.
- */
- public final ProjectUserAccess PROJECT_USER_ACCESS = edu.uci.ics.texera.web.model.jooq.generated.tables.ProjectUserAccess.PROJECT_USER_ACCESS;
-
- /**
- * The table texera_db.public_project.
- */
- public final PublicProject PUBLIC_PROJECT = edu.uci.ics.texera.web.model.jooq.generated.tables.PublicProject.PUBLIC_PROJECT;
-
- /**
- * The table texera_db.user.
- */
- public final User USER = edu.uci.ics.texera.web.model.jooq.generated.tables.User.USER;
-
- /**
- * The table texera_db.user_config.
- */
- public final UserConfig USER_CONFIG = edu.uci.ics.texera.web.model.jooq.generated.tables.UserConfig.USER_CONFIG;
-
- /**
- * The table texera_db.workflow.
- */
- public final Workflow WORKFLOW = edu.uci.ics.texera.web.model.jooq.generated.tables.Workflow.WORKFLOW;
-
- /**
- * The table texera_db.workflow_executions.
- */
- public final WorkflowExecutions WORKFLOW_EXECUTIONS = edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowExecutions.WORKFLOW_EXECUTIONS;
-
- /**
- * The table texera_db.workflow_of_project.
- */
- public final WorkflowOfProject WORKFLOW_OF_PROJECT = edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowOfProject.WORKFLOW_OF_PROJECT;
-
- /**
- * The table texera_db.workflow_of_user.
- */
- public final WorkflowOfUser WORKFLOW_OF_USER = edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowOfUser.WORKFLOW_OF_USER;
-
- /**
- * The table texera_db.workflow_runtime_statistics.
- */
- public final WorkflowRuntimeStatistics WORKFLOW_RUNTIME_STATISTICS = edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowRuntimeStatistics.WORKFLOW_RUNTIME_STATISTICS;
-
- /**
- * The table texera_db.workflow_user_access.
- */
- public final WorkflowUserAccess WORKFLOW_USER_ACCESS = edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowUserAccess.WORKFLOW_USER_ACCESS;
-
- /**
- * The table texera_db.workflow_user_activity.
- */
- public final WorkflowUserActivity WORKFLOW_USER_ACTIVITY = edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowUserActivity.WORKFLOW_USER_ACTIVITY;
-
- /**
- * The table texera_db.workflow_user_clones.
- */
- public final WorkflowUserClones WORKFLOW_USER_CLONES = edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowUserClones.WORKFLOW_USER_CLONES;
-
- /**
- * The table texera_db.workflow_user_likes.
- */
- public final WorkflowUserLikes WORKFLOW_USER_LIKES = edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowUserLikes.WORKFLOW_USER_LIKES;
-
- /**
- * The table texera_db.workflow_version.
- */
- public final WorkflowVersion WORKFLOW_VERSION = edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowVersion.WORKFLOW_VERSION;
-
- /**
- * The table texera_db.workflow_view_count.
- */
- public final WorkflowViewCount WORKFLOW_VIEW_COUNT = edu.uci.ics.texera.web.model.jooq.generated.tables.WorkflowViewCount.WORKFLOW_VIEW_COUNT;
-
- /**
- * No further instances allowed
- */
- private TexeraDb() {
- super("texera_db", null);
- }
-
-
- @Override
- public Catalog getCatalog() {
- return DefaultCatalog.DEFAULT_CATALOG;
- }
-
- @Override
- public final List<Table<?>> getTables() {
- List result = new ArrayList();
- result.addAll(getTables0());
- return result;
- }
-
- private final List<Table<?>> getTables0() {
- return Arrays.<Table<?>>asList(
- Dataset.DATASET,
- DatasetUserAccess.DATASET_USER_ACCESS,
- DatasetVersion.DATASET_VERSION,
- Project.PROJECT,
- ProjectUserAccess.PROJECT_USER_ACCESS,
- PublicProject.PUBLIC_PROJECT,
- User.USER,
- UserConfig.USER_CONFIG,
- Workflow.WORKFLOW,
- WorkflowExecutions.WORKFLOW_EXECUTIONS,
- WorkflowOfProject.WORKFLOW_OF_PROJECT,
- WorkflowOfUser.WORKFLOW_OF_USER,
- WorkflowRuntimeStatistics.WORKFLOW_RUNTIME_STATISTICS,
- WorkflowUserAccess.WORKFLOW_USER_ACCESS,
- WorkflowUserActivity.WORKFLOW_USER_ACTIVITY,
- WorkflowUserClones.WORKFLOW_USER_CLONES,
- WorkflowUserLikes.WORKFLOW_USER_LIKES,
- WorkflowVersion.WORKFLOW_VERSION,
- WorkflowViewCount.WORKFLOW_VIEW_COUNT);
- }
-}
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/enums/DatasetUserAccessPrivilege.java b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/enums/DatasetUserAccessPrivilege.java
deleted file mode 100644
index 09b530b3124..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/enums/DatasetUserAccessPrivilege.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * This file is generated by jOOQ.
- */
-package edu.uci.ics.texera.web.model.jooq.generated.enums;
-
-
-import org.jooq.Catalog;
-import org.jooq.EnumType;
-import org.jooq.Schema;
-
-
-/**
- * This class is generated by jOOQ.
- */
-@SuppressWarnings({ "all", "unchecked", "rawtypes" })
-public enum DatasetUserAccessPrivilege implements EnumType {
-
- NONE("NONE"),
-
- READ("READ"),
-
- WRITE("WRITE");
-
- private final String literal;
-
- private DatasetUserAccessPrivilege(String literal) {
- this.literal = literal;
- }
-
- @Override
- public Catalog getCatalog() {
- return null;
- }
-
- @Override
- public Schema getSchema() {
- return null;
- }
-
- @Override
- public String getName() {
- return "dataset_user_access_privilege";
- }
-
- @Override
- public String getLiteral() {
- return literal;
- }
-}
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/enums/ProjectUserAccessPrivilege.java b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/enums/ProjectUserAccessPrivilege.java
deleted file mode 100644
index 83c81318da7..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/enums/ProjectUserAccessPrivilege.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * This file is generated by jOOQ.
- */
-package edu.uci.ics.texera.web.model.jooq.generated.enums;
-
-
-import org.jooq.Catalog;
-import org.jooq.EnumType;
-import org.jooq.Schema;
-
-
-/**
- * This class is generated by jOOQ.
- */
-@SuppressWarnings({ "all", "unchecked", "rawtypes" })
-public enum ProjectUserAccessPrivilege implements EnumType {
-
- NONE("NONE"),
-
- READ("READ"),
-
- WRITE("WRITE");
-
- private final String literal;
-
- private ProjectUserAccessPrivilege(String literal) {
- this.literal = literal;
- }
-
- @Override
- public Catalog getCatalog() {
- return null;
- }
-
- @Override
- public Schema getSchema() {
- return null;
- }
-
- @Override
- public String getName() {
- return "project_user_access_privilege";
- }
-
- @Override
- public String getLiteral() {
- return literal;
- }
-}
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/enums/UserRole.java b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/enums/UserRole.java
deleted file mode 100644
index 627cc39646c..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/enums/UserRole.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * This file is generated by jOOQ.
- */
-package edu.uci.ics.texera.web.model.jooq.generated.enums;
-
-
-import org.jooq.Catalog;
-import org.jooq.EnumType;
-import org.jooq.Schema;
-
-
-/**
- * This class is generated by jOOQ.
- */
-@SuppressWarnings({ "all", "unchecked", "rawtypes" })
-public enum UserRole implements EnumType {
-
- INACTIVE("INACTIVE"),
-
- RESTRICTED("RESTRICTED"),
-
- REGULAR("REGULAR"),
-
- ADMIN("ADMIN");
-
- private final String literal;
-
- private UserRole(String literal) {
- this.literal = literal;
- }
-
- @Override
- public Catalog getCatalog() {
- return null;
- }
-
- @Override
- public Schema getSchema() {
- return null;
- }
-
- @Override
- public String getName() {
- return "user_role";
- }
-
- @Override
- public String getLiteral() {
- return literal;
- }
-}
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/enums/WorkflowUserAccessPrivilege.java b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/enums/WorkflowUserAccessPrivilege.java
deleted file mode 100644
index 9ef65ca0cec..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/enums/WorkflowUserAccessPrivilege.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * This file is generated by jOOQ.
- */
-package edu.uci.ics.texera.web.model.jooq.generated.enums;
-
-
-import org.jooq.Catalog;
-import org.jooq.EnumType;
-import org.jooq.Schema;
-
-
-/**
- * This class is generated by jOOQ.
- */
-@SuppressWarnings({ "all", "unchecked", "rawtypes" })
-public enum WorkflowUserAccessPrivilege implements EnumType {
-
- NONE("NONE"),
-
- READ("READ"),
-
- WRITE("WRITE");
-
- private final String literal;
-
- private WorkflowUserAccessPrivilege(String literal) {
- this.literal = literal;
- }
-
- @Override
- public Catalog getCatalog() {
- return null;
- }
-
- @Override
- public Schema getSchema() {
- return null;
- }
-
- @Override
- public String getName() {
- return "workflow_user_access_privilege";
- }
-
- @Override
- public String getLiteral() {
- return literal;
- }
-}
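
For reference, the enum classes deleted above are consumed through the jOOQ DSL together with the generated table references deleted below. The following is a minimal, illustrative sketch only and is not code from this change: it assumes a MySQL connection to texera_db, jOOQ's standard generated member names (DATASET_USER_ACCESS.DID, UID, PRIVILEGE), and an enum-typed privilege column; the JDBC URL and credentials are placeholders.

import static edu.uci.ics.texera.web.model.jooq.generated.tables.DatasetUserAccess.DATASET_USER_ACCESS;

import edu.uci.ics.texera.web.model.jooq.generated.enums.DatasetUserAccessPrivilege;
import org.jooq.DSLContext;
import org.jooq.Record;
import org.jooq.Result;
import org.jooq.SQLDialect;
import org.jooq.impl.DSL;
import org.jooq.types.UInteger;

import java.sql.Connection;
import java.sql.DriverManager;

public class DatasetAccessExample {
    public static void main(String[] args) throws Exception {
        // Placeholder JDBC settings; substitute the deployment's own URL and credentials.
        try (Connection conn =
                 DriverManager.getConnection("jdbc:mysql://localhost:3306/texera_db", "root", "")) {
            DSLContext ctx = DSL.using(conn, SQLDialect.MYSQL);

            // Fetch every access row granting READ or WRITE on dataset did = 1,
            // using the generated table reference and privilege enum.
            Result<? extends Record> rows = ctx
                    .selectFrom(DATASET_USER_ACCESS)
                    .where(DATASET_USER_ACCESS.DID.eq(UInteger.valueOf(1)))
                    .and(DATASET_USER_ACCESS.PRIVILEGE.in(
                            DatasetUserAccessPrivilege.READ,
                            DatasetUserAccessPrivilege.WRITE))
                    .fetch();

            // Print the uid of each matching user.
            rows.forEach(r -> System.out.println(r.get(DATASET_USER_ACCESS.UID)));
        }
    }
}
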
diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/tables/Dataset.java b/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/tables/Dataset.java
deleted file mode 100644
index 6ec0034bf4c..00000000000
--- a/core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/tables/Dataset.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * This file is generated by jOOQ.
- */
-package edu.uci.ics.texera.web.model.jooq.generated.tables;
-
-
-import edu.uci.ics.texera.web.model.jooq.generated.Indexes;
-import edu.uci.ics.texera.web.model.jooq.generated.Keys;
-import edu.uci.ics.texera.web.model.jooq.generated.TexeraDb;
-import edu.uci.ics.texera.web.model.jooq.generated.tables.records.DatasetRecord;
-
-import java.sql.Timestamp;
-import java.util.Arrays;
-import java.util.List;
-
-import org.jooq.Field;
-import org.jooq.ForeignKey;
-import org.jooq.Identity;
-import org.jooq.Index;
-import org.jooq.Name;
-import org.jooq.Record;
-import org.jooq.Row6;
-import org.jooq.Schema;
-import org.jooq.Table;
-import org.jooq.TableField;
-import org.jooq.UniqueKey;
-import org.jooq.impl.DSL;
-import org.jooq.impl.TableImpl;
-import org.jooq.types.UInteger;
-
-
-/**
- * This class is generated by jOOQ.
- */
-@SuppressWarnings({ "all", "unchecked", "rawtypes" })
-public class Dataset extends TableImpl<DatasetRecord> {
-
-    /**
-     * The reference instance of <code>texera_db.dataset</code>
-     */
-    public static final Dataset DATASET = new Dataset();
-
-    /**
-     * The class holding records for this type
-     */
-    @Override
-    public Class<DatasetRecord> getRecordType() {
-        return DatasetRecord.class;
-    }
-
-    // TableField declarations for the columns of <code>texera_db.dataset</code>:
-    // did, owner_uid, name, is_public, description, creation_time
-
-    /**
-     * Create a <code>texera_db.dataset</code> table reference
-     */
-    public Dataset() {
-        this(DSL.name("dataset"), null);
-    }
-
-    /**
-     * Create an aliased <code>texera_db.dataset</code> table reference
-     */
-    public Dataset(String alias) {
-        this(DSL.name(alias), DATASET);
-    }
-
-    /**
-     * Create an aliased <code>texera_db.dataset</code> table reference
-     */
-    public Dataset(Name alias) {
-        this(alias, DATASET);
-    }
-
-    // private aliasing constructor and remaining generated members omitted
-}

[The deletions continue with the remaining jOOQ-generated table classes under
core/amber/src/main/scala/edu/uci/ics/texera/web/model/jooq/generated/tables/, each
following the same generated structure; only the table and column names are kept here:

  DatasetUserAccess.java   texera_db.dataset_user_access   did, uid, privilege
  DatasetVersion.java      texera_db.dataset_version       dvid, did, creator_uid, name, version_hash, creation_time
  Project.java             texera_db.project               pid, name, description, owner_id, creation_time, color
  ProjectUserAccess.java   texera_db.project_user_access   uid, pid, privilege
  PublicProject.java       texera_db.public_project        pid, uid
  User.java                texera_db.user                  uid, name, email, password, google_id, role, google_avatar]