diff --git a/.gitignore b/.gitignore index 4fbd384afe6..4fc59463837 100644 --- a/.gitignore +++ b/.gitignore @@ -108,4 +108,35 @@ Composer-Setup.exe .metals/ .bloop/ .ammonite/ -metals.sbt \ No newline at end of file +metals.sbt + + +# Ignore Helm-specific artifacts in texera-helmchart +core/scripts/texera-helmchart/*.tgz +core/scripts/texera-helmchart/charts/ +core/scripts/texera-helmchart/tmpcharts/ +core/scripts/texera-helmchart/*.lock +core/scripts/texera-helmchart/test-output/ +core/scripts/texera-helmchart/tests/test-*.yaml +core/scripts/texera-helmchart/*.helmignore + +# Ignore general OS-specific files and editor files in texera-helmchart +core/scripts/texera-helmchart/.DS_Store +core/scripts/texera-helmchart/Thumbs.db +core/scripts/texera-helmchart/*.swp +core/scripts/texera-helmchart/*.swo +core/scripts/texera-helmchart/*.bak +core/scripts/texera-helmchart/*~ +core/scripts/texera-helmchart/.idea/ +core/scripts/texera-helmchart/.vscode/ +core/scripts/texera-helmchart/*.log + +# Temporary files in texera-helmchart +core/scripts/texera-helmchart/*.tmp +core/scripts/texera-helmchart/*.out +core/scripts/texera-helmchart/*.cache +core/scripts/texera-helmchart/*.pyc +core/scripts/texera-helmchart/__pycache__/ + +# Build artifacts in texera-helmchart +core/scripts/texera-helmchart/dist/ \ No newline at end of file diff --git a/core/amber/computing-unit.dockerfile b/core/amber/computing-unit.dockerfile new file mode 100644 index 00000000000..a5b21f04d48 --- /dev/null +++ b/core/amber/computing-unit.dockerfile @@ -0,0 +1,41 @@ +FROM sbtscala/scala-sbt:eclipse-temurin-jammy-11.0.17_8_1.9.3_2.13.11 + +# Set working directory +WORKDIR /core + +# Copy all projects under core to /core +COPY core/ . + +RUN rm -rf amber/user-resources/* + +# Update system and install dependencies +RUN apt-get update && apt-get install -y \ + netcat \ + unzip \ + python3-pip \ + libpq-dev \ + python3-dev \ + && apt-get clean + +# Install Python dependencies +RUN pip3 install --upgrade pip setuptools wheel +RUN pip3 install python-lsp-server python-lsp-server[websockets] + +# Install requirements with a fallback for wordcloud +RUN pip3 install -r amber/requirements.txt +RUN pip3 install --no-cache-dir --find-links https://pypi.org/simple/ -r amber/operator-requirements.txt || \ + pip3 install --no-cache-dir wordcloud==1.9.2 + +# Additional setup +WORKDIR /core +# Add .git for runtime calls to jgit from OPversion +COPY .git ../.git + +# Build services +RUN scripts/build-services.sh + +# Set the default command +CMD ["scripts/workflow-computing-unit.sh"] + +# Expose the required port +EXPOSE 8085 \ No newline at end of file diff --git a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/AmberRuntime.scala b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/AmberRuntime.scala index e21a9eb0c09..1ad588ee4f6 100644 --- a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/AmberRuntime.scala +++ b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/AmberRuntime.scala @@ -54,6 +54,7 @@ object AmberRuntime { akka.cluster.seed-nodes = [ "akka://Amber@$localIpAddress:2552" ] """) .withFallback(akkaConfig) + .resolve() AmberConfig.masterNodeAddr = createMasterAddress(localIpAddress) createAmberSystem(masterConfig) } @@ -75,6 +76,7 @@ object AmberRuntime { akka.cluster.seed-nodes = [ "akka://Amber@$addr:2552" ] """) .withFallback(akkaConfig) + .resolve() AmberConfig.masterNodeAddr = createMasterAddress(addr) createAmberSystem(workerConfig) } diff --git 
a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/ResultResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/ResultResource.scala index 206546e8d56..ffe83824395 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/ResultResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/ResultResource.scala @@ -5,7 +5,7 @@ import edu.uci.ics.amber.core.virtualidentity.WorkflowIdentity import edu.uci.ics.texera.web.auth.SessionUser import edu.uci.ics.texera.web.model.websocket.request.ResultExportRequest import edu.uci.ics.texera.web.model.websocket.response.ResultExportResponse -import edu.uci.ics.texera.web.service.{ResultExportService, WorkflowService} +import edu.uci.ics.texera.web.service.ResultExportService import io.dropwizard.auth.Auth import javax.ws.rs._ diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/service/ExecutionResultService.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/service/ExecutionResultService.scala index 7d0f6b86e83..5596f236f41 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/service/ExecutionResultService.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/service/ExecutionResultService.scala @@ -4,7 +4,7 @@ import akka.actor.Cancellable import com.fasterxml.jackson.annotation.{JsonTypeInfo, JsonTypeName} import com.fasterxml.jackson.databind.node.ObjectNode import com.typesafe.scalalogging.LazyLogging -import edu.uci.ics.amber.core.storage.DocumentFactory.{ICEBERG, MONGODB} +import edu.uci.ics.amber.core.storage.DocumentFactory.ICEBERG import edu.uci.ics.amber.core.storage.VFSResourceType.MATERIALIZED_RESULT import edu.uci.ics.amber.core.storage.model.VirtualDocument import edu.uci.ics.amber.core.storage.{DocumentFactory, StorageConfig, VFSURIFactory} diff --git a/core/amber/webserver.dockerfile b/core/amber/webserver.dockerfile new file mode 100644 index 00000000000..e0c0d035ff3 --- /dev/null +++ b/core/amber/webserver.dockerfile @@ -0,0 +1,42 @@ +FROM node:18-alpine AS nodegui + +WORKDIR /gui +COPY core/gui/package.json core/gui/yarn.lock ./ +RUN apk add --no-cache python3 make g++ git +RUN corepack enable && corepack prepare yarn@4.5.1 --activate && yarn set version --yarn-path 4.5.1 +# Fake git-version.js during yarn install to prevent git from causing cache +# invalidation of dependencies +# Add tools required for building dependencies +RUN touch git-version.js && YARN_NODE_LINKER=node-modules yarn install + +COPY core/gui . +# Position of .git doesn't matter since it's only there for the revision hash +COPY .git ./.git +RUN apk add --no-cache git && \ + node git-version.js && \ + apk del git && \ + yarn run build + +FROM sbtscala/scala-sbt:eclipse-temurin-jammy-11.0.17_8_1.9.3_2.13.11 + +# copy all projects under core to /core +WORKDIR /core +COPY core/ . 
+ +RUN rm -rf amber/user-resources/* + +RUN apt-get update +RUN apt-get install -y netcat unzip python3-pip libpq-dev python3-dev +RUN pip3 install python-lsp-server python-lsp-server[websockets] +RUN pip3 install -r amber/requirements.txt + +WORKDIR /core +# Add .git for runtime calls to jgit from OPversion +COPY .git ../.git +COPY --from=nodegui /gui/dist ./gui/dist + +RUN scripts/build-services.sh + +CMD ["scripts/server.sh"] + +EXPOSE 8080 \ No newline at end of file diff --git a/core/build.sbt b/core/build.sbt index 8fb469af416..c280b69dd8e 100644 --- a/core/build.sbt +++ b/core/build.sbt @@ -3,6 +3,14 @@ lazy val WorkflowCore = (project in file("workflow-core")) .dependsOn(DAO) .configs(Test) .dependsOn(DAO % "test->test") // test scope dependency +lazy val WorkflowComputingUnitManagingService = (project in file("workflow-computing-unit-managing-service")) + .dependsOn(WorkflowCore) + .settings( + dependencyOverrides ++= Seq( + // override it as io.dropwizard 4 require 2.16.1 or higher + "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.17.0", + ) + ) lazy val WorkflowOperator = (project in file("workflow-operator")) .dependsOn(WorkflowCore) .settings( @@ -39,7 +47,7 @@ lazy val WorkflowExecutionService = (project in file("amber")) // root project definition lazy val CoreProject = (project in file(".")) - .aggregate(DAO, WorkflowCore, WorkflowOperator, WorkflowCompilingService, WorkflowExecutionService) + .aggregate(DAO, WorkflowComputingUnitManagingService, WorkflowCore, WorkflowOperator, WorkflowCompilingService, WorkflowExecutionService) .settings( name := "core", version := "0.1.0", diff --git a/core/dao/build.sbt b/core/dao/build.sbt index 526c37be92e..55046e77ab5 100644 --- a/core/dao/build.sbt +++ b/core/dao/build.sbt @@ -89,4 +89,5 @@ libraryDependencies ++= Seq( libraryDependencies ++= Seq( "mysql" % "mysql-connector-java" % "8.0.33", // MySQL connector "org.yaml" % "snakeyaml" % "1.30", // for reading storage config yaml file + "com.typesafe" % "config" % "1.4.3", // for reading storage config .conf file ) \ No newline at end of file diff --git a/core/dao/src/main/scala/edu/uci/ics/texera/dao/JooqCodeGenerator.scala b/core/dao/src/main/scala/edu/uci/ics/texera/dao/JooqCodeGenerator.scala index f0297781434..e1d6bc77fc3 100644 --- a/core/dao/src/main/scala/edu/uci/ics/texera/dao/JooqCodeGenerator.scala +++ b/core/dao/src/main/scala/edu/uci/ics/texera/dao/JooqCodeGenerator.scala @@ -2,12 +2,8 @@ package edu.uci.ics.texera.dao import org.jooq.codegen.GenerationTool import org.jooq.meta.jaxb.{Configuration, Jdbc} -import org.yaml.snakeyaml.Yaml - -import java.io.InputStream +import com.typesafe.config.{Config, ConfigFactory} import java.nio.file.{Files, Path} -import java.util.{Map => JMap} -import scala.jdk.CollectionConverters._ object JooqCodeGenerator { @throws[Exception] @@ -17,38 +13,33 @@ object JooqCodeGenerator { Path.of("dao").resolve("src").resolve("main").resolve("resources").resolve("jooq-conf.xml") val jooqConfig: Configuration = GenerationTool.load(Files.newInputStream(jooqXmlPath)) - // Load YAML configuration - val yamlConfPath: Path = Path + // Load .conf configuration + val confPath: Path = Path .of("workflow-core") .resolve("src") .resolve("main") .resolve("resources") - .resolve("storage-config.yaml") - val yaml = new Yaml - val inputStream: InputStream = Files.newInputStream(yamlConfPath) + .resolve("storage.conf") + val conf: Config = ConfigFactory.parseFile(confPath.toFile) - val conf: Map[String, Any] = - 
yaml.load(inputStream).asInstanceOf[JMap[String, Any]].asScala.toMap + // Resolve placeholders in the configuration file + val resolvedConf: Config = conf.resolve() - val jdbcConfig = conf("storage") - .asInstanceOf[JMap[String, Any]] - .asScala("jdbc") - .asInstanceOf[JMap[String, Any]] - .asScala + // Get JDBC configuration from .conf file + val jdbcUsername = resolvedConf.getString("storage.jdbc.username") + val jdbcPassword = resolvedConf.getString("storage.jdbc.password") + val jdbcUrl = resolvedConf.getString("storage.jdbc.url") // Set JDBC configuration for jOOQ val jooqJdbcConfig = new Jdbc jooqJdbcConfig.setDriver("com.mysql.cj.jdbc.Driver") - jooqJdbcConfig.setUrl(jdbcConfig("url").toString) - jooqJdbcConfig.setUsername(jdbcConfig("username").toString) - jooqJdbcConfig.setPassword(jdbcConfig("password").toString) + jooqJdbcConfig.setUrl(jdbcUrl) + jooqJdbcConfig.setUsername(jdbcUsername) + jooqJdbcConfig.setPassword(jdbcPassword) jooqConfig.setJdbc(jooqJdbcConfig) // Generate the code GenerationTool.generate(jooqConfig) - - // Close input stream - inputStream.close() } } diff --git a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/Indexes.java b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/Indexes.java index 60f5acc26f2..527ef927c2c 100644 --- a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/Indexes.java +++ b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/Indexes.java @@ -15,6 +15,7 @@ import edu.uci.ics.texera.dao.jooq.generated.tables.User; import edu.uci.ics.texera.dao.jooq.generated.tables.UserConfig; import edu.uci.ics.texera.dao.jooq.generated.tables.Workflow; +import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowComputingUnit; import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowExecutions; import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowOfProject; import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowOfUser; @@ -63,6 +64,7 @@ public class Indexes { public static final Index USER_CONFIG_PRIMARY = Indexes0.USER_CONFIG_PRIMARY; public static final Index WORKFLOW_IDX_WORKFLOW_NAME_DESCRIPTION_CONTENT = Indexes0.WORKFLOW_IDX_WORKFLOW_NAME_DESCRIPTION_CONTENT; public static final Index WORKFLOW_PRIMARY = Indexes0.WORKFLOW_PRIMARY; + public static final Index WORKFLOW_COMPUTING_UNIT_PRIMARY = Indexes0.WORKFLOW_COMPUTING_UNIT_PRIMARY; public static final Index WORKFLOW_EXECUTIONS_PRIMARY = Indexes0.WORKFLOW_EXECUTIONS_PRIMARY; public static final Index WORKFLOW_EXECUTIONS_UID = Indexes0.WORKFLOW_EXECUTIONS_UID; public static final Index WORKFLOW_EXECUTIONS_VID = Indexes0.WORKFLOW_EXECUTIONS_VID; @@ -109,6 +111,7 @@ private static class Indexes0 { public static Index USER_CONFIG_PRIMARY = Internal.createIndex("PRIMARY", UserConfig.USER_CONFIG, new OrderField[] { UserConfig.USER_CONFIG.UID, UserConfig.USER_CONFIG.KEY }, true); public static Index WORKFLOW_IDX_WORKFLOW_NAME_DESCRIPTION_CONTENT = Internal.createIndex("idx_workflow_name_description_content", Workflow.WORKFLOW, new OrderField[] { Workflow.WORKFLOW.NAME, Workflow.WORKFLOW.DESCRIPTION, Workflow.WORKFLOW.CONTENT }, false); public static Index WORKFLOW_PRIMARY = Internal.createIndex("PRIMARY", Workflow.WORKFLOW, new OrderField[] { Workflow.WORKFLOW.WID }, true); + public static Index WORKFLOW_COMPUTING_UNIT_PRIMARY = Internal.createIndex("PRIMARY", WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT, new OrderField[] { WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CUID }, true); public static Index WORKFLOW_EXECUTIONS_PRIMARY = 
Internal.createIndex("PRIMARY", WorkflowExecutions.WORKFLOW_EXECUTIONS, new OrderField[] { WorkflowExecutions.WORKFLOW_EXECUTIONS.EID }, true); public static Index WORKFLOW_EXECUTIONS_UID = Internal.createIndex("uid", WorkflowExecutions.WORKFLOW_EXECUTIONS, new OrderField[] { WorkflowExecutions.WORKFLOW_EXECUTIONS.UID }, false); public static Index WORKFLOW_EXECUTIONS_VID = Internal.createIndex("vid", WorkflowExecutions.WORKFLOW_EXECUTIONS, new OrderField[] { WorkflowExecutions.WORKFLOW_EXECUTIONS.VID }, false); diff --git a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/Keys.java b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/Keys.java index 3064b7f28b8..98fa07c7ad4 100644 --- a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/Keys.java +++ b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/Keys.java @@ -15,6 +15,7 @@ import edu.uci.ics.texera.dao.jooq.generated.tables.User; import edu.uci.ics.texera.dao.jooq.generated.tables.UserConfig; import edu.uci.ics.texera.dao.jooq.generated.tables.Workflow; +import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowComputingUnit; import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowExecutions; import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowOfProject; import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowOfUser; @@ -33,6 +34,7 @@ import edu.uci.ics.texera.dao.jooq.generated.tables.records.PublicProjectRecord; import edu.uci.ics.texera.dao.jooq.generated.tables.records.UserConfigRecord; import edu.uci.ics.texera.dao.jooq.generated.tables.records.UserRecord; +import edu.uci.ics.texera.dao.jooq.generated.tables.records.WorkflowComputingUnitRecord; import edu.uci.ics.texera.dao.jooq.generated.tables.records.WorkflowExecutionsRecord; import edu.uci.ics.texera.dao.jooq.generated.tables.records.WorkflowOfProjectRecord; import edu.uci.ics.texera.dao.jooq.generated.tables.records.WorkflowOfUserRecord; @@ -68,6 +70,7 @@ public class Keys { public static final Identity IDENTITY_PROJECT = Identities0.IDENTITY_PROJECT; public static final Identity IDENTITY_USER = Identities0.IDENTITY_USER; public static final Identity IDENTITY_WORKFLOW = Identities0.IDENTITY_WORKFLOW; + public static final Identity IDENTITY_WORKFLOW_COMPUTING_UNIT = Identities0.IDENTITY_WORKFLOW_COMPUTING_UNIT; public static final Identity IDENTITY_WORKFLOW_EXECUTIONS = Identities0.IDENTITY_WORKFLOW_EXECUTIONS; public static final Identity IDENTITY_WORKFLOW_VERSION = Identities0.IDENTITY_WORKFLOW_VERSION; @@ -90,6 +93,7 @@ public class Keys { public static final UniqueKey KEY_USER_GOOGLE_ID = UniqueKeys0.KEY_USER_GOOGLE_ID; public static final UniqueKey KEY_USER_CONFIG_PRIMARY = UniqueKeys0.KEY_USER_CONFIG_PRIMARY; public static final UniqueKey KEY_WORKFLOW_PRIMARY = UniqueKeys0.KEY_WORKFLOW_PRIMARY; + public static final UniqueKey KEY_WORKFLOW_COMPUTING_UNIT_PRIMARY = UniqueKeys0.KEY_WORKFLOW_COMPUTING_UNIT_PRIMARY; public static final UniqueKey KEY_WORKFLOW_EXECUTIONS_PRIMARY = UniqueKeys0.KEY_WORKFLOW_EXECUTIONS_PRIMARY; public static final UniqueKey KEY_WORKFLOW_OF_PROJECT_PRIMARY = UniqueKeys0.KEY_WORKFLOW_OF_PROJECT_PRIMARY; public static final UniqueKey KEY_WORKFLOW_OF_USER_PRIMARY = UniqueKeys0.KEY_WORKFLOW_OF_USER_PRIMARY; @@ -140,6 +144,7 @@ private static class Identities0 { public static Identity IDENTITY_PROJECT = Internal.createIdentity(Project.PROJECT, Project.PROJECT.PID); public static Identity IDENTITY_USER = Internal.createIdentity(User.USER, User.USER.UID); public static Identity 
IDENTITY_WORKFLOW = Internal.createIdentity(Workflow.WORKFLOW, Workflow.WORKFLOW.WID); + public static Identity IDENTITY_WORKFLOW_COMPUTING_UNIT = Internal.createIdentity(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT, WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CUID); public static Identity IDENTITY_WORKFLOW_EXECUTIONS = Internal.createIdentity(WorkflowExecutions.WORKFLOW_EXECUTIONS, WorkflowExecutions.WORKFLOW_EXECUTIONS.EID); public static Identity IDENTITY_WORKFLOW_VERSION = Internal.createIdentity(WorkflowVersion.WORKFLOW_VERSION, WorkflowVersion.WORKFLOW_VERSION.VID); } @@ -160,6 +165,7 @@ private static class UniqueKeys0 { public static final UniqueKey KEY_USER_GOOGLE_ID = Internal.createUniqueKey(User.USER, "KEY_user_google_id", User.USER.GOOGLE_ID); public static final UniqueKey KEY_USER_CONFIG_PRIMARY = Internal.createUniqueKey(UserConfig.USER_CONFIG, "KEY_user_config_PRIMARY", UserConfig.USER_CONFIG.UID, UserConfig.USER_CONFIG.KEY); public static final UniqueKey KEY_WORKFLOW_PRIMARY = Internal.createUniqueKey(Workflow.WORKFLOW, "KEY_workflow_PRIMARY", Workflow.WORKFLOW.WID); + public static final UniqueKey KEY_WORKFLOW_COMPUTING_UNIT_PRIMARY = Internal.createUniqueKey(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT, "KEY_workflow_computing_unit_PRIMARY", WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CUID); public static final UniqueKey KEY_WORKFLOW_EXECUTIONS_PRIMARY = Internal.createUniqueKey(WorkflowExecutions.WORKFLOW_EXECUTIONS, "KEY_workflow_executions_PRIMARY", WorkflowExecutions.WORKFLOW_EXECUTIONS.EID); public static final UniqueKey KEY_WORKFLOW_OF_PROJECT_PRIMARY = Internal.createUniqueKey(WorkflowOfProject.WORKFLOW_OF_PROJECT, "KEY_workflow_of_project_PRIMARY", WorkflowOfProject.WORKFLOW_OF_PROJECT.WID, WorkflowOfProject.WORKFLOW_OF_PROJECT.PID); public static final UniqueKey KEY_WORKFLOW_OF_USER_PRIMARY = Internal.createUniqueKey(WorkflowOfUser.WORKFLOW_OF_USER, "KEY_workflow_of_user_PRIMARY", WorkflowOfUser.WORKFLOW_OF_USER.UID, WorkflowOfUser.WORKFLOW_OF_USER.WID); diff --git a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/Tables.java b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/Tables.java index 5cb9375ea0f..7a06f72c3d5 100644 --- a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/Tables.java +++ b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/Tables.java @@ -15,6 +15,7 @@ import edu.uci.ics.texera.dao.jooq.generated.tables.User; import edu.uci.ics.texera.dao.jooq.generated.tables.UserConfig; import edu.uci.ics.texera.dao.jooq.generated.tables.Workflow; +import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowComputingUnit; import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowExecutions; import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowOfProject; import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowOfUser; @@ -87,6 +88,11 @@ public class Tables { */ public static final Workflow WORKFLOW = Workflow.WORKFLOW; + /** + * The table texera_db.workflow_computing_unit. + */ + public static final WorkflowComputingUnit WORKFLOW_COMPUTING_UNIT = WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT; + /** * The table texera_db.workflow_executions. 
*/ diff --git a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/TexeraDb.java b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/TexeraDb.java index f138504cef0..b7f5eb48d72 100644 --- a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/TexeraDb.java +++ b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/TexeraDb.java @@ -15,6 +15,7 @@ import edu.uci.ics.texera.dao.jooq.generated.tables.User; import edu.uci.ics.texera.dao.jooq.generated.tables.UserConfig; import edu.uci.ics.texera.dao.jooq.generated.tables.Workflow; +import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowComputingUnit; import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowExecutions; import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowOfProject; import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowOfUser; @@ -40,7 +41,7 @@ @SuppressWarnings({ "all", "unchecked", "rawtypes" }) public class TexeraDb extends SchemaImpl { - private static final long serialVersionUID = -2031114180; + private static final long serialVersionUID = 1442899429; /** * The reference instance of texera_db @@ -102,6 +103,11 @@ public class TexeraDb extends SchemaImpl { */ public final Workflow WORKFLOW = edu.uci.ics.texera.dao.jooq.generated.tables.Workflow.WORKFLOW; + /** + * The table texera_db.workflow_computing_unit. + */ + public final WorkflowComputingUnit WORKFLOW_COMPUTING_UNIT = edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT; + /** * The table texera_db.workflow_executions. */ @@ -180,6 +186,7 @@ private final List> getTables0() { User.USER, UserConfig.USER_CONFIG, Workflow.WORKFLOW, + WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT, WorkflowExecutions.WORKFLOW_EXECUTIONS, WorkflowOfProject.WORKFLOW_OF_PROJECT, WorkflowOfUser.WORKFLOW_OF_USER, diff --git a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/WorkflowComputingUnit.java b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/WorkflowComputingUnit.java new file mode 100644 index 00000000000..83f69e16e71 --- /dev/null +++ b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/WorkflowComputingUnit.java @@ -0,0 +1,170 @@ +/* + * This file is generated by jOOQ. + */ +package edu.uci.ics.texera.dao.jooq.generated.tables; + + +import edu.uci.ics.texera.dao.jooq.generated.Indexes; +import edu.uci.ics.texera.dao.jooq.generated.Keys; +import edu.uci.ics.texera.dao.jooq.generated.TexeraDb; +import edu.uci.ics.texera.dao.jooq.generated.tables.records.WorkflowComputingUnitRecord; + +import java.sql.Timestamp; +import java.util.Arrays; +import java.util.List; + +import org.jooq.Field; +import org.jooq.ForeignKey; +import org.jooq.Identity; +import org.jooq.Index; +import org.jooq.Name; +import org.jooq.Record; +import org.jooq.Row5; +import org.jooq.Schema; +import org.jooq.Table; +import org.jooq.TableField; +import org.jooq.UniqueKey; +import org.jooq.impl.DSL; +import org.jooq.impl.TableImpl; +import org.jooq.types.UInteger; + + +/** + * This class is generated by jOOQ. 
+ */ +@SuppressWarnings({ "all", "unchecked", "rawtypes" }) +public class WorkflowComputingUnit extends TableImpl { + + private static final long serialVersionUID = -1011820662; + + /** + * The reference instance of texera_db.workflow_computing_unit + */ + public static final WorkflowComputingUnit WORKFLOW_COMPUTING_UNIT = new WorkflowComputingUnit(); + + /** + * The class holding records for this type + */ + @Override + public Class getRecordType() { + return WorkflowComputingUnitRecord.class; + } + + /** + * The column texera_db.workflow_computing_unit.uid. + */ + public final TableField UID = createField(DSL.name("uid"), org.jooq.impl.SQLDataType.INTEGERUNSIGNED.nullable(false), this, ""); + + /** + * The column texera_db.workflow_computing_unit.name. + */ + public final TableField NAME = createField(DSL.name("name"), org.jooq.impl.SQLDataType.VARCHAR(128).nullable(false), this, ""); + + /** + * The column texera_db.workflow_computing_unit.cuid. + */ + public final TableField CUID = createField(DSL.name("cuid"), org.jooq.impl.SQLDataType.INTEGERUNSIGNED.nullable(false).identity(true), this, ""); + + /** + * The column texera_db.workflow_computing_unit.creation_time. + */ + public final TableField CREATION_TIME = createField(DSL.name("creation_time"), org.jooq.impl.SQLDataType.TIMESTAMP.nullable(false).defaultValue(org.jooq.impl.DSL.field("CURRENT_TIMESTAMP", org.jooq.impl.SQLDataType.TIMESTAMP)), this, ""); + + /** + * The column texera_db.workflow_computing_unit.terminate_time. + */ + public final TableField TERMINATE_TIME = createField(DSL.name("terminate_time"), org.jooq.impl.SQLDataType.TIMESTAMP, this, ""); + + /** + * Create a texera_db.workflow_computing_unit table reference + */ + public WorkflowComputingUnit() { + this(DSL.name("workflow_computing_unit"), null); + } + + /** + * Create an aliased texera_db.workflow_computing_unit table reference + */ + public WorkflowComputingUnit(String alias) { + this(DSL.name(alias), WORKFLOW_COMPUTING_UNIT); + } + + /** + * Create an aliased texera_db.workflow_computing_unit table reference + */ + public WorkflowComputingUnit(Name alias) { + this(alias, WORKFLOW_COMPUTING_UNIT); + } + + private WorkflowComputingUnit(Name alias, Table aliased) { + this(alias, aliased, null); + } + + private WorkflowComputingUnit(Name alias, Table aliased, Field[] parameters) { + super(alias, null, aliased, parameters, DSL.comment("")); + } + + public WorkflowComputingUnit(Table child, ForeignKey key) { + super(child, key, WORKFLOW_COMPUTING_UNIT); + } + + @Override + public Schema getSchema() { + return TexeraDb.TEXERA_DB; + } + + @Override + public List getIndexes() { + return Arrays.asList(Indexes.WORKFLOW_COMPUTING_UNIT_PRIMARY); + } + + @Override + public Identity getIdentity() { + return Keys.IDENTITY_WORKFLOW_COMPUTING_UNIT; + } + + @Override + public UniqueKey getPrimaryKey() { + return Keys.KEY_WORKFLOW_COMPUTING_UNIT_PRIMARY; + } + + @Override + public List> getKeys() { + return Arrays.>asList(Keys.KEY_WORKFLOW_COMPUTING_UNIT_PRIMARY); + } + + @Override + public WorkflowComputingUnit as(String alias) { + return new WorkflowComputingUnit(DSL.name(alias), this); + } + + @Override + public WorkflowComputingUnit as(Name alias) { + return new WorkflowComputingUnit(alias, this); + } + + /** + * Rename this table + */ + @Override + public WorkflowComputingUnit rename(String name) { + return new WorkflowComputingUnit(DSL.name(name), null); + } + + /** + * Rename this table + */ + @Override + public WorkflowComputingUnit rename(Name name) { + return new 
WorkflowComputingUnit(name, null); + } + + // ------------------------------------------------------------------------- + // Row5 type methods + // ------------------------------------------------------------------------- + + @Override + public Row5 fieldsRow() { + return (Row5) super.fieldsRow(); + } +} diff --git a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/daos/WorkflowComputingUnitDao.java b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/daos/WorkflowComputingUnitDao.java new file mode 100644 index 00000000000..8ff1f9b332d --- /dev/null +++ b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/daos/WorkflowComputingUnitDao.java @@ -0,0 +1,119 @@ +/* + * This file is generated by jOOQ. + */ +package edu.uci.ics.texera.dao.jooq.generated.tables.daos; + + +import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowComputingUnit; +import edu.uci.ics.texera.dao.jooq.generated.tables.records.WorkflowComputingUnitRecord; + +import java.sql.Timestamp; +import java.util.List; + +import org.jooq.Configuration; +import org.jooq.impl.DAOImpl; +import org.jooq.types.UInteger; + + +/** + * This class is generated by jOOQ. + */ +@SuppressWarnings({ "all", "unchecked", "rawtypes" }) +public class WorkflowComputingUnitDao extends DAOImpl { + + /** + * Create a new WorkflowComputingUnitDao without any configuration + */ + public WorkflowComputingUnitDao() { + super(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT, edu.uci.ics.texera.dao.jooq.generated.tables.pojos.WorkflowComputingUnit.class); + } + + /** + * Create a new WorkflowComputingUnitDao with an attached configuration + */ + public WorkflowComputingUnitDao(Configuration configuration) { + super(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT, edu.uci.ics.texera.dao.jooq.generated.tables.pojos.WorkflowComputingUnit.class, configuration); + } + + @Override + public UInteger getId(edu.uci.ics.texera.dao.jooq.generated.tables.pojos.WorkflowComputingUnit object) { + return object.getCuid(); + } + + /** + * Fetch records that have uid BETWEEN lowerInclusive AND upperInclusive + */ + public List fetchRangeOfUid(UInteger lowerInclusive, UInteger upperInclusive) { + return fetchRange(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.UID, lowerInclusive, upperInclusive); + } + + /** + * Fetch records that have uid IN (values) + */ + public List fetchByUid(UInteger... values) { + return fetch(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.UID, values); + } + + /** + * Fetch records that have name BETWEEN lowerInclusive AND upperInclusive + */ + public List fetchRangeOfName(String lowerInclusive, String upperInclusive) { + return fetchRange(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.NAME, lowerInclusive, upperInclusive); + } + + /** + * Fetch records that have name IN (values) + */ + public List fetchByName(String... values) { + return fetch(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.NAME, values); + } + + /** + * Fetch records that have cuid BETWEEN lowerInclusive AND upperInclusive + */ + public List fetchRangeOfCuid(UInteger lowerInclusive, UInteger upperInclusive) { + return fetchRange(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CUID, lowerInclusive, upperInclusive); + } + + /** + * Fetch records that have cuid IN (values) + */ + public List fetchByCuid(UInteger... 
values) { + return fetch(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CUID, values); + } + + /** + * Fetch a unique record that has cuid = value + */ + public edu.uci.ics.texera.dao.jooq.generated.tables.pojos.WorkflowComputingUnit fetchOneByCuid(UInteger value) { + return fetchOne(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CUID, value); + } + + /** + * Fetch records that have creation_time BETWEEN lowerInclusive AND upperInclusive + */ + public List fetchRangeOfCreationTime(Timestamp lowerInclusive, Timestamp upperInclusive) { + return fetchRange(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CREATION_TIME, lowerInclusive, upperInclusive); + } + + /** + * Fetch records that have creation_time IN (values) + */ + public List fetchByCreationTime(Timestamp... values) { + return fetch(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CREATION_TIME, values); + } + + /** + * Fetch records that have terminate_time BETWEEN lowerInclusive AND upperInclusive + */ + public List fetchRangeOfTerminateTime(Timestamp lowerInclusive, Timestamp upperInclusive) { + return fetchRange(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.TERMINATE_TIME, lowerInclusive, upperInclusive); + } + + /** + * Fetch records that have terminate_time IN (values) + */ + public List fetchByTerminateTime(Timestamp... values) { + return fetch(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.TERMINATE_TIME, values); + } +} diff --git a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/interfaces/IWorkflowComputingUnit.java b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/interfaces/IWorkflowComputingUnit.java new file mode 100644 index 00000000000..38b2bc13deb --- /dev/null +++ b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/interfaces/IWorkflowComputingUnit.java @@ -0,0 +1,82 @@ +/* + * This file is generated by jOOQ. + */ +package edu.uci.ics.texera.dao.jooq.generated.tables.interfaces; + + +import java.io.Serializable; +import java.sql.Timestamp; + +import org.jooq.types.UInteger; + + +/** + * This class is generated by jOOQ. + */ +@SuppressWarnings({ "all", "unchecked", "rawtypes" }) +public interface IWorkflowComputingUnit extends Serializable { + + /** + * Setter for texera_db.workflow_computing_unit.uid. + */ + public void setUid(UInteger value); + + /** + * Getter for texera_db.workflow_computing_unit.uid. + */ + public UInteger getUid(); + + /** + * Setter for texera_db.workflow_computing_unit.name. + */ + public void setName(String value); + + /** + * Getter for texera_db.workflow_computing_unit.name. + */ + public String getName(); + + /** + * Setter for texera_db.workflow_computing_unit.cuid. + */ + public void setCuid(UInteger value); + + /** + * Getter for texera_db.workflow_computing_unit.cuid. + */ + public UInteger getCuid(); + + /** + * Setter for texera_db.workflow_computing_unit.creation_time. + */ + public void setCreationTime(Timestamp value); + + /** + * Getter for texera_db.workflow_computing_unit.creation_time. + */ + public Timestamp getCreationTime(); + + /** + * Setter for texera_db.workflow_computing_unit.terminate_time. + */ + public void setTerminateTime(Timestamp value); + + /** + * Getter for texera_db.workflow_computing_unit.terminate_time. 
+ */ + public Timestamp getTerminateTime(); + + // ------------------------------------------------------------------------- + // FROM and INTO + // ------------------------------------------------------------------------- + + /** + * Load data from another generated Record/POJO implementing the common interface IWorkflowComputingUnit + */ + public void from(edu.uci.ics.texera.dao.jooq.generated.tables.interfaces.IWorkflowComputingUnit from); + + /** + * Copy data into another generated Record/POJO implementing the common interface IWorkflowComputingUnit + */ + public E into(E into); +} diff --git a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/pojos/WorkflowComputingUnit.java b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/pojos/WorkflowComputingUnit.java new file mode 100644 index 00000000000..fd9712e1ebc --- /dev/null +++ b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/pojos/WorkflowComputingUnit.java @@ -0,0 +1,134 @@ +/* + * This file is generated by jOOQ. + */ +package edu.uci.ics.texera.dao.jooq.generated.tables.pojos; + + +import edu.uci.ics.texera.dao.jooq.generated.tables.interfaces.IWorkflowComputingUnit; + +import java.sql.Timestamp; + +import org.jooq.types.UInteger; + + +/** + * This class is generated by jOOQ. + */ +@SuppressWarnings({ "all", "unchecked", "rawtypes" }) +public class WorkflowComputingUnit implements IWorkflowComputingUnit { + + private static final long serialVersionUID = -891676537; + + private UInteger uid; + private String name; + private UInteger cuid; + private Timestamp creationTime; + private Timestamp terminateTime; + + public WorkflowComputingUnit() {} + + public WorkflowComputingUnit(IWorkflowComputingUnit value) { + this.uid = value.getUid(); + this.name = value.getName(); + this.cuid = value.getCuid(); + this.creationTime = value.getCreationTime(); + this.terminateTime = value.getTerminateTime(); + } + + public WorkflowComputingUnit( + UInteger uid, + String name, + UInteger cuid, + Timestamp creationTime, + Timestamp terminateTime + ) { + this.uid = uid; + this.name = name; + this.cuid = cuid; + this.creationTime = creationTime; + this.terminateTime = terminateTime; + } + + @Override + public UInteger getUid() { + return this.uid; + } + + @Override + public void setUid(UInteger uid) { + this.uid = uid; + } + + @Override + public String getName() { + return this.name; + } + + @Override + public void setName(String name) { + this.name = name; + } + + @Override + public UInteger getCuid() { + return this.cuid; + } + + @Override + public void setCuid(UInteger cuid) { + this.cuid = cuid; + } + + @Override + public Timestamp getCreationTime() { + return this.creationTime; + } + + @Override + public void setCreationTime(Timestamp creationTime) { + this.creationTime = creationTime; + } + + @Override + public Timestamp getTerminateTime() { + return this.terminateTime; + } + + @Override + public void setTerminateTime(Timestamp terminateTime) { + this.terminateTime = terminateTime; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("WorkflowComputingUnit ("); + + sb.append(uid); + sb.append(", ").append(name); + sb.append(", ").append(cuid); + sb.append(", ").append(creationTime); + sb.append(", ").append(terminateTime); + + sb.append(")"); + return sb.toString(); + } + + // ------------------------------------------------------------------------- + // FROM and INTO + // ------------------------------------------------------------------------- + + 
@Override + public void from(IWorkflowComputingUnit from) { + setUid(from.getUid()); + setName(from.getName()); + setCuid(from.getCuid()); + setCreationTime(from.getCreationTime()); + setTerminateTime(from.getTerminateTime()); + } + + @Override + public E into(E into) { + into.from(this); + return into; + } +} diff --git a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/records/WorkflowComputingUnitRecord.java b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/records/WorkflowComputingUnitRecord.java new file mode 100644 index 00000000000..61f1cbad707 --- /dev/null +++ b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/records/WorkflowComputingUnitRecord.java @@ -0,0 +1,288 @@ +/* + * This file is generated by jOOQ. + */ +package edu.uci.ics.texera.dao.jooq.generated.tables.records; + + +import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowComputingUnit; +import edu.uci.ics.texera.dao.jooq.generated.tables.interfaces.IWorkflowComputingUnit; + +import java.sql.Timestamp; + +import org.jooq.Field; +import org.jooq.Record1; +import org.jooq.Record5; +import org.jooq.Row5; +import org.jooq.impl.UpdatableRecordImpl; +import org.jooq.types.UInteger; + + +/** + * This class is generated by jOOQ. + */ +@SuppressWarnings({ "all", "unchecked", "rawtypes" }) +public class WorkflowComputingUnitRecord extends UpdatableRecordImpl implements Record5, IWorkflowComputingUnit { + + private static final long serialVersionUID = 486964963; + + /** + * Setter for texera_db.workflow_computing_unit.uid. + */ + @Override + public void setUid(UInteger value) { + set(0, value); + } + + /** + * Getter for texera_db.workflow_computing_unit.uid. + */ + @Override + public UInteger getUid() { + return (UInteger) get(0); + } + + /** + * Setter for texera_db.workflow_computing_unit.name. + */ + @Override + public void setName(String value) { + set(1, value); + } + + /** + * Getter for texera_db.workflow_computing_unit.name. + */ + @Override + public String getName() { + return (String) get(1); + } + + /** + * Setter for texera_db.workflow_computing_unit.cuid. + */ + @Override + public void setCuid(UInteger value) { + set(2, value); + } + + /** + * Getter for texera_db.workflow_computing_unit.cuid. + */ + @Override + public UInteger getCuid() { + return (UInteger) get(2); + } + + /** + * Setter for texera_db.workflow_computing_unit.creation_time. + */ + @Override + public void setCreationTime(Timestamp value) { + set(3, value); + } + + /** + * Getter for texera_db.workflow_computing_unit.creation_time. + */ + @Override + public Timestamp getCreationTime() { + return (Timestamp) get(3); + } + + /** + * Setter for texera_db.workflow_computing_unit.terminate_time. + */ + @Override + public void setTerminateTime(Timestamp value) { + set(4, value); + } + + /** + * Getter for texera_db.workflow_computing_unit.terminate_time. 
+ */ + @Override + public Timestamp getTerminateTime() { + return (Timestamp) get(4); + } + + // ------------------------------------------------------------------------- + // Primary key information + // ------------------------------------------------------------------------- + + @Override + public Record1 key() { + return (Record1) super.key(); + } + + // ------------------------------------------------------------------------- + // Record5 type implementation + // ------------------------------------------------------------------------- + + @Override + public Row5 fieldsRow() { + return (Row5) super.fieldsRow(); + } + + @Override + public Row5 valuesRow() { + return (Row5) super.valuesRow(); + } + + @Override + public Field field1() { + return WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.UID; + } + + @Override + public Field field2() { + return WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.NAME; + } + + @Override + public Field field3() { + return WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CUID; + } + + @Override + public Field field4() { + return WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CREATION_TIME; + } + + @Override + public Field field5() { + return WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.TERMINATE_TIME; + } + + @Override + public UInteger component1() { + return getUid(); + } + + @Override + public String component2() { + return getName(); + } + + @Override + public UInteger component3() { + return getCuid(); + } + + @Override + public Timestamp component4() { + return getCreationTime(); + } + + @Override + public Timestamp component5() { + return getTerminateTime(); + } + + @Override + public UInteger value1() { + return getUid(); + } + + @Override + public String value2() { + return getName(); + } + + @Override + public UInteger value3() { + return getCuid(); + } + + @Override + public Timestamp value4() { + return getCreationTime(); + } + + @Override + public Timestamp value5() { + return getTerminateTime(); + } + + @Override + public WorkflowComputingUnitRecord value1(UInteger value) { + setUid(value); + return this; + } + + @Override + public WorkflowComputingUnitRecord value2(String value) { + setName(value); + return this; + } + + @Override + public WorkflowComputingUnitRecord value3(UInteger value) { + setCuid(value); + return this; + } + + @Override + public WorkflowComputingUnitRecord value4(Timestamp value) { + setCreationTime(value); + return this; + } + + @Override + public WorkflowComputingUnitRecord value5(Timestamp value) { + setTerminateTime(value); + return this; + } + + @Override + public WorkflowComputingUnitRecord values(UInteger value1, String value2, UInteger value3, Timestamp value4, Timestamp value5) { + value1(value1); + value2(value2); + value3(value3); + value4(value4); + value5(value5); + return this; + } + + // ------------------------------------------------------------------------- + // FROM and INTO + // ------------------------------------------------------------------------- + + @Override + public void from(IWorkflowComputingUnit from) { + setUid(from.getUid()); + setName(from.getName()); + setCuid(from.getCuid()); + setCreationTime(from.getCreationTime()); + setTerminateTime(from.getTerminateTime()); + } + + @Override + public E into(E into) { + into.from(this); + return into; + } + + // ------------------------------------------------------------------------- + // Constructors + // ------------------------------------------------------------------------- + + /** + * Create a detached WorkflowComputingUnitRecord + */ + public 
WorkflowComputingUnitRecord() { + super(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT); + } + + /** + * Create a detached, initialised WorkflowComputingUnitRecord + */ + public WorkflowComputingUnitRecord(UInteger uid, String name, UInteger cuid, Timestamp creationTime, Timestamp terminateTime) { + super(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT); + + set(0, uid); + set(1, name); + set(2, cuid); + set(3, creationTime); + set(4, terminateTime); + } +} diff --git a/core/gui/proxy.config.json b/core/gui/proxy.config.json index 29ce58446f0..979a0f9d5dd 100755 --- a/core/gui/proxy.config.json +++ b/core/gui/proxy.config.json @@ -1,24 +1,24 @@ { - "/api/compile": { - "target": "http://localhost:9090", - "secure": false, + "/api/computing-unit": { + "target": "http://localhost:8888", + "secure": false, "changeOrigin": true }, "/api": { - "target": "http://localhost:8080", + "target": "http://texera.example.com:30080", "secure": false, - "changeOrigin": false + "changeOrigin": true }, "/wsapi": { - "target": "http://localhost:8085", + "target": "http://texera.example.com:30080", "secure": false, - "changeOrigin": false, + "changeOrigin": true, "ws": true }, "/rtc": { - "target": "http://localhost:1234", + "target": "http://texera.example.com:30080", "ws": true, "secure": false, - "changeOrigin": false + "changeOrigin": true } } diff --git a/core/gui/src/app/app.module.ts b/core/gui/src/app/app.module.ts index 38f59a986a1..164d9c926f5 100644 --- a/core/gui/src/app/app.module.ts +++ b/core/gui/src/app/app.module.ts @@ -137,11 +137,14 @@ import { LandingPageComponent } from "./hub/component/landing-page/landing-page. import { BrowseSectionComponent } from "./hub/component/browse-section/browse-section.component"; import { BreakpointConditionInputComponent } from "./workspace/component/code-editor-dialog/breakpoint-condition-input/breakpoint-condition-input.component"; import { CodeDebuggerComponent } from "./workspace/component/code-editor-dialog/code-debugger.component"; +import { ComputingUnitSelectionComponent } from "./workspace/component/power-button/computing-unit-selection.component"; +import { NzDividerModule } from "ng-zorro-antd/divider"; import { GoogleAuthService } from "./common/service/user/google-auth.service"; import { SocialLoginModule, SocialAuthServiceConfig, GoogleSigninButtonModule } from "@abacritt/angularx-social-login"; import { GoogleLoginProvider } from "@abacritt/angularx-social-login"; import { lastValueFrom } from "rxjs"; import { HubSearchResultComponent } from "./hub/component/hub-search-result/hub-search-result.component"; +import { NzProgressModule } from "ng-zorro-antd/progress"; registerLocaleData(en); @@ -230,6 +233,7 @@ registerLocaleData(en); BreakpointConditionInputComponent, CodeDebuggerComponent, HubSearchResultComponent, + ComputingUnitSelectionComponent, ], imports: [ BrowserModule, @@ -289,8 +293,10 @@ registerLocaleData(en); NzTreeViewModule, NzNoAnimationModule, TreeModule, + NzDividerModule, SocialLoginModule, GoogleSigninButtonModule, + NzProgressModule, ], providers: [ provideNzI18n(en_US), diff --git a/core/gui/src/app/dashboard/component/dashboard.component.html b/core/gui/src/app/dashboard/component/dashboard.component.html index 5473ef1f408..4dff29a12a9 100644 --- a/core/gui/src/app/dashboard/component/dashboard.component.html +++ b/core/gui/src/app/dashboard/component/dashboard.component.html @@ -152,7 +152,7 @@ *ngIf="!isLogin && googleLogin" type="standard" size="large" - width="200"> + [width]="200"> diff --git 
a/core/gui/src/app/workspace/component/menu/menu.component.html b/core/gui/src/app/workspace/component/menu/menu.component.html index 55c7b241464..33db9d917b8 100644 --- a/core/gui/src/app/workspace/component/menu/menu.component.html +++ b/core/gui/src/app/workspace/component/menu/menu.component.html @@ -307,7 +307,12 @@ nzType="ellipsis">
[markup lost in extraction: the five lines added to menu.component.html here, and the entire new file core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.html that follows, had their HTML tags stripped. The surviving text fragments of the new template are a computing-unit dropdown rendering "{{ getComputingUnitName(unit.uri) }}", a "Computing Unit" label, a "Create Computing Unit" modal with "Select Memory" and "Select CPU" controls, and CPU/Memory meters rendering "{{getCpuValue() | number:'1.2-2' || '0.00'}} / {{getCpuLimit()}} {{getCpuLimitUnit()}}" and "{{getMemoryValue() | number:'1.2-2' || '0.00'}} / {{getMemoryLimit()}} {{getMemoryLimitUnit()}}".]
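Since the template markup above is unrecoverable, here is a minimal sketch of how such a template could be wired up, inferred from the component class, the SCSS class names, and the ng-zorro modules registered in app.module.ts (NzProgressModule, NzDividerModule). The nz-select, nz-modal, and nz-progress elements and every binding beyond the surviving interpolations are assumptions, not the PR's actual markup:

```html
<!-- Hypothetical sketch; only the interpolated expressions are attested in this diff. -->
<div class="computing-units-selection">
  <!-- dropdown of computing units; entries that are not Running cannot be selected -->
  <nz-select class="computing-units-dropdown"
             [(ngModel)]="selectedComputingUnit"
             (ngModelChange)="onComputingUnitChange($event)">
    <nz-option *ngFor="let unit of computingUnits"
               [nzValue]="unit"
               [nzDisabled]="cannotSelectUnit(unit)"
               [nzLabel]="getComputingUnitName(unit.uri)">
    </nz-option>
  </nz-select>

  <!-- modal backing addComputeUnitModalVisible and its Ok/Cancel handlers -->
  <nz-modal [(nzVisible)]="addComputeUnitModalVisible"
            nzTitle="Create Computing Unit"
            (nzOnOk)="handleAddComputeUnitModalOk()"
            (nzOnCancel)="handleAddComputeUnitModalCancel()">
    <!-- "Select Memory" / "Select CPU" controls bound to selectedMemory / selectedCpu -->
  </nz-modal>

  <!-- CPU / memory meters; the ids match #cpu-progress-bar / #memory-progress-bar in the SCSS that follows -->
  <div class="metrics-container" *ngIf="isComputingUnitRunning()">
    <nz-progress id="cpu-progress-bar"
                 [nzPercent]="getCpuPercentage()"
                 [nzStatus]="getCpuStatus()"></nz-progress>
    <nz-progress id="memory-progress-bar"
                 [nzPercent]="getMemoryPercentage()"
                 [nzStatus]="getMemoryStatus()"></nz-progress>
  </div>
</div>
```

Because the component polls the computing-unit list every two seconds, bindings such as [nzPercent]="getCpuPercentage()" stay current without any extra change-detection plumbing.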
diff --git a/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.scss b/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.scss new file mode 100644 index 00000000000..00f186b0349 --- /dev/null +++ b/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.scss @@ -0,0 +1,124 @@ +.computing-units-selection { + display: grid; + grid-template-columns: 1fr; + justify-content: center; + align-items: center; + width: 100%; +} + +.computing-units-selection.metrics-visible { + grid-template-columns: repeat(2, 1fr); + justify-content: center; + align-items: center; + justify-items: end; + width: 100%; + gap: 10px; +} + +.computing-units-dropdown { + min-width: 250px; + max-width: 350px; +} + +#computing-unit-option.unit-selected { + background-color: #f1eeee; +} + +.create-computing-unit { + display: flex; + gap: 10px; + justify-content: start; + align-items: center; +} + +.computing-unit-name { + display: flex; + align-items: center; + justify-content: center; +} + +.terminate-icon:hover { + opacity: 0.8; +} + +.memory-selection, +.cpu-selection { + width: 200px; +} + +.create-compute-unit-container { + display: grid; + grid-template-columns: repeat(2, 1fr); + gap: 10px; + justify-content: start; + align-items: center; +} + +.select-unit { + display: flex; + flex-direction: column; + gap: 10px; + justify-content: center; + align-items: start; +} + +.metrics-container { + display: grid; + align-items: center; + grid-template-rows: repeat(2, 1fr); + gap: 3px; + height: 32px; + width: 100%; + padding: 0; + border: none; +} + +.metric-item { + display: grid; + grid-template-columns: repeat(2, 1fr); + width: 100%; + height: 16px; + align-items: center; +} + +#cpu-progress-bar, +#memory-progress-bar { + height: 16px; +} + +.metric-label { + font-size: 10px; +} + +.resource-metrics { + width: 250px; + display: grid; + grid-template-columns: repeat(2, 1fr); + grid-template-rows: repeat(1, 1fr); + justify-content: start; + align-items: center; + gap: 5px; +} + +.general-metric { + display: flex; + flex-direction: column; + width: 100%; + background-color: #f9fafb; + border-radius: 3px; + padding: 10px; + gap: 3px; +} + +.metric-unit { + font-size: 8px; +} + +.metric-name { + font-size: 10px; + margin: 0; +} + +.metric-value { + margin: 0; +} diff --git a/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.spec.ts b/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.spec.ts new file mode 100644 index 00000000000..cd03abe8d8f --- /dev/null +++ b/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.spec.ts @@ -0,0 +1,31 @@ +import { ComponentFixture, TestBed } from "@angular/core/testing"; +import { HttpClientTestingModule } from "@angular/common/http/testing"; +import { ComputingUnitSelectionComponent } from "./computing-unit-selection.component"; +import { NzButtonModule } from "ng-zorro-antd/button"; +import { CommonModule } from "@angular/common"; +import { NzIconModule } from "ng-zorro-antd/icon"; + +describe("PowerButtonComponent", () => { + let component: ComputingUnitSelectionComponent; + let fixture: ComponentFixture; + + beforeEach(async () => { + await TestBed.configureTestingModule({ + declarations: [ComputingUnitSelectionComponent], // Declare if not standalone + imports: [ + HttpClientTestingModule, // Use TestingModule instead of HttpClientModule + CommonModule, + NzButtonModule, + NzIconModule, + ], + 
}).compileComponents(); + + fixture = TestBed.createComponent(ComputingUnitSelectionComponent); + component = fixture.componentInstance; + fixture.detectChanges(); + }); + + it("should create", () => { + expect(component).toBeTruthy(); + }); +}); diff --git a/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.ts b/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.ts new file mode 100644 index 00000000000..6229d82b4f7 --- /dev/null +++ b/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.ts @@ -0,0 +1,354 @@ +import { Component, Input, OnInit } from "@angular/core"; +import { interval } from "rxjs"; +import { switchMap } from "rxjs/operators"; +import { WorkflowComputingUnitManagingService } from "../../service/workflow-computing-unit/workflow-computing-unit-managing.service"; +import { DashboardWorkflowComputingUnit } from "../../types/workflow-computing-unit"; +import { NotificationService } from "../../../common/service/notification/notification.service"; +import { WorkflowWebsocketService } from "../../service/workflow-websocket/workflow-websocket.service"; +import { WorkflowActionService } from "../../service/workflow-graph/model/workflow-action.service"; +import { isDefined } from "../../../common/util/predicate"; +import { UntilDestroy, untilDestroyed } from "@ngneat/until-destroy"; + +@UntilDestroy() +@Component({ + selector: "texera-computing-unit-selection", + templateUrl: "./computing-unit-selection.component.html", + styleUrls: ["./computing-unit-selection.component.scss"], +}) +export class ComputingUnitSelectionComponent implements OnInit { + @Input() + workflowId: number | undefined; + + selectedComputingUnit: DashboardWorkflowComputingUnit | null = null; + computingUnits: DashboardWorkflowComputingUnit[] = []; + private readonly REFRESH_INTERVAL_MS = 2000; + + addComputeUnitModalVisible = false; + selectedMemory: string = "2Gi"; + selectedCpu: string = "2"; + + constructor( + private computingUnitService: WorkflowComputingUnitManagingService, + private notificationService: NotificationService, + private workflowWebsocketService: WorkflowWebsocketService, + private workflowActionService: WorkflowActionService + ) {} + + ngOnInit(): void { + this.refreshComputingUnits(); + } + + /** + * Periodically refresh the list of computing units. + */ + private refreshComputingUnits(): void { + interval(this.REFRESH_INTERVAL_MS) + .pipe( + switchMap(() => this.computingUnitService.listComputingUnits()), + untilDestroyed(this) + ) + .subscribe({ + next: (units: DashboardWorkflowComputingUnit[]) => this.updateComputingUnits(units), + error: (err: unknown) => console.error("Failed to fetch computing units:", err), + }); + } + + /** + * Update the computing units list, maintaining object references for the same CUID. + */ + private updateComputingUnits(newUnits: DashboardWorkflowComputingUnit[]): void { + const unitMap = new Map(this.computingUnits.map(unit => [unit.computingUnit.cuid, unit])); + + this.computingUnits = newUnits.map(newUnit => + unitMap.has(newUnit.computingUnit.cuid) + ? Object.assign(unitMap.get(newUnit.computingUnit.cuid)!, newUnit) + : newUnit + ); + + // If selected computing unit is removed, deselect it + if ( + this.selectedComputingUnit && + !this.computingUnits.some(unit => unit.computingUnit.cuid === this.selectedComputingUnit!.computingUnit.cuid) + ) { + this.selectedComputingUnit = null; + } + } + + /** + * Start a new computing unit. 
+   */
+  startComputingUnit(): void {
+    const computeUnitName = `Compute for Workflow ${this.workflowId}`;
+    const computeCPU = this.selectedCpu;
+    const computeMemory = this.selectedMemory;
+    this.computingUnitService
+      .createComputingUnit(computeUnitName, computeCPU, computeMemory)
+      .pipe(untilDestroyed(this))
+      .subscribe({
+        next: (unit: DashboardWorkflowComputingUnit) => {
+          this.notificationService.success("Successfully created the new compute unit");
+          this.refreshComputingUnits();
+        },
+        error: (err: unknown) => this.notificationService.error("Failed to start computing unit"),
+      });
+  }
+
+  /**
+   * Terminate a computing unit.
+   * @param cuid The CUID of the unit to terminate.
+   */
+  terminateComputingUnit(cuid: number): void {
+    const uri = this.computingUnits.find(unit => unit.computingUnit.cuid === cuid)?.uri;
+
+    if (!uri) {
+      this.notificationService.error("Invalid computing unit URI.");
+      return;
+    }
+
+    this.computingUnitService
+      .terminateComputingUnit(uri)
+      .pipe(untilDestroyed(this))
+      .subscribe({
+        next: (res: Response) => {
+          this.notificationService.success(`Terminated ${this.getComputingUnitName(uri)}`);
+          this.refreshComputingUnits();
+        },
+        error: (err: unknown) => this.notificationService.error("Failed to terminate computing unit"),
+      });
+  }
+
+  /**
+   * Called whenever the selected computing unit changes.
+   */
+  onComputingUnitChange(newSelection: DashboardWorkflowComputingUnit | null): void {
+    console.log("Selected computing unit changed to:", newSelection);
+    const wid = this.workflowActionService.getWorkflowMetadata()?.wid;
+    if (newSelection && isDefined(wid)) {
+      console.log(`Selected Unit URI: ${newSelection.uri}`);
+      this.workflowWebsocketService.reopenWebsocket(wid, newSelection.computingUnit.cuid);
+    } else {
+      console.log("Selection cleared.");
+      this.workflowWebsocketService.closeWebsocket();
+    }
+  }
+
+  isComputingUnitRunning(): boolean {
+    return this.selectedComputingUnit != null && this.selectedComputingUnit.status === "Running";
+  }
+
+  computeStatus(): string {
+    if (!this.selectedComputingUnit) {
+      return "processing";
+    }
+    switch (this.selectedComputingUnit.status) {
+      case "Running":
+        return "success";
+      case "Pending":
+      case "Terminating":
+        return "warning";
+      default:
+        return "error";
+    }
+  }
+
+  cannotSelectUnit(unit: DashboardWorkflowComputingUnit): boolean {
+    return !unit || unit.status !== "Running";
+  }
+
+  isSelectedUnit(unit: DashboardWorkflowComputingUnit): boolean {
+    return unit.uri === this.selectedComputingUnit?.uri;
+  }
+
+  showAddComputeUnitModalVisible(): void {
+    this.addComputeUnitModalVisible = true;
+  }
+
+  handleAddComputeUnitModalOk(): void {
+    this.startComputingUnit();
+    this.addComputeUnitModalVisible = false;
+  }
+
+  handleAddComputeUnitModalCancel(): void {
+    this.addComputeUnitModalVisible = false;
+  }
+
+  /**
+   * Gets the computing unit name from the unit's URI
+   * @param unitURI (i.e. "computing-unit-85.workflow-computing-unit-svc.workflow-computing-unit-pool.svc.cluster.local")
+   * @return "Computing unit 85"
+   */
+  getComputingUnitName(unitURI: string): string {
+    const computingUnit = unitURI.split(".")[0];
+    return computingUnit
+      .split("-")
+      .map((word, index) => (index < 2 ? word.charAt(0).toUpperCase() + word.slice(1) : word))
+      .join(" ");
+  }
+
+  /**
+   * Parses computing units resource unit
+   * @param resource (i.e. "12412512n")
+   * @return associated unit with resource (i.e. "n", "Mi", "Gi", ...)
+   */
+  parseResourceUnit(resource: string): string {
+    const match = resource.match(/[a-z].*/i);
+    return match ? match[0] : "";
+  }
+
+  /**
+   * Parses a computing unit's numerical value
+   * @param resource (i.e. "12412512n")
+   * @return associated number with resource (i.e. 12412512)
+   */
+  parseResourceNumber(resource: string): number {
+    const match = resource.match(/[0-9.]*/);
+    return match ? Number(match[0]) : 0;
+  }
+
+  /**
+   * Convert a computing unit's CPU resource value to a specific unit
+   * @param from (i.e. "12412512n")
+   * @param toUnit (i.e. "" for cores)
+   * @return i.e. "0.012412512 " (12412512 nanocores = 0.012412512 cores)
+   */
+  cpuResourceConversion(from: string, toUnit: string): string {
+    // CPU conversion constants (base unit: nanocores)
+    type CpuUnit = "n" | "u" | "m" | "";
+    const cpuUnits: Record<CpuUnit, number> = {
+      n: 1, // nanocores
+      u: 10 ** 3, // microcores
+      m: 10 ** 6, // millicores
+      "": 10 ** 9, // cores
+    };
+
+    const fromNumber: number = this.parseResourceNumber(from);
+    const fromUnit: string = this.parseResourceUnit(from);
+
+    if (!(fromUnit in cpuUnits) || !(toUnit in cpuUnits)) {
+      return "";
+    }
+    return `${fromNumber * (cpuUnits[fromUnit as CpuUnit] / cpuUnits[toUnit as CpuUnit])} ${toUnit}`;
+  }
+
+  /**
+   * Convert a computing unit's memory resource value to a specific unit
+   * @param from (i.e. "523Mi")
+   * @param toUnit (i.e. "Gi")
+   * @return i.e. "0.5107421875 Gi"
+   */
+  memoryResourceConversion(from: string, toUnit: string): string {
+    // Memory conversion constants (base unit: bytes)
+    type MemoryUnit = "Ki" | "Mi" | "Gi" | "";
+    const memoryUnits = {
+      "": 1, // bytes
+      Ki: 1024, // KiB
+      Mi: 1024 ** 2, // MiB
+      Gi: 1024 ** 3, // GiB
+    };
+
+    const fromNumber: number = this.parseResourceNumber(from);
+    const fromUnit: string = this.parseResourceUnit(from);
+
+    if (!(fromUnit in memoryUnits) || !(toUnit in memoryUnits)) {
+      return "";
+    }
+    return `${fromNumber * (memoryUnits[fromUnit as MemoryUnit] / memoryUnits[toUnit as MemoryUnit])} ${toUnit}`;
+  }
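+
+  // Worked examples for the two converters above:
+  //   cpuResourceConversion("12412512n", "")   -> "0.012412512 "   (12412512 nanocores / 10^9 = 0.012412512 cores)
+  //   memoryResourceConversion("523Mi", "Gi")  -> "0.5107421875 Gi" (523 / 1024 = 0.5107421875)
+  // An unrecognized unit on either side yields "" rather than throwing.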
"Core" : "Cores"; + } + return this.parseResourceUnit(this.getCurrentComputingUnitCpuLimit()); + } + + getMemoryLimit(): number { + return this.parseResourceNumber(this.getCurrentComputingUnitMemoryLimit()); + } + + getMemoryLimitUnit(): string { + return this.parseResourceUnit(this.getCurrentComputingUnitMemoryLimit()); + } + + getCpuValue(): number { + // convert to appropriate unit based on the limit unit + const cpuLimitUnit: string = this.getCpuLimitUnit(); + const convertedValue: string = this.cpuResourceConversion(this.getCurrentComputingUnitCpuUsage(), cpuLimitUnit); + return this.parseResourceNumber(convertedValue); + } + + getMemoryValue(): number { + // convert to appropriate unit based on the limit + const memoryLimitUnit: string = this.getMemoryLimitUnit(); + const convertedValue: string = this.memoryResourceConversion( + this.getCurrentComputingUnitMemoryUsage(), + memoryLimitUnit + ); + return this.parseResourceNumber(convertedValue); + } + + getCpuPercentage(): number { + // handle divison by zero + const cpuLimit = this.getCpuLimit(); + if (cpuLimit <= 0) { + return 0; + } + return (this.getCpuValue() / cpuLimit) * 100; + } + + getCpuStatus(): "success" | "exception" | "active" | "normal" { + const usage = this.getCpuValue(); + const limit = this.getCpuLimit(); + return usage >= limit ? "exception" : "active"; + } + + getMemoryPercentage(): number { + // handle divison by zero + const memoryLimit = this.getMemoryLimit(); + if (memoryLimit <= 0) { + return 0; + } + return (this.getMemoryValue() / memoryLimit) * 100; + } + + getMemoryStatus(): "success" | "exception" | "active" | "normal" { + const usage = this.getMemoryValue(); + const limit = this.getMemoryLimit(); + return usage >= limit ? "exception" : "active"; + } + + getCpuUnit(): string { + return this.parseResourceUnit(this.getCurrentComputingUnitCpuUsage()); + } + + getMemoryUnit(): string { + return this.parseResourceUnit(this.getCurrentComputingUnitMemoryUsage()); + } +} diff --git a/core/gui/src/app/workspace/service/workflow-computing-unit/workflow-computing-unit-managing.service.ts b/core/gui/src/app/workspace/service/workflow-computing-unit/workflow-computing-unit-managing.service.ts new file mode 100644 index 00000000000..d8e0aefae18 --- /dev/null +++ b/core/gui/src/app/workspace/service/workflow-computing-unit/workflow-computing-unit-managing.service.ts @@ -0,0 +1,72 @@ +import { Injectable } from "@angular/core"; +import { HttpClient, HttpParams } from "@angular/common/http"; +import { Observable } from "rxjs"; +import { AppSettings } from "../../../common/app-setting"; +import { DashboardWorkflowComputingUnit, WorkflowComputingUnitMetrics } from "../../types/workflow-computing-unit"; + +export const COMPUTING_UNIT_BASE_URL = "computing-unit"; +export const COMPUTING_UNIT_METRICS_BASE_URL = "resource-metrics"; +export const COMPUTING_UNIT_CREATE_URL = `${COMPUTING_UNIT_BASE_URL}/create`; +export const COMPUTING_UNIT_TERMINATE_URL = `${COMPUTING_UNIT_BASE_URL}/terminate`; +export const COMPUTING_UNIT_LIST_URL = `${COMPUTING_UNIT_BASE_URL}`; + +@Injectable({ + providedIn: "root", +}) +export class WorkflowComputingUnitManagingService { + constructor(private http: HttpClient) {} + + /** + * Create a new workflow computing unit (pod). + * @param name The name for the computing unit. + * @param cpuLimit The cpu resource limit for the computing unit. + * @param memoryLimit The memory resource limit for the computing unit. + * @param unitType + * @returns An Observable of the created WorkflowComputingUnit. 
+   */
+  public createComputingUnit(
+    name: string,
+    cpuLimit: string,
+    memoryLimit: string,
+    unitType: string = "k8s_pod"
+  ): Observable<DashboardWorkflowComputingUnit> {
+    const body = { name, cpuLimit, memoryLimit, unitType };
+
+    return this.http.post<DashboardWorkflowComputingUnit>(
+      `${AppSettings.getApiEndpoint()}/${COMPUTING_UNIT_CREATE_URL}`,
+      body
+    );
+  }
+
+  /**
+   * Terminate a computing unit (pod) by its URI.
+   * @returns An Observable of the server response.
+   * @param uri
+   */
+  public terminateComputingUnit(uri: string): Observable<Response> {
+    const body = { uri: uri, name: "dummy" };
+
+    return this.http.post<Response>(`${AppSettings.getApiEndpoint()}/${COMPUTING_UNIT_TERMINATE_URL}`, body);
+  }
+
+  /**
+   * List all active computing units.
+   * @returns An Observable of a list of WorkflowComputingUnit.
+   */
+  public listComputingUnits(): Observable<DashboardWorkflowComputingUnit[]> {
+    return this.http.get<DashboardWorkflowComputingUnit[]>(
+      `${AppSettings.getApiEndpoint()}/${COMPUTING_UNIT_LIST_URL}`
+    );
+  }
+
+  /**
+   * Get a computing unit's resource metrics
+   * @returns an Observable of WorkflowComputingUnitMetrics
+   * @param cuid
+   */
+  public getComputingUnitMetrics(cuid: number): Observable<WorkflowComputingUnitMetrics> {
+    return this.http.get<WorkflowComputingUnitMetrics>(
+      `${AppSettings.getApiEndpoint()}/${COMPUTING_UNIT_BASE_URL}/${cuid}/metrics`
+    );
+  }
+}
diff --git a/core/gui/src/app/workspace/service/workflow-computing-unit/workflow-pod-brain.service.spec.ts b/core/gui/src/app/workspace/service/workflow-computing-unit/workflow-pod-brain.service.spec.ts
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/core/gui/src/app/workspace/service/workflow-websocket/workflow-websocket.service.ts b/core/gui/src/app/workspace/service/workflow-websocket/workflow-websocket.service.ts
index e128684153c..5e3871da82d 100644
--- a/core/gui/src/app/workspace/service/workflow-websocket/workflow-websocket.service.ts
+++ b/core/gui/src/app/workspace/service/workflow-websocket/workflow-websocket.service.ts
@@ -13,6 +13,7 @@ import { delayWhen, filter, map, retryWhen, tap } from "rxjs/operators";
 import { environment } from "../../../../environments/environment";
 import { AuthService } from "../../../common/service/user/auth.service";
 import { getWebsocketUrl } from "src/app/common/util/url";
+import { isDefined } from "../../../common/util/predicate";
 
 export const WS_HEARTBEAT_INTERVAL_MS = 10000;
 export const WS_RECONNECT_INTERVAL_MS = 3000;
@@ -21,11 +22,12 @@ export const WS_RECONNECT_INTERVAL_MS = 3000;
   providedIn: "root",
 })
 export class WorkflowWebsocketService {
-  private static readonly TEXERA_WEBSOCKET_ENDPOINT = "wsapi/workflow-websocket";
+  private static readonly TEXERA_WEBSOCKET_ENDPOINT = environment.envoyUrl;
 
   public isConnected: boolean = false;
   public numWorkers: number = -1;
   private connectedWid: number = 0;
+  private connectedCuid?: number;
   private websocket?: WebSocketSubject;
   private wsWithReconnectSubscription?: Subscription;
@@ -63,16 +65,25 @@ export class WorkflowWebsocketService {
   public closeWebsocket() {
     this.wsWithReconnectSubscription?.unsubscribe();
     this.websocket?.complete();
+    this.connectedCuid = undefined;
   }
 
-  public openWebsocket(wId: number) {
+  public openWebsocket(wId: number, uId?: number, cuId?: number) {
+    if (uId === undefined) {
+      console.log(`uId is ${uId}, defaulting to uId = 1`);
+      uId = 1;
+    }
     const websocketUrl =
       getWebsocketUrl(WorkflowWebsocketService.TEXERA_WEBSOCKET_ENDPOINT, "") +
       "?wid=" +
       wId +
+      "&uid=" +
+      uId +
+      (isDefined(cuId) ? `&cuid=${cuId}` : "") +
      (environment.userSystemEnabled && AuthService.getAccessToken() !== null ?
"&access-token=" + AuthService.getAccessToken() : ""); + console.log("websocketUrl", websocketUrl); this.websocket = webSocket(websocketUrl); // setup reconnection logic const wsWithReconnect = this.websocket.pipe( @@ -101,15 +112,16 @@ export class WorkflowWebsocketService { } this.isConnected = true; this.connectedWid = wId; + if (isDefined(cuId)) this.connectedCuid = cuId; }); } - public reopenWebsocket(wId: number) { - if (this.isConnected && this.connectedWid === wId) { + public reopenWebsocket(wId: number, cuId?: number) { + if (this.isConnected && this.connectedWid === wId && isDefined(cuId) && this.connectedCuid == cuId) { // prevent reconnections return; } this.closeWebsocket(); - this.openWebsocket(wId); + this.openWebsocket(wId, undefined, cuId); } } diff --git a/core/gui/src/app/workspace/types/workflow-computing-unit.ts b/core/gui/src/app/workspace/types/workflow-computing-unit.ts new file mode 100644 index 00000000000..33565b50a27 --- /dev/null +++ b/core/gui/src/app/workspace/types/workflow-computing-unit.ts @@ -0,0 +1,25 @@ +export interface WorkflowComputingUnit { + cuid: number; + uid: number; + name: string; + creationTime: number; + terminateTime: number | undefined; +} + +export interface WorkflowComputingUnitResourceLimit { + cpuLimit: string; + memoryLimit: string; +} + +export interface WorkflowComputingUnitMetrics { + cpuUsage: string; + memoryUsage: string; +} + +export interface DashboardWorkflowComputingUnit { + computingUnit: WorkflowComputingUnit; + uri: string; + status: string; + metrics: WorkflowComputingUnitMetrics; + resourceLimits: WorkflowComputingUnitResourceLimit; +} diff --git a/core/gui/src/environments/environment.default.ts b/core/gui/src/environments/environment.default.ts index 52560cdcfe0..ca66270988f 100644 --- a/core/gui/src/environments/environment.default.ts +++ b/core/gui/src/environments/environment.default.ts @@ -12,6 +12,11 @@ export const defaultEnvironment = { */ apiUrl: "api", + /** + * root API of the envoy proxy + */ + envoyUrl: "wsapi/workflow-websocket", + /** * whether export execution result is supported */ @@ -26,7 +31,12 @@ export const defaultEnvironment = { /** * whether user system is enabled */ - userSystemEnabled: false, + userSystemEnabled: true, + + /** + * whether workflow computing unit system is enabled + */ + computingUnitSystemEnabled: true, /** * whether local login is enabled @@ -50,7 +60,7 @@ export const defaultEnvironment = { /** * whether workflow executions tracking feature is enabled */ - workflowExecutionsTrackingEnabled: false, + workflowExecutionsTrackingEnabled: true, /** * whether linkBreakpoint is supported diff --git a/core/gui/src/styles.scss b/core/gui/src/styles.scss index 6c2ac14306b..fd9a924c32a 100644 --- a/core/gui/src/styles.scss +++ b/core/gui/src/styles.scss @@ -76,3 +76,21 @@ hr { .annotation-highlight { background-color: #6a5acd; } + +// For compute-unit-selection.html +#cpu-progress-bar .ant-progress-inner, +#memory-progress-bar .ant-progress-inner { + vertical-align: super !important; +} + +#cpu-progress-bar *, +#memory-progress-bar * { + line-height: 16px !important; +} + +#computing-unit-option .ant-menu-title-content { + display: flex !important; + width: 100% !important; + align-items: center !important; + justify-content: space-between !important; +} diff --git a/core/scripts/brain.sh b/core/scripts/brain.sh new file mode 100755 index 00000000000..3c925d740e5 --- /dev/null +++ b/core/scripts/brain.sh @@ -0,0 +1,2 @@ +cd workflow-pod-brain 
+target/universal/workflow-pod-brain-0.1.0-SNAPSHOT/bin/workflow-pod-brain server src/main/resources/config.yaml diff --git a/core/scripts/build-brain.sh b/core/scripts/build-brain.sh new file mode 100755 index 00000000000..7236ce1672b --- /dev/null +++ b/core/scripts/build-brain.sh @@ -0,0 +1,4 @@ +cd workflow-pod-brain +sbt clean dist +unzip target/universal/workflow-pod-brain-0.1.0-SNAPSHOT.zip -d target/universal/ +rm target/universal/workflow-pod-brain-0.1.0-SNAPSHOT.zip diff --git a/core/scripts/build-docker-image/webserver.sh b/core/scripts/build-docker-image/webserver.sh new file mode 100755 index 00000000000..6e835897c66 --- /dev/null +++ b/core/scripts/build-docker-image/webserver.sh @@ -0,0 +1 @@ + docker build -t bobbai/texera-webserver:dev-usersys -f core/amber/webserver.dockerfile . \ No newline at end of file diff --git a/core/scripts/build-docker-image/workflow-compiling-service.sh b/core/scripts/build-docker-image/workflow-compiling-service.sh new file mode 100755 index 00000000000..9f0237e3569 --- /dev/null +++ b/core/scripts/build-docker-image/workflow-compiling-service.sh @@ -0,0 +1 @@ + docker build -t bobbai/texera-workflow-compiling-service:dev -f core/workflow-compiling-service/workflow-compiling-service.dockerfile . \ No newline at end of file diff --git a/core/scripts/build-docker-image/workflow-computing-unit-managing-service.sh b/core/scripts/build-docker-image/workflow-computing-unit-managing-service.sh new file mode 100755 index 00000000000..c2eba708d45 --- /dev/null +++ b/core/scripts/build-docker-image/workflow-computing-unit-managing-service.sh @@ -0,0 +1 @@ + docker build -t bobbai/texera-workflow-computing-unit-managing-service:dev -f core/workflow-computing-unit-managing-service/workflow-computing-unit-managing-service.dockerfile . \ No newline at end of file diff --git a/core/scripts/build-docker-image/workflow-computing-unit.sh b/core/scripts/build-docker-image/workflow-computing-unit.sh new file mode 100755 index 00000000000..d2d4ab34fa4 --- /dev/null +++ b/core/scripts/build-docker-image/workflow-computing-unit.sh @@ -0,0 +1 @@ + docker build -t bobbai/texera-workflow-computing-unit:dev -f core/amber/computing-unit.dockerfile . \ No newline at end of file diff --git a/core/scripts/build-services.sh b/core/scripts/build-services.sh index 47f273640b1..2f344d87802 100755 --- a/core/scripts/build-services.sh +++ b/core/scripts/build-services.sh @@ -2,5 +2,8 @@ sbt clean dist unzip workflow-compiling-service/target/universal/workflow-compiling-service-0.1.0.zip -d target/ rm workflow-compiling-service/target/universal/workflow-compiling-service-0.1.0.zip +unzip workflow-computing-unit-managing-service/target/universal/workflow-computing-unit-managing-service-0.1.0.zip -d target/ +rm workflow-computing-unit-managing-service/target/universal/workflow-computing-unit-managing-service-0.1.0.zip + unzip amber/target/universal/texera-0.1-SNAPSHOT.zip -d amber/target/ rm amber/target/universal/texera-0.1-SNAPSHOT.zip diff --git a/core/scripts/deploy-brain.sh b/core/scripts/deploy-brain.sh new file mode 100755 index 00000000000..63682b67674 --- /dev/null +++ b/core/scripts/deploy-brain.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +# Start brain.sh in the background +bash scripts/brain.sh & +BRAIN_PID=$! 
# Store the PID of brain.sh
+
+# Trap SIGTERM and SIGINT and forward them as SIGTERM to the background processes
+trap 'kill -TERM $BRAIN_PID; wait $BRAIN_PID' SIGTERM SIGINT
+
+# Wait for brain.sh to complete
+wait -n
diff --git a/core/scripts/deploy-docker-trap.sh b/core/scripts/deploy-docker-trap.sh
new file mode 100644
index 00000000000..c0d328c8963
--- /dev/null
+++ b/core/scripts/deploy-docker-trap.sh
@@ -0,0 +1,30 @@
+#!/usr/bin/env bash
+
+# Start server.sh in the background
+bash scripts/server.sh &
+SERVER_PID=$! # Store the PID of server.sh
+
+# Wait for server.sh to start by sleeping for a brief period (adjust as needed)
+sleep 5
+
+# Check if server.sh is still running; if not, exit with an error
+if ! ps -p $SERVER_PID > /dev/null; then
+  >&2 echo 'server.sh failed to start.'
+  exit 1
+fi
+
+# Start worker.sh in the background
+bash scripts/worker.sh &
+WORKER_PID=$! # Store the PID of worker.sh
+
+# Trap SIGTERM and SIGINT and forward them as SIGTERM to the background processes
+trap 'kill -TERM $SERVER_PID; kill -TERM $WORKER_PID; wait $SERVER_PID $WORKER_PID' SIGTERM SIGINT
+
+# Wait for one of server.sh or worker.sh to complete
+wait -n
+
+# # Send SIGTERM to both processes in case one exits early
+# kill -TERM $SERVER_PID $WORKER_PID
+
+# # Give them time to shut down gracefully
+# wait $SERVER_PID $WORKER_PID
\ No newline at end of file
diff --git a/core/scripts/server.sh b/core/scripts/server.sh
index 61201d64e06..282b8cc5689 100755
--- a/core/scripts/server.sh
+++ b/core/scripts/server.sh
@@ -1,2 +1,3 @@
+#!/bin/bash
 cd amber
 target/texera-0.1-SNAPSHOT/bin/texera-web-application
\ No newline at end of file
diff --git a/core/scripts/sql/texera_ddl.sql b/core/scripts/sql/texera_ddl.sql
index 244527735ae..f5be597e4ae 100644
--- a/core/scripts/sql/texera_ddl.sql
+++ b/core/scripts/sql/texera_ddl.sql
@@ -234,6 +234,16 @@ MODIFY COLUMN is_public BOOLEAN NOT NULL DEFAULT true;
 ALTER TABLE workflow
 CHANGE COLUMN is_published is_public BOOLEAN NOT NULL DEFAULT false;
+CREATE TABLE IF NOT EXISTS workflow_computing_unit
+(
+    `uid` INT UNSIGNED NOT NULL,
+    `name` VARCHAR(128) NOT NULL,
+    `cuid` INT UNSIGNED AUTO_INCREMENT NOT NULL,
+    `creation_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    `terminate_time` TIMESTAMP DEFAULT NULL,
+    PRIMARY KEY (`cuid`)
+) ENGINE = INNODB;
+
 CREATE TABLE IF NOT EXISTS operator_executions (
     operator_execution_id BIGINT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
     workflow_execution_id INT UNSIGNED NOT NULL,
@@ -254,4 +264,4 @@ CREATE TABLE IF NOT EXISTS operator_runtime_statistics (
     num_workers INT UNSIGNED NOT NULL DEFAULT 0,
     PRIMARY KEY (operator_execution_id, time),
     FOREIGN KEY (operator_execution_id) REFERENCES operator_executions (operator_execution_id) ON DELETE CASCADE
-);
+);
\ No newline at end of file
diff --git a/core/scripts/texera-helmchart/Chart.yaml b/core/scripts/texera-helmchart/Chart.yaml
new file mode 100644
index 00000000000..addd437305f
--- /dev/null
+++ b/core/scripts/texera-helmchart/Chart.yaml
@@ -0,0 +1,34 @@
+apiVersion: v2
+name: texera-helm
+description: A Helm chart for Kubernetes
+
+# A chart can be either an 'application' or a 'library' chart.
+#
+# Application charts are a collection of templates that can be packaged into versioned archives
+# to be deployed.
+#
+# Library charts provide useful utilities or functions for the chart developer. They're included as
+# a dependency of application charts to inject those utilities and functions into the rendering
+# pipeline.
Library charts do not define any templates and therefore cannot be deployed. +type: application + +# This is the chart version. This version number should be incremented each time you make changes +# to the chart and its templates, including the app version. +# Versions are expected to follow Semantic Versioning (https://semver.org/) +version: 0.1.0 + +# This is the version number of the application being deployed. This version number should be +# incremented each time you make changes to the application. Versions are not expected to +# follow Semantic Versioning. They should reflect the version the application is using. +# It is recommended to use it with quotes. +appVersion: "1.16.0" + + +dependencies: + - name: ingress-nginx + version: 4.11.3 + repository: https://kubernetes.github.io/ingress-nginx + + - name: mysql + version: 11.1.14 + repository: https://charts.bitnami.com/bitnami \ No newline at end of file diff --git a/core/scripts/texera-helmchart/files/texera_ddl.sql b/core/scripts/texera-helmchart/files/texera_ddl.sql new file mode 100644 index 00000000000..71dd433aa6a --- /dev/null +++ b/core/scripts/texera-helmchart/files/texera_ddl.sql @@ -0,0 +1,267 @@ +CREATE SCHEMA IF NOT EXISTS `texera_db`; +USE `texera_db`; + +DROP TABLE IF EXISTS `workflow_user_access`; +DROP TABLE IF EXISTS `workflow_of_user`; +DROP TABLE IF EXISTS `user_config`; +DROP TABLE IF EXISTS `user`; +DROP TABLE IF EXISTS `workflow`; +DROP TABLE IF EXISTS `workflow_version`; +DROP TABLE IF EXISTS `project`; +DROP TABLE IF EXISTS `workflow_of_project`; +DROP TABLE IF EXISTS `workflow_executions`; +DROP TABLE IF EXISTS `dataset`; +DROP TABLE IF EXISTS `dataset_user_access`; +DROP TABLE IF EXISTS `dataset_version`; +DROP TABLE IF EXISTS operator_executions; +DROP TABLE IF EXISTS operator_runtime_statistics; + +SET PERSIST time_zone = '+00:00'; -- this line is mandatory +SET PERSIST sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY','')); + +CREATE TABLE IF NOT EXISTS user +( + `uid` INT UNSIGNED AUTO_INCREMENT NOT NULL, + `name` VARCHAR(256) NOT NULL, + `email` VARCHAR(256) UNIQUE, + `password` VARCHAR(256), + `google_id` VARCHAR(256) UNIQUE, + `role` ENUM('INACTIVE', 'RESTRICTED', 'REGULAR', 'ADMIN') NOT NULL DEFAULT 'INACTIVE', + `google_avatar` VARCHAR(100) null, + PRIMARY KEY (`uid`), + CONSTRAINT CK_nulltest + CHECK (`password` IS NOT NULL OR `google_id` IS NOT NULL) +) ENGINE = INNODB, +-- start auto increment userID from 1 because userID 0 means user not exists + AUTO_INCREMENT = 1; + +CREATE TABLE IF NOT EXISTS user_config +( + `uid` INT UNSIGNED NOT NULL, + `key` varchar(256) NOT NULL, + `value` text NOT NULL, + PRIMARY KEY (`uid`, `key`), + FOREIGN KEY (`uid`) REFERENCES user (`uid`) ON DELETE CASCADE +) ENGINE = InnoDB; + +CREATE TABLE IF NOT EXISTS workflow +( + `name` VARCHAR(128) NOT NULL, + `description` VARCHAR(500), + `wid` INT UNSIGNED AUTO_INCREMENT NOT NULL, + `content` LONGTEXT NOT NULL, + `creation_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + `last_modified_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`wid`) +) ENGINE = INNODB, + AUTO_INCREMENT = 1; + +CREATE TABLE IF NOT EXISTS workflow_of_user +( + `uid` INT UNSIGNED NOT NULL, + `wid` INT UNSIGNED NOT NULL, + PRIMARY KEY (`uid`, `wid`), + FOREIGN KEY (`uid`) REFERENCES `user` (`uid`) ON DELETE CASCADE, + FOREIGN KEY (`wid`) REFERENCES `workflow` (`wid`) ON DELETE CASCADE +) ENGINE = INNODB; + +CREATE TABLE IF NOT EXISTS workflow_user_access +( + `uid` INT UNSIGNED NOT NULL, + 
`wid` INT UNSIGNED NOT NULL, + `privilege` ENUM('NONE', 'READ', 'WRITE') NOT NULL DEFAULT 'NONE', + PRIMARY KEY (`uid`, `wid`), + FOREIGN KEY (`uid`) REFERENCES `user` (`uid`) ON DELETE CASCADE, + FOREIGN KEY (`wid`) REFERENCES `workflow` (`wid`) ON DELETE CASCADE +) ENGINE = INNODB; + +CREATE TABLE IF NOT EXISTS workflow_version +( + `vid` INT UNSIGNED AUTO_INCREMENT NOT NULL, + `wid` INT UNSIGNED NOT NULL, + `content` TEXT NOT NULL, + `creation_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`vid`), + FOREIGN KEY (`wid`) REFERENCES `workflow` (`wid`) ON DELETE CASCADE +) ENGINE = INNODB; + +CREATE TABLE IF NOT EXISTS project +( + `pid` INT UNSIGNED AUTO_INCREMENT NOT NULL, + `name` VARCHAR(128) NOT NULL, + `description` VARCHAR(10000), + `owner_id` INT UNSIGNED NOT NULL, + `creation_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + `color` VARCHAR(6), + UNIQUE(`owner_id`, `name`), + PRIMARY KEY (`pid`), + FOREIGN KEY (`owner_id`) REFERENCES user (`uid`) ON DELETE CASCADE +) ENGINE = INNODB, + AUTO_INCREMENT = 1; + +CREATE TABLE IF NOT EXISTS workflow_of_project +( + `wid` INT UNSIGNED NOT NULL, + `pid` INT UNSIGNED NOT NULL, + PRIMARY KEY (`wid`, `pid`), + FOREIGN KEY (`wid`) REFERENCES `workflow` (`wid`) ON DELETE CASCADE, + FOREIGN KEY (`pid`) REFERENCES `project` (`pid`) ON DELETE CASCADE +) ENGINE = INNODB; + +CREATE TABLE IF NOT EXISTS project_user_access +( + `uid` INT UNSIGNED NOT NULL, + `pid` INT UNSIGNED NOT NULL, + `privilege` ENUM('NONE', 'READ', 'WRITE') NOT NULL DEFAULT 'NONE', + PRIMARY KEY (`uid`, `pid`), + FOREIGN KEY (`uid`) REFERENCES `user` (`uid`) ON DELETE CASCADE, + FOREIGN KEY (`pid`) REFERENCES `project` (`pid`) ON DELETE CASCADE +) ENGINE = INNODB; + +CREATE TABLE IF NOT EXISTS workflow_executions +( + `eid` INT UNSIGNED AUTO_INCREMENT NOT NULL, + `vid` INT UNSIGNED NOT NULL, + `uid` INT UNSIGNED NOT NULL, + `status` TINYINT NOT NULL DEFAULT 1, + `result` TEXT, /* pointer to volume */ + `starting_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + `last_update_time` TIMESTAMP, + `bookmarked` BOOLEAN DEFAULT FALSE, + `name` VARCHAR(128) NOT NULL DEFAULT 'Untitled Execution', + `environment_version` VARCHAR(128) NOT NULL, + `log_location` TEXT, /* uri to log storage */ + PRIMARY KEY (`eid`), + FOREIGN KEY (`vid`) REFERENCES `workflow_version` (`vid`) ON DELETE CASCADE, + FOREIGN KEY (`uid`) REFERENCES `user` (`uid`) ON DELETE CASCADE +) ENGINE = INNODB; + +CREATE TABLE IF NOT EXISTS public_project +( + `pid` INT UNSIGNED NOT NULL, + `uid` INT UNSIGNED, + PRIMARY KEY (`pid`), + FOREIGN KEY (`pid`) REFERENCES `project` (`pid`) ON DELETE CASCADE +) ENGINE = INNODB; + +CREATE TABLE IF NOT EXISTS dataset +( + `did` INT UNSIGNED AUTO_INCREMENT NOT NULL, + `owner_uid` INT UNSIGNED NOT NULL, + `name` VARCHAR(128) NOT NULL, + `is_public` TINYINT NOT NULL DEFAULT 1, + `description` VARCHAR(512) NOT NULL, + `creation_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY(`did`), + FOREIGN KEY (`owner_uid`) REFERENCES `user` (`uid`) ON DELETE CASCADE +) ENGINE = INNODB; + +CREATE TABLE IF NOT EXISTS dataset_user_access +( + `did` INT UNSIGNED NOT NULL, + `uid` INT UNSIGNED NOT NULL, + `privilege` ENUM('NONE', 'READ', 'WRITE') NOT NULL DEFAULT 'NONE', + PRIMARY KEY(`did`, `uid`), + FOREIGN KEY (`did`) REFERENCES `dataset` (`did`) ON DELETE CASCADE, + FOREIGN KEY (`uid`) REFERENCES `user` (`uid`) ON DELETE CASCADE +) ENGINE = INNODB; + +CREATE TABLE IF NOT EXISTS dataset_version +( + `dvid` INT UNSIGNED AUTO_INCREMENT NOT NULL, + `did` INT 
UNSIGNED NOT NULL, + `creator_uid` INT UNSIGNED NOT NULL, + `name` VARCHAR(128) NOT NULL, + `version_hash` VARCHAR(64) NOT NULL, + `creation_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY(`dvid`), + FOREIGN KEY (`did`) REFERENCES `dataset` (`did`) ON DELETE CASCADE +) ENGINE = INNODB; + + +-- create fulltext search indexes + +CREATE FULLTEXT INDEX `idx_workflow_name_description_content` ON `texera_db`.`workflow` (name, description, content); + +CREATE FULLTEXT INDEX `idx_user_name` ON `texera_db`.`user` (name); + +CREATE FULLTEXT INDEX `idx_user_project_name_description` ON `texera_db`.`project` (name, description); + +CREATE FULLTEXT INDEX `idx_dataset_name_description` ON `texera_db`.`dataset` (name, description); + +CREATE FULLTEXT INDEX `idx_dataset_version_name` ON `texera_db`.`dataset_version` (name); + +ALTER TABLE workflow + ADD is_published BOOLEAN NOT NULL DEFAULT false; + +CREATE TABLE IF NOT EXISTS workflow_user_likes +( + `uid` INT UNSIGNED NOT NULL, + `wid` INT UNSIGNED NOT NULL, + PRIMARY KEY (`uid`, `wid`), + FOREIGN KEY (`uid`) REFERENCES `user` (`uid`) ON DELETE CASCADE, + FOREIGN KEY (`wid`) REFERENCES `workflow` (`wid`) ON DELETE CASCADE +) ENGINE = INNODB; + +CREATE TABLE IF NOT EXISTS workflow_user_clones +( + `uid` INT UNSIGNED NOT NULL, + `wid` INT UNSIGNED NOT NULL, + PRIMARY KEY (`uid`, `wid`), + FOREIGN KEY (`uid`) REFERENCES `user` (`uid`) ON DELETE CASCADE, + FOREIGN KEY (`wid`) REFERENCES `workflow` (`wid`) ON DELETE CASCADE +) ENGINE = INNODB; + +CREATE TABLE IF NOT EXISTS workflow_user_activity ( + `uid` INT UNSIGNED NOT NULL DEFAULT 0, + `wid` INT UNSIGNED NOT NULL, + `ip` VARCHAR(15) DEFAULT NULL, + `activate` VARCHAR(10) NOT NULL, + `activity_time` TIMESTAMP DEFAULT CURRENT_TIMESTAMP +) ENGINE = INNODB; + +CREATE TABLE IF NOT EXISTS workflow_view_count +( + `wid` INT UNSIGNED NOT NULL, + `view_count` INT UNSIGNED NOT NULL DEFAULT 0, + PRIMARY KEY (`wid`), + FOREIGN KEY (`wid`) REFERENCES `workflow` (`wid`) ON DELETE CASCADE +) ENGINE = INNODB; + +ALTER TABLE dataset + MODIFY COLUMN is_public BOOLEAN NOT NULL DEFAULT true; + +ALTER TABLE workflow + CHANGE COLUMN is_published is_public BOOLEAN NOT NULL DEFAULT false; + +CREATE TABLE IF NOT EXISTS workflow_computing_unit +( + `uid` INT UNSIGNED NOT NULL, + `name` VARCHAR(128) NOT NULL, + `cuid` INT UNSIGNED AUTO_INCREMENT NOT NULL, + `creation_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + `terminate_time` TIMESTAMP DEFAULT NULL, + PRIMARY KEY (`cuid`) +) ENGINE = INNODB; + +CREATE TABLE IF NOT EXISTS operator_executions ( + operator_execution_id BIGINT UNSIGNED AUTO_INCREMENT PRIMARY KEY, + workflow_execution_id INT UNSIGNED NOT NULL, + operator_id VARCHAR(100) NOT NULL, + UNIQUE (workflow_execution_id, operator_id), + FOREIGN KEY (workflow_execution_id) REFERENCES workflow_executions (eid) ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS operator_runtime_statistics ( + operator_execution_id BIGINT UNSIGNED NOT NULL, + time TIMESTAMP(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + input_tuple_cnt BIGINT UNSIGNED NOT NULL DEFAULT 0, + output_tuple_cnt BIGINT UNSIGNED NOT NULL DEFAULT 0, + status TINYINT NOT NULL DEFAULT 1, + data_processing_time BIGINT UNSIGNED NOT NULL DEFAULT 0, + control_processing_time BIGINT UNSIGNED NOT NULL DEFAULT 0, + idle_time BIGINT UNSIGNED NOT NULL DEFAULT 0, + num_workers INT UNSIGNED NOT NULL DEFAULT 0, + PRIMARY KEY (operator_execution_id, time), + FOREIGN KEY (operator_execution_id) REFERENCES operator_executions (operator_execution_id) ON DELETE 
CASCADE +); \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/envoy-config.yaml b/core/scripts/texera-helmchart/templates/envoy-config.yaml new file mode 100644 index 00000000000..8dcccf1f10d --- /dev/null +++ b/core/scripts/texera-helmchart/templates/envoy-config.yaml @@ -0,0 +1,74 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: envoy-config + namespace: {{ .Values.namespace }} +data: + envoy.yaml: | + static_resources: + listeners: + - name: listener_0 + address: + socket_address: + address: 0.0.0.0 + port_value: 10000 + filter_chains: + - filters: + - name: envoy.filters.network.http_connection_manager + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager + stat_prefix: ingress_http + upgrade_configs: + - upgrade_type: websocket + route_config: + name: local_route + virtual_hosts: + - name: local_service + domains: ["*"] + routes: + - match: + prefix: "/wsapi" + route: + cluster: dynamic_service + prefix_rewrite: "/wsapi" + http_filters: + - name: envoy.filters.http.lua + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.http.lua.v3.Lua + inline_code: | + function envoy_on_request(request_handle) + local uri = request_handle:headers():get(":path") + local cuid = string.match(uri, "cuid=(%d+)") + if cuid then + local new_host = "computing-unit-" .. cuid .. ".workflow-computing-unit-svc.workflow-computing-unit-pool.svc.cluster.local:8085" + request_handle:headers():replace(":authority", new_host) + end + end + - name: envoy.filters.http.dynamic_forward_proxy + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.http.dynamic_forward_proxy.v3.FilterConfig + dns_cache_config: + name: dynamic_dns_cache + dns_lookup_family: V4_ONLY + dns_refresh_rate: 1s + - name: envoy.filters.http.router + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router + + access_log: + - name: envoy.access_loggers.stdout + typed_config: + "@type": type.googleapis.com/envoy.extensions.access_loggers.stream.v3.StdoutAccessLog + + clusters: + - name: dynamic_service + connect_timeout: 0.25s + lb_policy: CLUSTER_PROVIDED + cluster_type: + name: envoy.clusters.dynamic_forward_proxy + typed_config: + "@type": type.googleapis.com/envoy.extensions.clusters.dynamic_forward_proxy.v3.ClusterConfig + dns_cache_config: + name: dynamic_dns_cache + dns_lookup_family: V4_ONLY + dns_refresh_rate: 1s \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/envoy-deployment.yaml b/core/scripts/texera-helmchart/templates/envoy-deployment.yaml new file mode 100644 index 00000000000..a5c5aedd9d4 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/envoy-deployment.yaml @@ -0,0 +1,32 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ .Release.Name }}-envoy-deployment + namespace: {{ .Values.namespace }} +spec: + replicas: {{ .Values.envoy.replicas | default 1 }} + selector: + matchLabels: + app: envoy + template: + metadata: + labels: + app: envoy + spec: + containers: + - name: envoy + image: "{{ .Values.envoy.image.repository }}:{{ .Values.envoy.image.tag }}" + ports: + - containerPort: {{ .Values.envoy.port}} + volumeMounts: + - name: envoy-config + mountPath: /etc/envoy + readOnly: true + args: + - "-c" + - "/etc/envoy/envoy.yaml" # Specify the path to the configuration file + # - "--log-level debug" # Set level of logging + volumes: + - name: envoy-config + configMap: + name: envoy-config # Reference the 
ConfigMap created earlier \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/envoy-service.yaml b/core/scripts/texera-helmchart/templates/envoy-service.yaml new file mode 100644 index 00000000000..b4c23b9f186 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/envoy-service.yaml @@ -0,0 +1,17 @@ +apiVersion: v1 +kind: Service +metadata: + name: envoy-svc + namespace: {{ .Values.namespace }} +spec: + type: {{ .Values.envoy.service.type }} + selector: + app: envoy + ports: + - protocol: TCP + port: {{ .Values.envoy.service.port }} + targetPort: {{ .Values.envoy.service.port }} + # if service type is set to NodePort, include nodePort attribute + {{- if eq .Values.envoy.service.type "NodePort" }} + nodePort: {{ .Values.envoy.service.nodePort }} + {{- end }} \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/ingress.yaml b/core/scripts/texera-helmchart/templates/ingress.yaml new file mode 100644 index 00000000000..c0129020091 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/ingress.yaml @@ -0,0 +1,25 @@ +{{- if .Values.ingressPaths.enabled }} +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: {{ .Release.Name }}-ingress + namespace: {{ .Values.namespace }} + annotations: + # nginx.ingress.kubernetes.io/rewrite-target: /$1 + nginx.ingress.kubernetes.io/use-regex: "true" +spec: + ingressClassName: nginx + rules: + - host: {{ .Values.ingressPaths.hostname }} + http: + paths: + {{- range .Values.ingressPaths.paths }} + - path: {{ .path }} + pathType: Prefix + backend: + service: + name: {{ .serviceName }} + port: + number: {{ .servicePort }} + {{- end }} +{{- end }} \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/metrics-server.yaml b/core/scripts/texera-helmchart/templates/metrics-server.yaml new file mode 100644 index 00000000000..31ab7e011b3 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/metrics-server.yaml @@ -0,0 +1,202 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + labels: + k8s-app: metrics-server + name: metrics-server + namespace: kube-system +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRole +metadata: + labels: + k8s-app: metrics-server + rbac.authorization.k8s.io/aggregate-to-admin: "true" + rbac.authorization.k8s.io/aggregate-to-edit: "true" + rbac.authorization.k8s.io/aggregate-to-view: "true" + name: system:aggregated-metrics-reader +rules: +- apiGroups: + - metrics.k8s.io + resources: + - pods + - nodes + verbs: + - get + - list + - watch +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRole +metadata: + labels: + k8s-app: metrics-server + name: system:metrics-server +rules: +- apiGroups: + - "" + resources: + - nodes/metrics + verbs: + - get +- apiGroups: + - "" + resources: + - pods + - nodes + verbs: + - get + - list + - watch +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + labels: + k8s-app: metrics-server + name: metrics-server-auth-reader + namespace: kube-system +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: extension-apiserver-authentication-reader +subjects: +- kind: ServiceAccount + name: metrics-server + namespace: kube-system +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRoleBinding +metadata: + labels: + k8s-app: metrics-server + name: metrics-server:system:auth-delegator +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: system:auth-delegator +subjects: +- kind: ServiceAccount + name: metrics-server + 
namespace: kube-system +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRoleBinding +metadata: + labels: + k8s-app: metrics-server + name: system:metrics-server +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: system:metrics-server +subjects: +- kind: ServiceAccount + name: metrics-server + namespace: kube-system +--- +apiVersion: v1 +kind: Service +metadata: + labels: + k8s-app: metrics-server + name: metrics-server + namespace: kube-system +spec: + ports: + - name: https + port: 443 + protocol: TCP + targetPort: https + selector: + k8s-app: metrics-server +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + k8s-app: metrics-server + name: metrics-server + namespace: kube-system +spec: + selector: + matchLabels: + k8s-app: metrics-server + strategy: + rollingUpdate: + maxUnavailable: 0 + template: + metadata: + labels: + k8s-app: metrics-server + spec: + containers: + - args: + - --cert-dir=/tmp + - --secure-port=10250 + - --kubelet-preferred-address-types=InternalIP,ExternalIP,Hostname + - --kubelet-use-node-status-port + - --metric-resolution=15s + - --kubelet-insecure-tls + image: registry.k8s.io/metrics-server/metrics-server:v0.7.2 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 3 + httpGet: + path: /livez + port: https + scheme: HTTPS + periodSeconds: 10 + name: metrics-server + ports: + - containerPort: 10250 + name: https + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: /readyz + port: https + scheme: HTTPS + initialDelaySeconds: 20 + periodSeconds: 10 + resources: + requests: + cpu: 100m + memory: 200Mi + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + readOnlyRootFilesystem: true + runAsNonRoot: true + runAsUser: 1000 + seccompProfile: + type: RuntimeDefault + volumeMounts: + - mountPath: /tmp + name: tmp-dir + nodeSelector: + kubernetes.io/os: linux + priorityClassName: system-cluster-critical + serviceAccountName: metrics-server + volumes: + - emptyDir: {} + name: tmp-dir +--- +apiVersion: apiregistration.k8s.io/v1 +kind: APIService +metadata: + labels: + k8s-app: metrics-server + name: v1beta1.metrics.k8s.io +spec: + group: metrics.k8s.io + groupPriorityMinimum: 100 + insecureSkipTLSVerify: true + service: + name: metrics-server + namespace: kube-system + version: v1beta1 + versionPriority: 100 diff --git a/core/scripts/texera-helmchart/templates/mysql-init-db-config.yaml b/core/scripts/texera-helmchart/templates/mysql-init-db-config.yaml new file mode 100644 index 00000000000..788cd65356d --- /dev/null +++ b/core/scripts/texera-helmchart/templates/mysql-init-db-config.yaml @@ -0,0 +1,10 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: mysql-init-script + namespace: {{ .Values.namespace }} + labels: + app: {{ .Release.Name }} +data: + init.sql: | +{{ .Files.Get "files/texera_ddl.sql" | indent 4 }} \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/webserver-deployment.yaml b/core/scripts/texera-helmchart/templates/webserver-deployment.yaml new file mode 100644 index 00000000000..6d2e6d19db9 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/webserver-deployment.yaml @@ -0,0 +1,36 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ .Release.Name }}-{{ .Values.webserver.name }} + namespace: {{ .Values.namespace }} + labels: + app: {{ .Release.Name }}-{{ .Values.webserver.name }} +spec: + replicas: {{ .Values.webserver.numOfPods }} + selector: + matchLabels: + app: {{ .Release.Name }}-{{ 
.Values.webserver.name }} + template: + metadata: + labels: + app: {{ .Release.Name }}-{{ .Values.webserver.name }} + spec: + volumes: + - name: {{ .Values.webserver.name }}-pv + persistentVolumeClaim: + claimName: {{ .Values.webserver.name }}-pvc + containers: + - name: {{ .Values.webserver.name }} + image: {{ .Values.webserver.imageName }} + ports: + - containerPort: {{ .Values.webserver.service.port }} + env: + - name: JDBC_URL + value: "jdbc:mysql://{{ .Release.Name }}-mysql.{{ .Values.namespace }}.svc.cluster.local:3306/{{ .Values.mysql.auth.database }}" + - name: JDBC_USERNAME + value: {{ .Values.mysql.auth.username }} + - name: JDBC_PASSWORD + value: {{ .Values.mysql.auth.password }} + volumeMounts: + - name: {{ .Values.webserver.name }}-pv + mountPath: {{ .Values.webserver.volume.mountPath }} # Specify the mount path in the container \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/webserver-pv.yaml b/core/scripts/texera-helmchart/templates/webserver-pv.yaml new file mode 100644 index 00000000000..6e3d236c5ad --- /dev/null +++ b/core/scripts/texera-helmchart/templates/webserver-pv.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: PersistentVolume +metadata: + name: {{ .Values.webserver.name }}-pv + namespace: {{ .Values.namespace }} + labels: + app: {{ .Release.Name }}-{{ .Values.webserver.name }} +spec: + capacity: + storage: {{ .Values.webserver.volume.size }} + accessModes: {{ toYaml .Values.webserver.volume.accessModes | nindent 2 }} + persistentVolumeReclaimPolicy: Retain + storageClassName: {{ .Values.webserver.volume.storageClassName }} + hostPath: + path: {{ .Values.webserver.volume.hostPath }} \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/webserver-pvc.yaml b/core/scripts/texera-helmchart/templates/webserver-pvc.yaml new file mode 100644 index 00000000000..7a652ee533b --- /dev/null +++ b/core/scripts/texera-helmchart/templates/webserver-pvc.yaml @@ -0,0 +1,11 @@ +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: {{ .Values.webserver.name }}-pvc + namespace: {{ .Values.namespace }} +spec: + accessModes: {{ toYaml .Values.webserver.volume.accessModes | nindent 2 }} + resources: + requests: + storage: {{ .Values.webserver.volume.size }} + storageClassName: {{ .Values.webserver.volume.storageClassName }} \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/webserver-service.yaml b/core/scripts/texera-helmchart/templates/webserver-service.yaml new file mode 100644 index 00000000000..c8e6a98ab23 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/webserver-service.yaml @@ -0,0 +1,18 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ .Values.webserver.name }}-svc + namespace: {{ .Values.namespace }} +spec: + type: {{ .Values.webserver.service.type }} + selector: + app: {{ .Release.Name }}-{{ .Values.webserver.name }} + ports: + - name: api-port + protocol: TCP + port: {{ .Values.webserver.service.port }} + targetPort: {{ .Values.webserver.service.port }} + # if service type is set to NodePort, include nodePort attribute + {{- if eq .Values.webserver.service.type "NodePort" }} + nodePort: {{ .Values.webserver.service.nodePort }} + {{- end }} \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/workflow-compiling-service-deployment.yaml b/core/scripts/texera-helmchart/templates/workflow-compiling-service-deployment.yaml new file mode 100644 index 00000000000..30efec250bb --- /dev/null +++ 
b/core/scripts/texera-helmchart/templates/workflow-compiling-service-deployment.yaml @@ -0,0 +1,31 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ .Release.Name }}-{{ .Values.workflowCompilingService.name }} + namespace: {{ .Values.namespace }} + labels: + app: {{ .Release.Name }}-{{ .Values.workflowCompilingService.name }} +spec: + replicas: {{ .Values.workflowCompilingService.numOfPods }} + selector: + matchLabels: + app: {{ .Release.Name }}-{{ .Values.workflowCompilingService.name }} + template: + metadata: + labels: + app: {{ .Release.Name }}-{{ .Values.workflowCompilingService.name }} + spec: + containers: + - name: {{ .Values.workflowCompilingService.name }} + image: {{ .Values.workflowCompilingService.imageName }} + ports: + - containerPort: {{ .Values.workflowCompilingService.service.port }} + env: + - name: JDBC_URL + value: "jdbc:mysql://{{ .Release.Name }}-mysql.{{ .Values.namespace }}.svc.cluster.local:3306/{{ .Values.mysql.auth.database }}" + - name: JDBC_USERNAME + value: {{ .Values.mysql.auth.username }} + - name: JDBC_PASSWORD + value: {{ .Values.mysql.auth.password }} +{{/* - name: MONGODB_URL*/}} +{{/* value: "mongodb://{{ .Release.Name }}-mongodb.{{ .Release.Namespace }}.svc.cluster.local:27017/{{ index .Values.mongodb.auth.databases 0 }}"*/}} \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/workflow-compiling-service-service.yaml b/core/scripts/texera-helmchart/templates/workflow-compiling-service-service.yaml new file mode 100644 index 00000000000..c377418299b --- /dev/null +++ b/core/scripts/texera-helmchart/templates/workflow-compiling-service-service.yaml @@ -0,0 +1,18 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ .Values.workflowCompilingService.name }}-svc + namespace: {{ .Values.namespace }} +spec: + type: {{ .Values.workflowCompilingService.service.type }} + selector: + app: {{ .Release.Name }}-{{ .Values.workflowCompilingService.name }} + ports: + - name: api-port + protocol: TCP + port: {{ .Values.workflowCompilingService.service.port }} + targetPort: {{ .Values.workflowCompilingService.service.port }} + # if service type is set to NodePort, include nodePort attribute + {{- if eq .Values.workflowCompilingService.service.type "NodePort" }} + nodePort: {{ .Values.workflowCompilingService.service.nodePort }} + {{- end }} \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-deployment.yaml b/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-deployment.yaml new file mode 100644 index 00000000000..f062dd85487 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-deployment.yaml @@ -0,0 +1,32 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ .Release.Name }}-{{ .Values.workflowComputingUnitManager.name }} + namespace: {{ .Values.namespace }} + labels: + app: {{ .Release.Name }}-{{ .Values.workflowComputingUnitManager.name }} +spec: + replicas: {{ .Values.workflowComputingUnitManager.numOfPods }} + selector: + matchLabels: + app: {{ .Release.Name }}-{{ .Values.workflowComputingUnitManager.name }} + template: + metadata: + labels: + app: {{ .Release.Name }}-{{ .Values.workflowComputingUnitManager.name }} + spec: + serviceAccountName: {{ .Values.workflowComputingUnitManager.serviceAccountName }} + containers: + - name: {{ .Values.workflowComputingUnitManager.name }} + image: {{ .Values.workflowComputingUnitManager.imageName }} + ports: + - containerPort: {{ 
.Values.workflowComputingUnitManager.service.port }} + env: + - name: JDBC_URL + value: "jdbc:mysql://{{ .Release.Name }}-mysql.{{ .Values.namespace }}.svc.cluster.local:3306/{{ .Values.mysql.auth.database }}" + - name: JDBC_USERNAME + value: {{ .Values.mysql.auth.username }} + - name: JDBC_PASSWORD + value: {{ .Values.mysql.auth.password }} +{{/* - name: MONGODB_URL*/}} +{{/* value: "mongodb://{{ .Release.Name }}-mongodb.{{ .Release.Namespace }}.svc.cluster.local:27017/{{ index .Values.mongodb.auth.databases 0 }}"*/}} \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-service-account.yaml b/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-service-account.yaml new file mode 100644 index 00000000000..cd14d575c06 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-service-account.yaml @@ -0,0 +1,34 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ .Values.workflowComputingUnitManager.serviceAccountName }} + namespace: {{ .Values.namespace }} + +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: {{ .Values.workflowComputingUnitManager.name }} + namespace: {{ .Values.workflowComputingUnitPool.namespace }} +rules: + - apiGroups: [""] + resources: ["pods"] + verbs: ["get", "list", "watch", "create", "delete"] + - apiGroups: ["metrics.k8s.io"] # Added metrics permissions + resources: ["pods"] + verbs: ["list", "get"] # Added metrics permissions + +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: {{ .Values.workflowComputingUnitManager.name }}-binding + namespace: {{ .Values.workflowComputingUnitPool.namespace }} +subjects: + - kind: ServiceAccount + name: {{ .Values.workflowComputingUnitManager.serviceAccountName }} + namespace: {{ .Values.namespace }} +roleRef: + kind: Role + name: {{ .Values.workflowComputingUnitManager.name }} + apiGroup: rbac.authorization.k8s.io \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-service.yaml b/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-service.yaml new file mode 100644 index 00000000000..f90b1ee4e95 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-service.yaml @@ -0,0 +1,17 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ .Values.workflowComputingUnitManager.name }}-svc + namespace: {{ .Values.namespace }} +spec: + type: {{ .Values.workflowComputingUnitManager.service.type }} + selector: + app: {{ .Release.Name }}-{{ .Values.workflowComputingUnitManager.name }} + ports: + - protocol: TCP + port: {{ .Values.workflowComputingUnitManager.service.port }} + targetPort: {{ .Values.workflowComputingUnitManager.service.port }} + # if service type is set to NodePort, include nodePort attribute + {{- if eq .Values.workflowComputingUnitManager.service.type "NodePort" }} + nodePort: {{ .Values.workflowComputingUnitManager.service.nodePort }} + {{- end }} \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/workflow-computing-units-deployment.yaml b/core/scripts/texera-helmchart/templates/workflow-computing-units-deployment.yaml new file mode 100644 index 00000000000..90f89c3cc56 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/workflow-computing-units-deployment.yaml @@ -0,0 +1,32 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ .Release.Name }}-{{ .Values.workflowComputingUnit.name 
}} + namespace: {{ .Values.namespace }} + labels: + app: {{ .Release.Name }}-{{ .Values.workflowComputingUnit.name }} +spec: + replicas: {{ .Values.workflowComputingUnit.numOfPods | default 1 }} + selector: + matchLabels: + app: {{ .Release.Name }}-{{ .Values.workflowComputingUnit.name }} + template: + metadata: + labels: + app: {{ .Release.Name }}-{{ .Values.workflowComputingUnit.name }} + spec: + containers: + - name: {{ .Values.workflowComputingUnit.name }} + image: {{ .Values.workflowComputingUnit.imageName }} + imagePullPolicy: Never + ports: + - containerPort: {{ .Values.workflowComputingUnitPool.service.port }} + env: + - name: JDBC_URL + value: "jdbc:mysql://{{ .Release.Name }}-mysql.{{ .Values.namespace }}.svc.cluster.local:3306/{{ .Values.mysql.auth.database }}" + - name: JDBC_USERNAME + value: {{ .Values.mysql.auth.username }} + - name: JDBC_PASSWORD + value: {{ .Values.mysql.auth.password }} +{{/* - name: MONGODB_URL*/}} +{{/* value: "mongodb://{{ .Release.Name }}-mongodb.{{ .Release.Namespace }}.svc.cluster.local:27017/{{ index .Values.mongodb.auth.databases 0 }}"*/}} \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/workflow-computing-units-namespace.yaml b/core/scripts/texera-helmchart/templates/workflow-computing-units-namespace.yaml new file mode 100644 index 00000000000..f2bb8733fc2 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/workflow-computing-units-namespace.yaml @@ -0,0 +1,6 @@ +{{- if .Values.createNamespaces }} +apiVersion: v1 +kind: Namespace +metadata: + name: {{ .Values.workflowComputingUnitPool.namespace }} +{{- end }} diff --git a/core/scripts/texera-helmchart/templates/workflow-computing-units-service.yaml b/core/scripts/texera-helmchart/templates/workflow-computing-units-service.yaml new file mode 100644 index 00000000000..f4c0e9ea5a9 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/workflow-computing-units-service.yaml @@ -0,0 +1,13 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ .Values.workflowComputingUnitPool.name }}-svc + namespace: {{ .Values.workflowComputingUnitPool.namespace }} +spec: + clusterIP: None + selector: + type: computing-unit # TODO: consider change this + ports: + - protocol: TCP + port: {{ .Values.workflowComputingUnitPool.service.port }} + targetPort: {{ .Values.workflowComputingUnitPool.service.targetPort }} diff --git a/core/scripts/texera-helmchart/values.yaml b/core/scripts/texera-helmchart/values.yaml new file mode 100644 index 00000000000..152bf441ac6 --- /dev/null +++ b/core/scripts/texera-helmchart/values.yaml @@ -0,0 +1,145 @@ +namespace: texera-dev + +createNamespaces: true + +# Part 1: the configuration of mongodb and mysql +# We use external charts from bitnami, and pass the values stated below to them. 
+# For bitnami/mysql, check out: https://artifacthub.io/packages/helm/bitnami/mysql
+# For bitnami/mongodb, check out: https://artifacthub.io/packages/helm/bitnami/mongodb
+#
+#mysqlPv:
+#  name: mysql-pv
+#  size: 10Gi
+#  path: /mnt/data/mysql # the path on the node that contains the pv
+#  node: minikube # the node that contains the pv
+
+mysql:
+  namespaceOverride: texera-dev
+  image:
+    debug: true
+
+  initdbScriptsConfigMap: mysql-init-script
+  auth:
+    rootPassword: texera
+    database: texera_db
+    username: texera
+    password: texera
+
+# primary:
+#   startupProbe:
+#     initialDelaySeconds: 60
+#
+#   persistence:
+#     enabled: true
+#     existingClaim: mysql-pvc
+#
+# architecture: standalone
+
+webserver:
+  name: webserver
+  numOfPods: 3 # Number of pods for the Texera deployment
+  imageName: bobbai/texera-webserver:dev-usersys # image name of the Texera webserver
+  service:
+    type: NodePort # for testing purposes, NodePort mode is fine
+    port: 8080 # port of the pod
+    nodePort: 30081 # exposed port
+  volume:
+    enabled: true
+    size: 10Gi
+    hostPath: /mnt/data/webserver # Path on the host machine
+    mountPath: /core/amber/user-resources # Path inside the container
+    storageClassName: manual
+    accessModes:
+      - ReadWriteMany
+
+workflowComputingUnitManager:
+  name: workflow-computing-unit-manager
+  numOfPods: 1
+  serviceAccountName: workflow-computing-unit-manager-service-account
+  imageName: bobbai/texera-workflow-computing-unit-managing-service:dev
+  service:
+    type: NodePort # for testing purposes, NodePort mode is fine
+    port: 8888 # port of the pod
+    nodePort: 30082 # exposed port
+
+workflowCompilingService:
+  name: workflow-compiling-service
+  numOfPods: 1
+  imageName: bobbai/texera-workflow-compiling-service:dev
+  service:
+    type: NodePort
+    port: 9090
+    nodePort: 30083
+
+workflowComputingUnit:
+  name: workflow-computing-unit
+  numOfPods: 1
+  imageName: bobbai/texera-workflow-computing-unit:dev
+
+
+# Config required for envoy and workflow pods
+envoy:
+  replicas: 1
+  image:
+    repository: envoyproxy/envoy
+    tag: v1.31-latest
+  port:
+    10000
+  debug: false
+  service:
+    type: NodePort
+    port: 10000
+    nodePort: 30084
+
+# headless service
+# each pod's url is: computing-unit-%cuid.workflow-computing-unit-svc.workflow-computing-unit-pool.svc.cluster.local
+workflowComputingUnitPool:
+  name: workflow-computing-unit
+  namespace: workflow-computing-unit-pool
+  service:
+    port: 8085
+    targetPort: 8085
+
+
+# Ingress dependency configs
+ingress-nginx:
+  controller:
+    replicaCount: 1
+    service:
+      type: NodePort
+      nodePorts:
+        http: 30080
+    ingressClassResource:
+      name: nginx
+      enabled: true
+    resources:
+      limits:
+        cpu: 100m
+        memory: 128Mi
+      requests:
+        cpu: 100m
+        memory: 128Mi
+  rbac:
+    create: true
+
+
+# Custom Ingress resource configs
+ingressPaths:
+  enabled: true
+  hostname: "texera.example.com"
+  paths:
+    - path: /api/computing-unit
+      serviceName: workflow-computing-unit-manager-svc
+      servicePort: 8888
+    - path: /api/compile
+      serviceName: workflow-compiling-service-svc
+      servicePort: 9090
+    - path: /wsapi/workflow-websocket
+      serviceName: envoy-svc
+      servicePort: 10000
+    - path: /api
+      serviceName: webserver-svc
+      servicePort: 8080
+    - path: /
+      serviceName: webserver-svc
+      servicePort: 8080
diff --git a/core/scripts/workflow-compiling-service.sh b/core/scripts/workflow-compiling-service.sh
index 297035a557e..6e4942ac423 100755
--- a/core/scripts/workflow-compiling-service.sh
+++ b/core/scripts/workflow-compiling-service.sh
@@ -1 +1,2 @@
+#!/bin/bash
diff --git a/core/scripts/workflow-compiling-service.sh b/core/scripts/workflow-compiling-service.sh
index 297035a557e..6e4942ac423 100755
--- a/core/scripts/workflow-compiling-service.sh
+++ b/core/scripts/workflow-compiling-service.sh
@@ -1 +1,2 @@
+#!/bin/bash
 target/workflow-compiling-service-0.1.0/bin/workflow-compiling-service
\ No newline at end of file
diff --git a/core/scripts/workflow-computing-unit-managing-service.sh b/core/scripts/workflow-computing-unit-managing-service.sh
new file mode 100755
index 00000000000..100ceabb053
--- /dev/null
+++ b/core/scripts/workflow-computing-unit-managing-service.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+target/workflow-computing-unit-managing-service-0.1.0/bin/workflow-computing-unit-managing-service
\ No newline at end of file
diff --git a/core/scripts/workflow-computing-unit.sh b/core/scripts/workflow-computing-unit.sh
index f722b933cb4..ab7419a5881 100755
--- a/core/scripts/workflow-computing-unit.sh
+++ b/core/scripts/workflow-computing-unit.sh
@@ -1,3 +1,4 @@
+#!/bin/bash
 cd amber
 if [ ! -z $1 ]
 then
diff --git a/core/workflow-compiling-service/workflow-compiling-service.dockerfile b/core/workflow-compiling-service/workflow-compiling-service.dockerfile
new file mode 100644
index 00000000000..50339888a0c
--- /dev/null
+++ b/core/workflow-compiling-service/workflow-compiling-service.dockerfile
@@ -0,0 +1,17 @@
+FROM sbtscala/scala-sbt:eclipse-temurin-jammy-11.0.17_8_1.9.3_2.13.11
+
+# copy all projects under core to /core
+WORKDIR /core
+COPY core/ .
+
+RUN rm -rf amber/user-resources/*
+
+RUN apt-get update && apt-get install -y unzip
+
+# build the service
+WORKDIR /core
+RUN scripts/build-services.sh
+
+CMD ["scripts/workflow-compiling-service.sh"]
+
+EXPOSE 9090
\ No newline at end of file
diff --git a/core/workflow-computing-unit-managing-service/README.md b/core/workflow-computing-unit-managing-service/README.md
new file mode 100644
index 00000000000..6958f77116d
--- /dev/null
+++ b/core/workflow-computing-unit-managing-service/README.md
@@ -0,0 +1,62 @@
+# Workflow Pod Brain
+
+## Architecture
+
+![](./architecture.JPG)
+
+## GUI Design and User Experience
+A power button is added next to the `Run` button.
+![image](https://github.com/user-attachments/assets/afce0224-fe3b-4fde-a953-bc3810a86c63)
+
+By default, no pod is running. When the user clicks this power button, it turns into a spinning wheel until the pod is ready.
+![image](https://github.com/user-attachments/assets/6c5c04fc-12c0-45f8-ba74-7bd05425242f)
+![image](https://github.com/user-attachments/assets/c955288c-f0e1-4984-b6b8-2207ec3b9273)
+Once the pod is ready, the button becomes a terminate button; when the user clicks it, the pod is terminated. After termination, the button becomes a power button again.
+![image](https://github.com/user-attachments/assets/25d0ea02-628b-47ca-9b9e-7ed27de1ae7b)
+![image](https://github.com/user-attachments/assets/f230d545-defe-478d-8f1e-238abe22ac6f)
+
+### Involved GUI codes
+
+The component for the workflow's workspace menu is `core/gui/src/app/workspace/component/menu`; you will need to modify this component to add the button. Please make the button a **standalone** component and let the existing `menu` component import the power button you wrote. This keeps the code clean.
+
+To send requests to the backend from the frontend, you may add a new `service` in the GUI (e.g. `workflow-computing-unit-managing.service.ts`). You can refer to the existing services under `core/gui/src/app/workspace/service` to create a new one just for the workflow pod brain; the endpoints such a service would call are sketched below.
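+
+For reference, a minimal request-level sketch of those endpoints in Scala, using the JDK 11 HTTP client (the host/port and payload values are assumptions for a local run against `WorkflowComputingUnitManagingResource`; in the cluster the manager sits behind the `/api/computing-unit` ingress path):
+
+```scala
+import java.net.URI
+import java.net.http.{HttpClient, HttpRequest, HttpResponse}
+
+object ComputingUnitApiSketch extends App {
+  val client = HttpClient.newHttpClient()
+
+  // Matches WorkflowComputingUnitCreationParams(name, unitType, cpuLimit, memoryLimit).
+  val body =
+    """{"name":"demo-unit","unitType":"k8s","cpuLimit":"2","memoryLimit":"2Gi"}"""
+
+  val create = HttpRequest
+    .newBuilder(URI.create("http://localhost:8888/api/computing-unit/create"))
+    .header("Content-Type", "application/json")
+    .POST(HttpRequest.BodyPublishers.ofString(body))
+    .build()
+
+  // The response is a DashboardWorkflowComputingUnit JSON object; its "uri"
+  // field is what /api/computing-unit/terminate later expects.
+  val resp: HttpResponse[String] =
+    client.send(create, HttpResponse.BodyHandlers.ofString())
+  println(resp.body())
+}
+```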
+
+## Structure Overview
+
+### Configuration Files
+Under `src/main/resources/`, there are two configuration files:
+- application.conf: defines the Kubernetes config and the MySQL connection config
+- config.yaml: defines the web application's config
+
+All configuration items are exposed by `src/main/scala/config/ApplicationConf.scala`; you can directly use the `config` object to access them in the application code, as sketched below.
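+
+A minimal sketch of the access pattern (mirroring `WorkflowComputingUnitManagingServiceConf` in this PR; the `${?ENV_VAR}` entries in `application.conf` mean an environment variable, when set, overrides the default):
+
+```scala
+import com.typesafe.config.{Config, ConfigFactory}
+
+object ConfAccessSketch {
+  // Loads application.conf from the classpath and resolves substitutions,
+  // including the ${?KUBERNETES_*} environment-variable fallbacks.
+  private val conf: Config = ConfigFactory.load()
+
+  val poolNamespace: String = conf.getString("kubernetes.compute-unit-pool-namespace")
+  val portNum: Int          = conf.getInt("kubernetes.port-num")
+}
+```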
+
+### Application codes
+
+All application codes are under `src/main/scala`
+- WorkflowPodBrainApplication.scala: the main launcher of the workflow pod app
+- service/KubernetesClientService.scala(TODO): the encapsulation of the Kubernetes pod creation/deletion/query logic
+- web/
+  - model: contains the SQL-related connection, and the classes generated by jOOQ to manipulate/query tables in the DB
+  - resources: the RESTful endpoints of the application. `HelloWorldResource` is the hello world endpoint; `WorkflowPodBrainResource`(TODO) holds the endpoints for workflow-pod related logic
+
+## How to get started
+
+### Dependencies
+Similar to the texera project, `Java 11` and `sbt` are required
+
+### Local development setup
+
+IntelliJ is highly recommended. You can re-use the IDE setup of the texera project.
+
+### Launch the App
+
+To launch the application, build and run `core/workflow-pod-brain/src/main/scala/WorkflowPodBrainApplication.scala`; below is the setup in IntelliJ:
+![](./idea-config.png)
+
+
+## How to collaborate
+
+1. Checkout from this branch, `workflow-pod`
+2. If you have changes ready to review, submit the PR from `your-dev-branch` to `workflow-pod` through GitHub. The review is done via GitHub
+3. If `workflow-pod` is updated, Jiadong will send a Slack message, and you can update your local `workflow-pod` branch and rebase `your-dev-branch` on top of `workflow-pod`
+4. If any changes need to be made in `workflow-pod`, feel free to DM/email Jiadong (jiadongb@uci.edu)
diff --git a/core/workflow-computing-unit-managing-service/architecture.JPG b/core/workflow-computing-unit-managing-service/architecture.JPG
new file mode 100644
index 00000000000..ac5b78a78b6
Binary files /dev/null and b/core/workflow-computing-unit-managing-service/architecture.JPG differ
diff --git a/core/workflow-computing-unit-managing-service/build.sbt b/core/workflow-computing-unit-managing-service/build.sbt
new file mode 100644
index 00000000000..e0bdda78c89
--- /dev/null
+++ b/core/workflow-computing-unit-managing-service/build.sbt
@@ -0,0 +1,34 @@
+import scala.collection.Seq
+
+name := "workflow-computing-unit-managing-service"
+organization := "edu.uci.ics"
+version := "0.1.0"
+
+enablePlugins(JavaAppPackaging)
+
+ThisBuild / version := "0.1.0-SNAPSHOT"
+ThisBuild / scalaVersion := "2.13.12"
+
+// Dependency Versions
+val dropwizardVersion = "4.0.7"
+
+// Dependencies
+libraryDependencies ++= Seq(
+  "io.dropwizard" % "dropwizard-core" % dropwizardVersion,
+  "io.kubernetes" % "client-java" % "21.0.0",
+  "org.jooq" % "jooq" % "3.14.16",
+  "mysql" % "mysql-connector-java" % "8.0.33",
+  "com.softwaremill.sttp.client4" %% "core" % "4.0.0-M6",
+  "com.lihaoyi" %% "upickle" % "3.1.0",
+  "com.typesafe" % "config" % "1.4.2",
+  "io.fabric8" % "kubernetes-client" % "6.12.1"
+)
+
+// Compiler Options
+Compile / scalacOptions ++= Seq(
+  "-Xelide-below", "WARNING",
+  "-feature",
+  "-deprecation",
+  "-Ywarn-unused:imports"
+)
\ No newline at end of file
diff --git a/core/workflow-computing-unit-managing-service/idea-config.png b/core/workflow-computing-unit-managing-service/idea-config.png
new file mode 100644
index 00000000000..c507fbc7381
Binary files /dev/null and b/core/workflow-computing-unit-managing-service/idea-config.png differ
diff --git a/core/workflow-computing-unit-managing-service/project/build.properties b/core/workflow-computing-unit-managing-service/project/build.properties
new file mode 100644
index 00000000000..49214c4bb46
--- /dev/null
+++ b/core/workflow-computing-unit-managing-service/project/build.properties
@@ -0,0 +1 @@
+sbt.version = 1.9.9
diff --git a/core/workflow-computing-unit-managing-service/project/plugins.sbt b/core/workflow-computing-unit-managing-service/project/plugins.sbt
new file mode 100644
index 00000000000..ebebbb50109
--- /dev/null
+++ b/core/workflow-computing-unit-managing-service/project/plugins.sbt
@@ -0,0 +1 @@
+addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.9.16")
\ No newline at end of file
diff --git a/core/workflow-computing-unit-managing-service/scripts/envoy-configmap.yaml b/core/workflow-computing-unit-managing-service/scripts/envoy-configmap.yaml
new file mode 100644
index 00000000000..126f1bca597
--- /dev/null
+++ b/core/workflow-computing-unit-managing-service/scripts/envoy-configmap.yaml
@@ -0,0 +1,75 @@
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  name: envoy-config
+  namespace: {{ .Values.namespace }}
+data:
+  envoy.yaml: |
+    static_resources:
+      listeners:
+        - name: listener_0
+          address:
+            socket_address:
+              address: 0.0.0.0
+              port_value: 10000
+          filter_chains:
+            - filters:
+                - name: envoy.filters.network.http_connection_manager
+                  typed_config:
+                    "@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager
+                    stat_prefix: ingress_http
+                    upgrade_configs:
+                      - upgrade_type: websocket
+                    route_config:
+                      name: local_route
+                      virtual_hosts:
+                        - name: local_service
+                          domains: ["*"]
+                          routes:
+                            - match:
+                                prefix: "/wsapi/workflow-websocket"
+                              route:
+                                cluster: dynamic_service
+                                prefix_rewrite: "/wsapi/workflow-websocket"
+                    http_filters:
+                      - name: envoy.filters.http.lua
+                        typed_config:
+                          "@type": type.googleapis.com/envoy.extensions.filters.http.lua.v3.Lua
+                          inline_code: |
+                            function envoy_on_request(request_handle)
+                              local uri = request_handle:headers():get(":path")
+                              local wid = string.match(uri, "wid=(%d+)")
+                              local uid = string.match(uri, "uid=(%d+)")
+                              if uid then
+                                local new_host = "user-pod-" .. uid .. "-" .. wid .. ".workflow-pods.wf-pod-pool.svc.cluster.local:8080"
+                                request_handle:headers():replace(":authority", new_host)
+                              end
+                            end
+                      - name: envoy.filters.http.dynamic_forward_proxy
+                        typed_config:
+                          "@type": type.googleapis.com/envoy.extensions.filters.http.dynamic_forward_proxy.v3.FilterConfig
+                          dns_cache_config:
+                            name: dynamic_dns_cache
+                            dns_lookup_family: V4_ONLY
+                            dns_refresh_rate: 1s
+                      - name: envoy.filters.http.router
+                        typed_config:
+                          "@type": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router
+
+                    access_log:
+                      - name: envoy.access_loggers.stdout
+                        typed_config:
+                          "@type": type.googleapis.com/envoy.extensions.access_loggers.stream.v3.StdoutAccessLog
+
+      clusters:
+        - name: dynamic_service
+          connect_timeout: 0.25s
+          lb_policy: CLUSTER_PROVIDED
+          cluster_type:
+            name: envoy.clusters.dynamic_forward_proxy
+            typed_config:
+              "@type": type.googleapis.com/envoy.extensions.clusters.dynamic_forward_proxy.v3.ClusterConfig
+              dns_cache_config:
+                name: dynamic_dns_cache
+                dns_lookup_family: V4_ONLY
+                dns_refresh_rate: 1s
\ No newline at end of file
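A concrete trace of the Lua filter above (IDs hypothetical): a WebSocket upgrade to `/wsapi/workflow-websocket?wid=3&uid=7` matches the route; the filter extracts `uid` and `wid` and rewrites `:authority` to `user-pod-7-3.workflow-pods.wf-pod-pool.svc.cluster.local:8080`; the `dynamic_forward_proxy` cluster then resolves that headless-service name and forwards the upgraded connection to the matching pod. Note that the snippet assumes `wid` is present whenever `uid` is, since `new_host` concatenates both.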
diff --git a/core/workflow-computing-unit-managing-service/scripts/envoy-deployment.yaml b/core/workflow-computing-unit-managing-service/scripts/envoy-deployment.yaml
new file mode 100644
index 00000000000..d38bad35113
--- /dev/null
+++ b/core/workflow-computing-unit-managing-service/scripts/envoy-deployment.yaml
@@ -0,0 +1,32 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: envoy-deployment
+  namespace: {{ .Values.namespace }}
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      app: envoy
+  template:
+    metadata:
+      labels:
+        app: envoy
+    spec:
+      containers:
+        - name: envoy
+          image: envoyproxy/envoy:v1.31-latest # latest patch release on the v1.31 track
+          ports:
+            - containerPort: 10000 # Expose port 10000 for traffic
+          volumeMounts:
+            - name: envoy-config
+              mountPath: /etc/envoy
+              readOnly: true
+          args:
+            - "-c"
+            - "/etc/envoy/envoy.yaml" # Specify the path to the configuration file
+            # - "--log-level debug" # Set the log level
+      volumes:
+        - name: envoy-config
+          configMap:
+            name: envoy-config # Reference the ConfigMap created earlier
\ No newline at end of file
diff --git a/core/workflow-computing-unit-managing-service/scripts/texera_workflow_pod.sql b/core/workflow-computing-unit-managing-service/scripts/texera_workflow_pod.sql
new file mode 100644
index 00000000000..a4685108cda
--- /dev/null
+++ b/core/workflow-computing-unit-managing-service/scripts/texera_workflow_pod.sql
@@ -0,0 +1,14 @@
+USE `texera_db`;
+
+CREATE TABLE IF NOT EXISTS pod
+(
+    `uid`            INT UNSIGNED NOT NULL,
+    `wid`            INT UNSIGNED NOT NULL,
+    `name`           VARCHAR(128) NOT NULL,
+    `pod_uid`        VARCHAR(128) NOT NULL,
+    `creation_time`  TIMESTAMP    NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    `terminate_time` TIMESTAMP             DEFAULT NULL,
+    FOREIGN KEY (`uid`) REFERENCES `user` (`uid`),
+    PRIMARY KEY (`pod_uid`)
+) ENGINE = INNODB;
\ No newline at end of file
diff --git
a/core/workflow-computing-unit-managing-service/scripts/workflow_pods_namespaces.yaml b/core/workflow-computing-unit-managing-service/scripts/workflow_pods_namespaces.yaml new file mode 100644 index 00000000000..ae27e9524c3 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/scripts/workflow_pods_namespaces.yaml @@ -0,0 +1,9 @@ +apiVersion: v1 +kind: Namespace +metadata: + name: wf-pod-brain +--- +apiVersion: v1 +kind: Namespace +metadata: + name: wf-pod-pool diff --git a/core/workflow-computing-unit-managing-service/scripts/workflow_pods_service.yaml b/core/workflow-computing-unit-managing-service/scripts/workflow_pods_service.yaml new file mode 100644 index 00000000000..2f2df901c99 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/scripts/workflow_pods_service.yaml @@ -0,0 +1,13 @@ +apiVersion: v1 +kind: Service +metadata: + name: workflow-pods + namespace: wf-pod-pool +spec: + clusterIP: None + selector: + workflow: worker + ports: + - protocol: TCP + port: 8080 + targetPort: 8080 \ No newline at end of file diff --git a/core/workflow-computing-unit-managing-service/src/main/resources/application.conf b/core/workflow-computing-unit-managing-service/src/main/resources/application.conf new file mode 100644 index 00000000000..72095c5b6a7 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/src/main/resources/application.conf @@ -0,0 +1,12 @@ +kubernetes { + compute-unit-pool-namespace = "workflow-computing-unit-pool" + compute-unit-pool-namespace = ${?KUBERNETES_COMPUTE_UNIT_POOL_NAMESPACE} + + compute-unit-service-name = "workflow-computing-unit-svc" + compute-unit-service-name = ${?KUBERNETES_COMPUTE_UNIT_SERVICE_NAME} + + image-name = "bobbai/texera-workflow-computing-unit:dev" + image-name = ${?KUBERNETES_IMAGE_NAME} + + port-num = 8085 +} \ No newline at end of file diff --git a/core/workflow-computing-unit-managing-service/src/main/resources/workflow-computing-unit-managing-service-config.yaml b/core/workflow-computing-unit-managing-service/src/main/resources/workflow-computing-unit-managing-service-config.yaml new file mode 100644 index 00000000000..176bcc2f963 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/src/main/resources/workflow-computing-unit-managing-service-config.yaml @@ -0,0 +1,13 @@ +server: + applicationConnectors: + - type: http + port: 8888 + + adminConnectors: + - type: http + port: 8082 + +logging: + level: INFO + loggers: + "com.example": DEBUG \ No newline at end of file diff --git a/core/workflow-computing-unit-managing-service/src/main/scala/config/WorkflowComputingUnitManagingServiceConf.scala b/core/workflow-computing-unit-managing-service/src/main/scala/config/WorkflowComputingUnitManagingServiceConf.scala new file mode 100644 index 00000000000..e0b8e31d1a0 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/src/main/scala/config/WorkflowComputingUnitManagingServiceConf.scala @@ -0,0 +1,15 @@ +package config + +import com.typesafe.config.{Config, ConfigFactory} + +object WorkflowComputingUnitManagingServiceConf { + + // Load the configuration + private val conf: Config = ConfigFactory.load() + + // Access the Kubernetes settings with environment variable fallback + val computeUnitServiceName: String = conf.getString("kubernetes.compute-unit-service-name") + val computeUnitPoolNamespace: String = conf.getString("kubernetes.compute-unit-pool-namespace") + val computeUnitImageName: String = conf.getString("kubernetes.image-name") + val computeUnitPortNumber: Int = 
conf.getInt("kubernetes.port-num") +} diff --git a/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/WorkflowComputingUnitManagingService.scala b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/WorkflowComputingUnitManagingService.scala new file mode 100644 index 00000000000..298718902d3 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/WorkflowComputingUnitManagingService.scala @@ -0,0 +1,40 @@ +package edu.uci.ics.texera.service + +import com.fasterxml.jackson.module.scala.DefaultScalaModule +import edu.uci.ics.amber.util.PathUtils.workflowComputingUnitManagingServicePath +import edu.uci.ics.texera.service.resource.WorkflowComputingUnitManagingResource +import io.dropwizard.core.setup.{Bootstrap, Environment} +import io.dropwizard.core.Application + +class WorkflowComputingUnitManagingService + extends Application[WorkflowComputingUnitManagingServiceConfiguration] { + + override def initialize( + bootstrap: Bootstrap[WorkflowComputingUnitManagingServiceConfiguration] + ): Unit = { + // register scala module to dropwizard default object mapper + bootstrap.getObjectMapper.registerModule(DefaultScalaModule) + } + override def run( + configuration: WorkflowComputingUnitManagingServiceConfiguration, + environment: Environment + ): Unit = { + // Register http resources + environment.jersey.setUrlPattern("/api/*") + environment.jersey().register(new WorkflowComputingUnitManagingResource) + } +} + +object WorkflowComputingUnitManagingService { + def main(args: Array[String]): Unit = { + val configFilePath = workflowComputingUnitManagingServicePath + .resolve("src") + .resolve("main") + .resolve("resources") + .resolve("workflow-computing-unit-managing-service-config.yaml") + .toAbsolutePath + .toString + + new WorkflowComputingUnitManagingService().run("server", configFilePath) + } +} diff --git a/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/WorkflowComputingUnitManagingServiceConfiguration.scala b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/WorkflowComputingUnitManagingServiceConfiguration.scala new file mode 100644 index 00000000000..fbf431f10f7 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/WorkflowComputingUnitManagingServiceConfiguration.scala @@ -0,0 +1,5 @@ +package edu.uci.ics.texera.service + +import io.dropwizard.core.Configuration + +class WorkflowComputingUnitManagingServiceConfiguration extends Configuration {} diff --git a/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/resource/WorkflowComputingUnitManagingResource.scala b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/resource/WorkflowComputingUnitManagingResource.scala new file mode 100644 index 00000000000..3f492987254 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/resource/WorkflowComputingUnitManagingResource.scala @@ -0,0 +1,207 @@ +package edu.uci.ics.texera.service.resource + +import edu.uci.ics.amber.core.storage.StorageConfig +import edu.uci.ics.texera.dao.SqlServer +import edu.uci.ics.texera.dao.SqlServer.withTransaction +import edu.uci.ics.texera.dao.jooq.generated.tables.daos.WorkflowComputingUnitDao +import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT 
+import edu.uci.ics.texera.dao.jooq.generated.tables.pojos.WorkflowComputingUnit
+import edu.uci.ics.texera.service.resource.WorkflowComputingUnitManagingResource.{
+  DashboardWorkflowComputingUnit,
+  TerminationResponse,
+  WorkflowComputingUnitCreationParams,
+  WorkflowComputingUnitMetrics,
+  WorkflowComputingUnitResourceLimit,
+  WorkflowComputingUnitTerminationParams,
+  context
+}
+import edu.uci.ics.texera.service.util.KubernetesClientService
+import edu.uci.ics.texera.service.util.KubernetesMetricService.{getPodLimits, getPodMetrics}
+import jakarta.ws.rs._
+import jakarta.ws.rs.core.MediaType
+import org.jooq.DSLContext
+import org.jooq.types.UInteger
+
+import java.sql.Timestamp
+
+object WorkflowComputingUnitManagingResource {
+
+  private lazy val context: DSLContext = SqlServer
+    .getInstance(StorageConfig.jdbcUrl, StorageConfig.jdbcUsername, StorageConfig.jdbcPassword)
+    .createDSLContext()
+
+  case class WorkflowComputingUnitCreationParams(
+      name: String,
+      unitType: String,
+      cpuLimit: String,
+      memoryLimit: String
+  )
+
+  case class WorkflowComputingUnitTerminationParams(uri: String, name: String)
+
+  case class WorkflowComputingUnitResourceLimit(
+      cpuLimit: String,
+      memoryLimit: String
+  )
+
+  case class WorkflowComputingUnitMetrics(
+      cpuUsage: String,
+      memoryUsage: String
+  )
+
+  case class DashboardWorkflowComputingUnit(
+      computingUnit: WorkflowComputingUnit,
+      uri: String,
+      status: String,
+      metrics: WorkflowComputingUnitMetrics,
+      resourceLimits: WorkflowComputingUnitResourceLimit
+  )
+
+  case class TerminationResponse(message: String, uri: String)
+}
+
+@Produces(Array(MediaType.APPLICATION_JSON))
+@Path("/computing-unit")
+class WorkflowComputingUnitManagingResource {
+
+  /**
+    * Create a new computing unit and launch its pod.
+    *
+    * @param param The creation parameters: unit name, unit type, and CPU/memory limits.
+    * @return The created computing unit or an error response.
+    */
+  @POST
+  @Consumes(Array(MediaType.APPLICATION_JSON))
+  @Produces(Array(MediaType.APPLICATION_JSON))
+  @Path("/create")
+  def createWorkflowComputingUnit(
+      param: WorkflowComputingUnitCreationParams
+  ): DashboardWorkflowComputingUnit = {
+    withTransaction(context) { ctx =>
+      val wcDao = new WorkflowComputingUnitDao(ctx.configuration())
+
+      val computingUnit = new WorkflowComputingUnit()
+
+      computingUnit.setUid(UInteger.valueOf(0))
+      computingUnit.setName(param.name)
+      computingUnit.setCreationTime(new Timestamp(System.currentTimeMillis()))
+
+      // Insert using the DAO
+      wcDao.insert(computingUnit)
+
+      // Retrieve the generated CUID
+      val cuid = ctx.lastID().intValue()
+      val insertedUnit = wcDao.fetchOneByCuid(UInteger.valueOf(cuid))
+
+      // Create the pod with the generated CUID
+      val pod = KubernetesClientService.createPod(cuid, param.cpuLimit, param.memoryLimit)
+
+      // Return the dashboard response
+      DashboardWorkflowComputingUnit(
+        insertedUnit,
+        KubernetesClientService.generatePodURI(cuid).toString,
+        pod.getStatus.getPhase,
+        getComputingUnitMetric(cuid.toString),
+        WorkflowComputingUnitResourceLimit(param.cpuLimit, param.memoryLimit)
+      )
+    }
+  }
+
+  /**
+    * List all computing units created by the current user.
+    *
+    * @return A list of computing units that are not terminated.
+ */ + @GET + @Consumes(Array(MediaType.APPLICATION_JSON)) + @Produces(Array(MediaType.APPLICATION_JSON)) + @Path("") + def listComputingUnits(): java.util.List[DashboardWorkflowComputingUnit] = { + withTransaction(context) { ctx => + val result = ctx + .select() + .from(WORKFLOW_COMPUTING_UNIT) + .where(WORKFLOW_COMPUTING_UNIT.TERMINATE_TIME.isNull) // Filter out terminated units + .fetch() + .map(record => { + val unit = record.into(WORKFLOW_COMPUTING_UNIT).into(classOf[WorkflowComputingUnit]) + val cuid = unit.getCuid.intValue() + val podName = KubernetesClientService.generatePodName(cuid) + val pod = KubernetesClientService.getPodByName(podName) + + DashboardWorkflowComputingUnit( + computingUnit = unit, + uri = KubernetesClientService.generatePodURI(cuid).toString, + status = + if (pod != null && pod.getStatus != null) pod.getStatus.getPhase else "Unknown", + getComputingUnitMetric(cuid.toString), + getComputingUnitLimits(cuid.toString) + ) + }) + + result + } + } + + /** + * Terminate the computing unit's pod based on the pod URI. + * + * @param param The parameters containing the pod URI. + * @return A response indicating success or failure. + */ + @POST + @Consumes(Array(MediaType.APPLICATION_JSON)) + @Produces(Array(MediaType.APPLICATION_JSON)) + @Path("/terminate") + def terminateComputingUnit(param: WorkflowComputingUnitTerminationParams): TerminationResponse = { + // Attempt to delete the pod using the provided URI + val podURI = param.uri + KubernetesClientService.deletePod(podURI) + + // If successful, update the database + withTransaction(context) { ctx => + val cuDao = new WorkflowComputingUnitDao(ctx.configuration()) + val cuid = KubernetesClientService.parseCUIDFromURI(podURI) + val units = cuDao.fetchByCuid(UInteger.valueOf(cuid)) + + units.forEach(unit => unit.setTerminateTime(new Timestamp(System.currentTimeMillis()))) + cuDao.update(units) + } + + TerminationResponse(s"Successfully terminated compute unit with URI $podURI", podURI) + } + + /** + * Retrieves the CPU and memory metrics for a computing unit identified by its `cuid`. + * + * @param cuid The computing unit ID. + * @return A `WorkflowComputingUnitMetrics` object with CPU and memory usage data. 
+ */ + @GET + @Produces(Array(MediaType.APPLICATION_JSON)) + @Path("/{cuid}/metrics") + def getComputingUnitMetric(@PathParam("cuid") cuid: String): WorkflowComputingUnitMetrics = { + val metrics: Map[String, String] = getPodMetrics(cuid.toInt) + + WorkflowComputingUnitMetrics( + metrics.getOrElse("cpu", ""), + metrics.getOrElse("memory", "") + ) + } + + @GET + @Produces(Array(MediaType.APPLICATION_JSON)) + @Path("/{cuid}/limits") + def getComputingUnitLimits( + @PathParam("cuid") cuid: String + ): WorkflowComputingUnitResourceLimit = { + val podLimits: Map[String, String] = getPodLimits(cuid.toInt) + + WorkflowComputingUnitResourceLimit( + podLimits.getOrElse("cpu", ""), + podLimits.getOrElse("memory", "") + ) + } +} diff --git a/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/util/KubernetesClientService.scala b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/util/KubernetesClientService.scala new file mode 100644 index 00000000000..f2b6d07fc9e --- /dev/null +++ b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/util/KubernetesClientService.scala @@ -0,0 +1,219 @@ +package edu.uci.ics.texera.service.util + +import config.WorkflowComputingUnitManagingServiceConf +import config.WorkflowComputingUnitManagingServiceConf.{ + computeUnitImageName, + computeUnitPortNumber, + computeUnitServiceName +} +import edu.uci.ics.amber.core.storage.StorageConfig +import io.kubernetes.client.custom.Quantity +import io.kubernetes.client.openapi.apis.CoreV1Api +import io.kubernetes.client.openapi.models._ +import io.kubernetes.client.openapi.{ApiClient, Configuration} +import io.kubernetes.client.util.Config + +import java.util +import scala.jdk.CollectionConverters.CollectionHasAsScala + +object KubernetesClientService { + + private val podNamePrefix = "computing-unit" + // Create Kubernetes Core and Apps clients + private val coreApi: CoreV1Api = { + val client: ApiClient = Config.defaultClient() + Configuration.setDefaultApiClient(client) + new CoreV1Api(client) + } + + private val poolNamespace: String = + WorkflowComputingUnitManagingServiceConf.computeUnitPoolNamespace + + /** + * Generates a URI for the pod based on the computing unit ID (cuid). + * + * @param cuid The computing unit ID. + * @return A URI representing the pod location. + */ + def generatePodURI(cuid: Int): String = { + s"${generatePodName(cuid)}.$computeUnitServiceName.$poolNamespace.svc.cluster.local" + } + + /** + * Generate pod name using the cuid + * + * @param cuid The computing unit ID + * @return The pod name + */ + def generatePodName(cuid: Int): String = s"$podNamePrefix-$cuid" + + /** + * Parses the computing unit ID (cuid) from a given pod URI. + * + * @param uri The pod URI. + * @return The extracted computing unit ID as an integer. + */ + def parseCUIDFromURI(uri: String): Int = { + val pattern = """computing-unit-(\d+).*""".r + uri match { + case pattern(cuid) => cuid.toInt + case _ => throw new IllegalArgumentException(s"Invalid pod URI: $uri") + } + } + + /** + * Retrieves the list of all pods in the specified namespace. + * + * @param namespace The namespace of the pods to be returned. + * @return A list of V1Pod objects. + */ + def getPodsList(namespace: String): List[V1Pod] = { + coreApi.listNamespacedPod(namespace).execute().getItems.asScala.toList + } + + /** + * Retrieves the list of pods for a given label in the specified namespace. 
+ * + * @param namespace The namespace of the pods to be returned. + * @param podLabel The label of the pods to be returned. + * @return A list of V1Pod objects representing the pods with the given label. + */ + def getPodsList(namespace: String, podLabel: String): List[V1Pod] = { + coreApi.listNamespacedPod(namespace).labelSelector(podLabel).execute().getItems.asScala.toList + } + + /** + * Retrieves a single pod with the given label in the specified namespace. + * + * @param namespace The namespace of the pod to be returned. + * @param podLabel The label of the pod to be returned. + * @return A V1Pod object representing the pod with the given label. + */ + def getPodFromLabel(namespace: String, podLabel: String): V1Pod = { + val podsList = getPodsList(namespace, podLabel) + if (podsList.isEmpty) { + null + } else { + podsList.last + } + } + + /** + * Checks if the pod is in the desired status. + * + * @param podName The name of the pod. + * @param desiredState The desired state. + * @return Boolean indicating if the pod is in the desired state. + */ + private def isPodInDesiredState(podName: String, desiredState: String): Boolean = { + val pod = coreApi.readNamespacedPod(podName, poolNamespace).execute() + println(pod.getStatus.getPhase) + pod.getStatus.getPhase == desiredState + } + + def getPodByName(podName: String): V1Pod = { + coreApi.readNamespacedPod(podName, poolNamespace).execute() + } + + /** + * Creates a new pod under the specified namespace for the given computing unit ID. + * + * @param cuid The computing unit ID. + * @return The newly created V1Pod object. + */ + def createPod(cuid: Int, cpuLimit: String, memoryLimit: String): V1Pod = { + val podName = generatePodName(cuid) + if (getPodFromLabel(poolNamespace, s"name=$podName") != null) { + throw new Exception(s"Pod with cuid $cuid already exists") + } + + val cpu: Quantity = new Quantity(cpuLimit) + val memory: Quantity = new Quantity(memoryLimit) + + val pod: V1Pod = new V1Pod() + .apiVersion("v1") + .kind("Pod") + .metadata( + new V1ObjectMeta() + .name(podName) + .namespace(poolNamespace) + .labels( + util.Map.of( + "type", + "computing-unit", + "cuid", + String.valueOf(cuid), + "name", + podName + ) + ) + ) + .spec( + new V1PodSpec() + .overhead(null) // https://github.com/kubernetes-client/java/issues/3076 + .containers( + util.List.of( + new V1Container() + .name("computing-unit-master") + .image(computeUnitImageName) + .ports(util.List.of(new V1ContainerPort().containerPort(computeUnitPortNumber))) + .env( + util.List.of( + new V1EnvVar().name("JDBC_URL").value(StorageConfig.jdbcUrl), + new V1EnvVar().name("JDBC_USERNAME").value(StorageConfig.jdbcUsername), + new V1EnvVar().name("JDBC_PASSWORD").value(StorageConfig.jdbcPassword) + ) + ) + .resources( + new V1ResourceRequirements() + .limits( + util.Map.of( + "cpu", + cpu, + "memory", + memory + ) + ) // may want to add requests as well to make efficient use of the CPU resources + ) + ) + ) + .hostname(podName) + .subdomain(computeUnitServiceName) + ) + + coreApi.createNamespacedPod(poolNamespace, pod).execute() + } + + /** + * Deletes an existing pod using the pod URI. + * + * @param podURI The URI of the pod to delete. + */ + def deletePod(podURI: String): Unit = { + val cuid = parseCUIDFromURI(podURI) + coreApi.deleteNamespacedPod(generatePodName(cuid), poolNamespace).execute() + } + + /** + * Waits for the pod to reach the desired status. + * + * @param cuid The computing unit ID. + * @param desiredStatus The desired pod status. 
+ */ + private def waitForPodStatus(cuid: Int, desiredStatus: String): Unit = { + var attempts = 0 + val maxAttempts = 60 + val podName = generatePodName(cuid) + while (attempts < maxAttempts && !isPodInDesiredState(podName, desiredStatus)) { + attempts += 1 + Thread.sleep(1000) + println(s"Waiting for pod $podName to reach $desiredStatus (attempt $attempts)") + } + + if (!isPodInDesiredState(podName, desiredStatus)) { + throw new RuntimeException( + s"Pod $podName failed to reach $desiredStatus after $maxAttempts attempts" + ) + } + } +} diff --git a/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/util/KubernetesMetricService.scala b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/util/KubernetesMetricService.scala new file mode 100644 index 00000000000..ec49ed4ad45 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/util/KubernetesMetricService.scala @@ -0,0 +1,75 @@ +package edu.uci.ics.texera.service.util + +import config.WorkflowComputingUnitManagingServiceConf +import io.fabric8.kubernetes.api.model.metrics.v1beta1.PodMetricsList +import io.fabric8.kubernetes.api.model.PodList +import io.fabric8.kubernetes.client.{KubernetesClient, KubernetesClientBuilder} + +import scala.jdk.CollectionConverters._ + +object KubernetesMetricService { + + // Initialize the Kubernetes client + val client: KubernetesClient = new KubernetesClientBuilder().build() + + private val namespace = WorkflowComputingUnitManagingServiceConf.computeUnitPoolNamespace + + /** + * Retrieves the pod metric for a given ID in the specified namespace. + * + * @param cuid The computing unit id of the pod + * @return The Pod metrics for a given name in a specified namespace. + */ + def getPodMetrics(cuid: Int): Map[String, String] = { + val podMetricsList: PodMetricsList = client.top().pods().metrics(namespace) + val targetPodName = KubernetesClientService.generatePodName(cuid) + + podMetricsList.getItems.asScala + .collectFirst { + case podMetrics if podMetrics.getMetadata.getName == targetPodName => + podMetrics.getContainers.asScala + .collectFirst { + case container => + container.getUsage.asScala.collect { + case (metric, value) => + println(s"Metric - $metric: ${value}") + // CPU is in nanocores and Memory is in Kibibyte + metric -> value.toString + }.toMap + } + .getOrElse(Map.empty[String, String]) + } + .getOrElse { + println(s"No metrics found for pod: $targetPodName in namespace: $namespace") + Map.empty[String, String] + } + } + + /** + * Retrieves the pod limits for a given ID in the specified namespace. + * + * @param cuid The computing unit id of the pod + * @return The Pod limits for a given name in a specified namespace. 
+ */ + def getPodLimits(cuid: Int): Map[String, String] = { + val podList: PodList = client.pods().inNamespace(namespace).list() + val targetPodName = KubernetesClientService.generatePodName(cuid) + + val pod = podList.getItems.asScala.find(pod => { + pod.getMetadata.getName.equals(targetPodName) + }) + + val limits: Map[String, String] = pod + .flatMap { pod => + pod.getSpec.getContainers.asScala.headOption.map { container => + container.getResources.getLimits.asScala.map { + case (key, value) => + key -> value.toString + }.toMap + } + } + .getOrElse(Map.empty[String, String]) + println(limits.toString()) + limits + } +} diff --git a/core/workflow-computing-unit-managing-service/workflow-computing-unit-managing-service.dockerfile b/core/workflow-computing-unit-managing-service/workflow-computing-unit-managing-service.dockerfile new file mode 100644 index 00000000000..3cae3a68e7f --- /dev/null +++ b/core/workflow-computing-unit-managing-service/workflow-computing-unit-managing-service.dockerfile @@ -0,0 +1,16 @@ +FROM sbtscala/scala-sbt:eclipse-temurin-jammy-11.0.17_8_1.9.3_2.13.11 + +# copy all projects under core to /core +WORKDIR /core +COPY core/ . + +RUN rm -rf amber/user-resources/* +RUN apt-get update && apt-get install -y unzip + +# build the service +WORKDIR /core +RUN scripts/build-services.sh + +CMD ["scripts/workflow-computing-unit-managing-service.sh"] + +EXPOSE 8888 \ No newline at end of file diff --git a/core/workflow-core/build.sbt b/core/workflow-core/build.sbt index e3a2ebb7eec..f60109a06a3 100644 --- a/core/workflow-core/build.sbt +++ b/core/workflow-core/build.sbt @@ -173,4 +173,5 @@ libraryDependencies ++= Seq( "org.eclipse.jgit" % "org.eclipse.jgit" % "5.13.0.202109080827-r", // jgit "org.yaml" % "snakeyaml" % "1.30", // yaml reader (downgrade to 1.30 due to dropwizard 1.3.23 required by amber) "org.apache.commons" % "commons-vfs2" % "2.9.0", // for FileResolver throw VFS-related exceptions + "com.typesafe" % "config" % "1.4.2" ) \ No newline at end of file diff --git a/core/workflow-core/src/main/resources/storage-config.yaml b/core/workflow-core/src/main/resources/storage-config.yaml deleted file mode 100644 index 9df5ee3b385..00000000000 --- a/core/workflow-core/src/main/resources/storage-config.yaml +++ /dev/null @@ -1,30 +0,0 @@ -storage: - result-storage-mode: iceberg - mongodb: - url: "mongodb://localhost:27017" - database: "texera_storage" - commit-batch-size: 1000 - iceberg: - catalog: - type: hadoop # either hadoop, rest, or postgres - rest-uri: "" # the uri of the rest catalog, not needed unless using REST catalog - postgres: - # do not include scheme in the uri as Python and Java use different schemes - uri-without-scheme: "localhost:5432/texera_iceberg_catalog" - username: "texera_iceberg_admin" # replace with actual username - password: "password" # replace with actual password - table: - namespace: "operator-port-result" - commit: - batch-size: 4096 # decide the buffer size of our IcebergTableWriter - retry: - # retry configures the OCC parameter for concurrent write operations in Iceberg - # Docs about Reliability in Iceberg: https://iceberg.apache.org/docs/1.7.1/reliability/ - # Docs about full parameter list and their meaning: https://iceberg.apache.org/docs/1.7.1/configuration/#write-properties - num-retries: 10 - min-wait-ms: 100 # 0.1s - max-wait-ms: 10000 # 10s - jdbc: - url: "jdbc:mysql://localhost:3306/texera_db?serverTimezone=UTC" - username: "" - password: "" \ No newline at end of file diff --git 
a/core/workflow-core/src/main/resources/storage.conf b/core/workflow-core/src/main/resources/storage.conf new file mode 100644 index 00000000000..0b4dee3e50d --- /dev/null +++ b/core/workflow-core/src/main/resources/storage.conf @@ -0,0 +1,68 @@ +storage { + result-storage-mode = iceberg + result-storage-mode = ${?STORAGE_RESULT_MODE} + + mongodb { + url = "mongodb://localhost:27017" + url = ${?MONGODB_URL} + + database = "texera_storage" + database = ${?MONGODB_DATABASE} + + commit-batch-size = 1000 + commit-batch-size = ${?MONGODB_BATCH_SIZE} + } + + jdbc { + url = "jdbc:mysql://localhost:3306/texera_db?serverTimezone=UTC" + url = ${?JDBC_URL} + + username = "" + username = ${?JDBC_USERNAME} + + password = "" + password = ${?JDBC_PASSWORD} + } + + iceberg { + catalog { + type = hadoop + type = ${?ICEBERG_CATALOG_TYPE} + + rest-uri = "" + rest-uri = ${?ICEBERG_REST_URI} + + postgres { + uri-without-scheme = "localhost:5432/texera_iceberg_catalog" + uri-without-scheme = ${?ICEBERG_POSTGRES_URI_WITHOUT_SCHEME} + + username = "texera_iceberg_admin" + username = ${?ICEBERG_POSTGRES_USERNAME} + + password = "password" + password = ${?ICEBERG_POSTGRES_PASSWORD} + } + } + + table { + namespace = "operator-port-result" + namespace = ${?ICEBERG_TABLE_NAMESPACE} + + commit { + batch-size = 4096 + batch-size = ${?ICEBERG_TABLE_COMMIT_BATCH_SIZE} + + retry { + num-retries = 10 + num-retries = ${?ICEBERG_TABLE_COMMIT_NUM_RETRIES} + + min-wait-ms = 100 + min-wait-ms = ${?ICEBERG_TABLE_COMMIT_MIN_WAIT_MS} + + max-wait-ms = 10000 + max-wait-ms = ${?ICEBERG_TABLE_COMMIT_MAX_WAIT_MS} + } + } + } + } +} \ No newline at end of file diff --git a/core/workflow-core/src/main/scala/edu/uci/ics/amber/core/storage/StorageConfig.scala b/core/workflow-core/src/main/scala/edu/uci/ics/amber/core/storage/StorageConfig.scala index 1bb94629725..f95264611a2 100644 --- a/core/workflow-core/src/main/scala/edu/uci/ics/amber/core/storage/StorageConfig.scala +++ b/core/workflow-core/src/main/scala/edu/uci/ics/amber/core/storage/StorageConfig.scala @@ -1,158 +1,50 @@ package edu.uci.ics.amber.core.storage +import com.typesafe.config.{Config, ConfigFactory} import edu.uci.ics.amber.util.PathUtils.corePath -import org.yaml.snakeyaml.Yaml import java.nio.file.Path -import java.util.{Map => JMap} -import scala.jdk.CollectionConverters._ object StorageConfig { - private val conf: Map[String, Any] = { - val yaml = new Yaml() - val inputStream = getClass.getClassLoader.getResourceAsStream("storage-config.yaml") - val javaConf = yaml.load(inputStream).asInstanceOf[JMap[String, Any]].asScala.toMap - val storageMap = javaConf("storage").asInstanceOf[JMap[String, Any]].asScala.toMap - val mongodbMap = storageMap("mongodb").asInstanceOf[JMap[String, Any]].asScala.toMap - val icebergMap = storageMap("iceberg").asInstanceOf[JMap[String, Any]].asScala.toMap - val icebergCatalogMap = icebergMap("catalog").asInstanceOf[JMap[String, Any]].asScala.toMap - val icebergPostgresMap = - icebergCatalogMap("postgres").asInstanceOf[JMap[String, Any]].asScala.toMap - val icebergTableMap = icebergMap("table").asInstanceOf[JMap[String, Any]].asScala.toMap - val icebergCommitMap = icebergTableMap("commit").asInstanceOf[JMap[String, Any]].asScala.toMap - val icebergRetryMap = icebergCommitMap("retry").asInstanceOf[JMap[String, Any]].asScala.toMap - val jdbcMap = storageMap("jdbc").asInstanceOf[JMap[String, Any]].asScala.toMap - - javaConf.updated( - "storage", - storageMap - .updated("mongodb", mongodbMap) - .updated( - "iceberg", - icebergMap - .updated( - 
"table", - icebergTableMap.updated( - "commit", - icebergCommitMap.updated("retry", icebergRetryMap) - ) - ) - .updated( - "catalog", - icebergCatalogMap.updated("postgres", icebergPostgresMap) - ) - ) - .updated("jdbc", jdbcMap) - ) - } - - // Result storage mode - val resultStorageMode: String = - conf("storage").asInstanceOf[Map[String, Any]]("result-storage-mode").asInstanceOf[String] - - // MongoDB configurations - val mongodbUrl: String = conf("storage") - .asInstanceOf[Map[String, Any]]("mongodb") - .asInstanceOf[Map[String, Any]]("url") - .asInstanceOf[String] - - val mongodbDatabaseName: String = conf("storage") - .asInstanceOf[Map[String, Any]]("mongodb") - .asInstanceOf[Map[String, Any]]("database") - .asInstanceOf[String] - - val mongodbBatchSize: Int = conf("storage") - .asInstanceOf[Map[String, Any]]("mongodb") - .asInstanceOf[Map[String, Any]]("commit-batch-size") - .asInstanceOf[Int] - - val icebergTableNamespace: String = conf("storage") - .asInstanceOf[Map[String, Any]]("iceberg") - .asInstanceOf[Map[String, Any]]("table") - .asInstanceOf[Map[String, Any]]("namespace") - .asInstanceOf[String] - - val icebergTableCommitBatchSize: Int = conf("storage") - .asInstanceOf[Map[String, Any]]("iceberg") - .asInstanceOf[Map[String, Any]]("table") - .asInstanceOf[Map[String, Any]]("commit") - .asInstanceOf[Map[String, Any]]("batch-size") - .asInstanceOf[Int] - - val icebergTableCommitNumRetries: Int = conf("storage") - .asInstanceOf[Map[String, Any]]("iceberg") - .asInstanceOf[Map[String, Any]]("table") - .asInstanceOf[Map[String, Any]]("commit") - .asInstanceOf[Map[String, Any]]("retry") - .asInstanceOf[Map[String, Any]]("num-retries") - .asInstanceOf[Int] - - val icebergTableCommitMinRetryWaitMs: Int = conf("storage") - .asInstanceOf[Map[String, Any]]("iceberg") - .asInstanceOf[Map[String, Any]]("table") - .asInstanceOf[Map[String, Any]]("commit") - .asInstanceOf[Map[String, Any]]("retry") - .asInstanceOf[Map[String, Any]]("min-wait-ms") - .asInstanceOf[Int] - - val icebergTableCommitMaxRetryWaitMs: Int = conf("storage") - .asInstanceOf[Map[String, Any]]("iceberg") - .asInstanceOf[Map[String, Any]]("table") - .asInstanceOf[Map[String, Any]]("commit") - .asInstanceOf[Map[String, Any]]("retry") - .asInstanceOf[Map[String, Any]]("max-wait-ms") - .asInstanceOf[Int] - - // Iceberg catalog configurations - val icebergCatalogType: String = conf("storage") - .asInstanceOf[Map[String, Any]]("iceberg") - .asInstanceOf[Map[String, Any]]("catalog") - .asInstanceOf[Map[String, Any]]("type") - .asInstanceOf[String] - - val icebergRESTCatalogUri: String = conf("storage") - .asInstanceOf[Map[String, Any]]("iceberg") - .asInstanceOf[Map[String, Any]]("catalog") - .asInstanceOf[Map[String, Any]]("rest-uri") - .asInstanceOf[String] - - val icebergPostgresCatalogUriWithoutScheme: String = conf("storage") - .asInstanceOf[Map[String, Any]]("iceberg") - .asInstanceOf[Map[String, Any]]("catalog") - .asInstanceOf[Map[String, Any]]("postgres") - .asInstanceOf[Map[String, Any]]("uri-without-scheme") - .asInstanceOf[String] - - val icebergPostgresCatalogUsername: String = conf("storage") - .asInstanceOf[Map[String, Any]]("iceberg") - .asInstanceOf[Map[String, Any]]("catalog") - .asInstanceOf[Map[String, Any]]("postgres") - .asInstanceOf[Map[String, Any]]("username") - .asInstanceOf[String] - - val icebergPostgresCatalogPassword: String = conf("storage") - .asInstanceOf[Map[String, Any]]("iceberg") - .asInstanceOf[Map[String, Any]]("catalog") - .asInstanceOf[Map[String, Any]]("postgres") - 
.asInstanceOf[Map[String, Any]]("password") - .asInstanceOf[String] - - // JDBC configurations - val jdbcUrl: String = conf("storage") - .asInstanceOf[Map[String, Any]]("jdbc") - .asInstanceOf[Map[String, Any]]("url") - .asInstanceOf[String] - - val jdbcUsername: String = conf("storage") - .asInstanceOf[Map[String, Any]]("jdbc") - .asInstanceOf[Map[String, Any]]("username") - .asInstanceOf[String] - - val jdbcPassword: String = conf("storage") - .asInstanceOf[Map[String, Any]]("jdbc") - .asInstanceOf[Map[String, Any]]("password") - .asInstanceOf[String] + // Load configuration + private val conf: Config = ConfigFactory.parseResources("storage.conf").resolve() + + // General storage settings + val resultStorageMode: String = conf.getString("storage.result-storage-mode") + + // MongoDB specifics + val mongodbUrl: String = conf.getString("storage.mongodb.url") + val mongodbDatabaseName: String = conf.getString("storage.mongodb.database") + val mongodbBatchSize: Int = conf.getInt("storage.mongodb.commit-batch-size") + + // JDBC specifics + val jdbcUrl: String = conf.getString("storage.jdbc.url") + val jdbcUsername: String = conf.getString("storage.jdbc.username") + val jdbcPassword: String = conf.getString("storage.jdbc.password") + + // Iceberg specifics + val icebergCatalogType: String = conf.getString("storage.iceberg.catalog.type") + val icebergRESTCatalogUri: String = conf.getString("storage.iceberg.catalog.rest-uri") + + // Iceberg Postgres specifics + val icebergPostgresCatalogUriWithoutScheme: String = + conf.getString("storage.iceberg.catalog.postgres.uri-without-scheme") + val icebergPostgresCatalogUsername: String = + conf.getString("storage.iceberg.catalog.postgres.username") + val icebergPostgresCatalogPassword: String = + conf.getString("storage.iceberg.catalog.postgres.password") + + // Iceberg Table specifics + val icebergTableNamespace: String = conf.getString("storage.iceberg.table.namespace") + val icebergTableCommitBatchSize: Int = + conf.getInt("storage.iceberg.table.commit.batch-size") + val icebergTableCommitNumRetries: Int = + conf.getInt("storage.iceberg.table.commit.retry.num-retries") + val icebergTableCommitMinRetryWaitMs: Int = + conf.getInt("storage.iceberg.table.commit.retry.min-wait-ms") + val icebergTableCommitMaxRetryWaitMs: Int = + conf.getInt("storage.iceberg.table.commit.retry.max-wait-ms") // File storage configurations val fileStorageDirectoryPath: Path = diff --git a/core/workflow-core/src/main/scala/edu/uci/ics/amber/core/storage/result/iceberg/IcebergDocument.scala b/core/workflow-core/src/main/scala/edu/uci/ics/amber/core/storage/result/iceberg/IcebergDocument.scala index 862c953b2eb..88530aa4c8c 100644 --- a/core/workflow-core/src/main/scala/edu/uci/ics/amber/core/storage/result/iceberg/IcebergDocument.scala +++ b/core/workflow-core/src/main/scala/edu/uci/ics/amber/core/storage/result/iceberg/IcebergDocument.scala @@ -12,7 +12,6 @@ import org.apache.iceberg.types.{Conversions, Types} import java.net.URI import java.util.concurrent.locks.{ReentrantLock, ReentrantReadWriteLock} -import scala.collection.mutable import scala.jdk.CollectionConverters._ import java.nio.ByteBuffer import java.time.{Instant, LocalDate, ZoneOffset} diff --git a/core/workflow-core/src/main/scala/edu/uci/ics/amber/util/PathUtils.scala b/core/workflow-core/src/main/scala/edu/uci/ics/amber/util/PathUtils.scala index 7fe89d0e8ce..0af07eb0905 100644 --- a/core/workflow-core/src/main/scala/edu/uci/ics/amber/util/PathUtils.scala +++ 
b/core/workflow-core/src/main/scala/edu/uci/ics/amber/util/PathUtils.scala @@ -39,6 +39,9 @@ object PathUtils { lazy val workflowCompilingServicePath: Path = corePath.resolve("workflow-compiling-service") + lazy val workflowComputingUnitManagingServicePath: Path = + corePath.resolve("workflow-computing-unit-managing-service") + private lazy val datasetsRootPath = corePath.resolve("amber").resolve("user-resources").resolve("datasets") diff --git a/core/workflow-core/src/test/scala/edu/uci/ics/amber/storage/result/iceberg/IcebergTableStatsSpec.scala b/core/workflow-core/src/test/scala/edu/uci/ics/amber/storage/result/iceberg/IcebergTableStatsSpec.scala index 2aca3408c0e..08d618309fe 100644 --- a/core/workflow-core/src/test/scala/edu/uci/ics/amber/storage/result/iceberg/IcebergTableStatsSpec.scala +++ b/core/workflow-core/src/test/scala/edu/uci/ics/amber/storage/result/iceberg/IcebergTableStatsSpec.scala @@ -18,10 +18,9 @@ import org.scalatest.{BeforeAndAfterAll, Suite} import java.net.URI import java.sql.Timestamp -import java.time.{Instant, LocalDate, ZoneId, ZoneOffset} +import java.time.{LocalDate, ZoneId} import java.time.format.DateTimeFormatter import java.util.UUID -import scala.jdk.CollectionConverters._ class IcebergTableStatsSpec extends AnyFlatSpec with BeforeAndAfterAll with Suite { diff --git a/core/workflow-operator/src/main/scala/edu/uci/ics/amber/operator/visualization/networkGraph/NetworkGraphOpDesc.scala b/core/workflow-operator/src/main/scala/edu/uci/ics/amber/operator/visualization/networkGraph/NetworkGraphOpDesc.scala index 4f57d167d53..5e9682bec19 100644 --- a/core/workflow-operator/src/main/scala/edu/uci/ics/amber/operator/visualization/networkGraph/NetworkGraphOpDesc.scala +++ b/core/workflow-operator/src/main/scala/edu/uci/ics/amber/operator/visualization/networkGraph/NetworkGraphOpDesc.scala @@ -1,7 +1,7 @@ package edu.uci.ics.amber.operator.visualization.networkGraph import com.fasterxml.jackson.annotation.{JsonProperty, JsonPropertyDescription} -import com.kjetland.jackson.jsonSchema.annotations.{JsonSchemaInject, JsonSchemaTitle} +import com.kjetland.jackson.jsonSchema.annotations.JsonSchemaTitle import edu.uci.ics.amber.core.tuple.{AttributeType, Schema} import edu.uci.ics.amber.core.workflow.OutputPort.OutputMode import edu.uci.ics.amber.core.workflow.{InputPort, OutputPort, PortIdentity} diff --git a/diabetes.csv b/diabetes.csv new file mode 100644 index 00000000000..9e6a36214b9 --- /dev/null +++ b/diabetes.csv @@ -0,0 +1,769 @@ +Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction,Age,Outcome +6,148,72,35,0,33.6,0.627,50,1 +1,85,66,29,0,26.6,0.351,31,0 +8,183,64,0,0,23.3,0.672,32,1 +1,89,66,23,94,28.1,0.167,21,0 +0,137,40,35,168,43.1,2.288,33,1 +5,116,74,0,0,25.6,0.201,30,0 +3,78,50,32,88,31,0.248,26,1 +10,115,0,0,0,35.3,0.134,29,0 +2,197,70,45,543,30.5,0.158,53,1 +8,125,96,0,0,0,0.232,54,1 +4,110,92,0,0,37.6,0.191,30,0 +10,168,74,0,0,38,0.537,34,1 +10,139,80,0,0,27.1,1.441,57,0 +1,189,60,23,846,30.1,0.398,59,1 +5,166,72,19,175,25.8,0.587,51,1 +7,100,0,0,0,30,0.484,32,1 +0,118,84,47,230,45.8,0.551,31,1 +7,107,74,0,0,29.6,0.254,31,1 +1,103,30,38,83,43.3,0.183,33,0 +1,115,70,30,96,34.6,0.529,32,1 +3,126,88,41,235,39.3,0.704,27,0 +8,99,84,0,0,35.4,0.388,50,0 +7,196,90,0,0,39.8,0.451,41,1 +9,119,80,35,0,29,0.263,29,1 +11,143,94,33,146,36.6,0.254,51,1 +10,125,70,26,115,31.1,0.205,41,1 +7,147,76,0,0,39.4,0.257,43,1 +1,97,66,15,140,23.2,0.487,22,0 +13,145,82,19,110,22.2,0.245,57,0 +5,117,92,0,0,34.1,0.337,38,0 
[... several hundred added rows of CSV data omitted; each row holds nine comma-separated numeric values ending in a 0/1 outcome label. This is the diabetes.csv that pod.Dockerfile below copies into the backend image ...]
\ No newline at end of file
diff --git a/pod-brain.Dockerfile b/pod-brain.Dockerfile
new file mode 100644
index 00000000000..9b48c9512ef
--- /dev/null
+++ b/pod-brain.Dockerfile
@@ -0,0 +1,20 @@
+# Used to build the workflow-pod-brain image
+
+FROM sbtscala/scala-sbt:eclipse-temurin-jammy-11.0.17_8_1.9.3_2.13.11
+
+WORKDIR /core/workflow-pod-brain
+COPY core/workflow-pod-brain .
+RUN sbt clean package
+# Run update and install in one layer so a stale apt cache is never reused
+RUN apt-get update && apt-get install -y netcat unzip && apt-get clean
+
+WORKDIR /core
+COPY core/scripts ./scripts
+# Add .git for runtime calls to jgit from OPversion
+COPY .git ../.git
+
+RUN scripts/build-brain.sh
+
+CMD ["scripts/deploy-brain.sh"]
+
+EXPOSE 8888
diff --git a/pod.Dockerfile b/pod.Dockerfile
new file mode 100644
index 00000000000..ecc42c73c0f
--- /dev/null
+++ b/pod.Dockerfile
@@ -0,0 +1,25 @@
+# Used to build an image containing only Texera's backend
+
+FROM sbtscala/scala-sbt:eclipse-temurin-jammy-11.0.17_8_1.9.3_2.13.11
+
+WORKDIR /core/amber
+COPY core/amber .
+RUN sbt clean package
+# Run update and install in one layer so a stale apt cache is never reused
+RUN apt-get update && apt-get install -y netcat unzip python3-pip && apt-get clean
+RUN pip3 install python-lsp-server python-lsp-server[websockets]
+RUN pip3 install -r requirements.txt
+RUN pip3 install -r operator-requirements.txt
+
+WORKDIR /core
+COPY core/scripts ./scripts
+# Add .git for runtime calls to jgit from OPversion
+COPY .git ../.git
+
+COPY diabetes.csv ./diabetes.csv
+
+RUN scripts/build-docker.sh
+
+CMD ["scripts/deploy-docker-trap.sh"]
+
+EXPOSE 8080
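Note: both new Dockerfiles COPY paths such as core/... and .git, so they must be built with the repository root as the build context. A minimal sketch of the build commands (the image tags here are illustrative assumptions, not taken from this diff):

    docker build -f pod-brain.Dockerfile -t texera/workflow-pod-brain .
    docker build -f pod.Dockerfile -t texera/backend-pod .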