diff --git a/.gitignore b/.gitignore index bc8e8fa..fa7b136 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,4 @@ target/ *.rs.bk /adapters/file/model.json +/config-test/* diff --git a/Dockerfile b/Dockerfile index 9f3f8b4..3f6dbe4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -50,6 +50,7 @@ ENV HASURA_CONFIGURATION_DIRECTORY=/etc/connector ENV RUST_BACKTRACE=full COPY calcite-rs-jni/ /calcite-rs-jni/ +COPY config-templates/ /config-templates/ WORKDIR /calcite-rs-jni/calcite RUN ./gradlew assemble diff --git a/adapters/jdbc/.env.local b/adapters/jdbc/.env.local index 76079f1..3e0a162 100644 --- a/adapters/jdbc/.env.local +++ b/adapters/jdbc/.env.local @@ -1,4 +1,3 @@ RUST_LOG=info OTEL_LOGS_EXPORTER=console OTEL_LOG_LEVEL=trace -LOG4J_CONFIGURATION_FILE=../../calcite-rs-jni/target/classes/log4j2.xml diff --git a/build.sh b/build.sh new file mode 100755 index 0000000..6e55f48 --- /dev/null +++ b/build.sh @@ -0,0 +1,11 @@ +release_info=$(curl -L \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer $GITHUB_TOKEN" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + https://api.github.com/repos/hasura/ndc-calcite/releases/latest) +TAG=$(echo "$release_info" | grep 'tag_name' | awk -F':' '{print $2}' | tr -d ' "",') +tar -czvf connector-definition.tgz connector-definition +docker build . 
--platform linux/arm64,linux/amd64 -t kstott/meta_connector:latest +docker tag kstott/meta_connector:latest kstott/meta_connector:"$TAG" +docker push kstott/meta_connector:latest +docker push kstott/meta_connector:"$TAG" diff --git a/calcite-rs-jni/calcite b/calcite-rs-jni/calcite index 96cbcc5..14f430b 160000 --- a/calcite-rs-jni/calcite +++ b/calcite-rs-jni/calcite @@ -1 +1 @@ -Subproject commit 96cbcc5a4aa162944f5e2246e8cb05632ff69069 +Subproject commit 14f430b8a28e205dac6387dd7cdc8b146fe4ad71 diff --git a/ci/deploy.sh b/ci/deploy.sh deleted file mode 100755 index b0c5cb1..0000000 --- a/ci/deploy.sh +++ /dev/null @@ -1,139 +0,0 @@ -#!/usr/bin/env bash -# -# To get the skopeo dependency automatically, run with: -# -# $ nix run .#publish-docker-image -# -set -euo pipefail - -DRY_RUN=false -if [[ "${1:-}" == '-n' || "${1:-}" == '--dry-run' ]]; then - DRY_RUN=true - echo "$(tput bold)$(tput setaf 1)DRY RUN; some steps will be skipped$(tput sgr0)" - shift -fi - -if [[ $# -ne 2 ]]; then - echo >&2 "Usage: ${0} [-n|--dry-run] REF IMAGE" - echo >&2 - echo >&2 ' REF should be in the form "refs/heads/" or "refs/tags/"' - echo >&2 ' (in a Github workflow the variable "github.ref" has this format)' - echo >&2 - echo >&2 ' IMAGE is the path of the Docker image, e.g. "ghcr.io/hasura/ndc-postgres"' - echo >&2 - echo >&2 ' "--dry-run" will not push anything, but it will still build' - exit 1 -fi - -github_ref="$1" -image="$2" - -# Runs the given command, unless `--dry-run` was set. -function run { - if "$DRY_RUN"; then - echo "$(tput bold)$(tput setaf 1)not running:$(tput sgr0) $*" - else - echo "$(tput bold)$(tput setaf 2)running:$(tput sgr0) $*" - "$@" - fi -} - -# Assumes that the given ref is a branch name. Sets a tag for a docker image of -# the form: -# -# dev-main-bffd555 -# --- ---- ------- -# ↑ ↑ ↑ -# prefix "dev" | commit hash -# branch -# -# Additionally sets a branch tag assuming this is the latest tag for the given -# branch. 
The branch tag has the form: dev-main -# Also sets the 'latest' tag -# Also sets a tag with just the branch short hash -function set_dev_tags { - local branch="$1" - # replace '.' and '/' in branch name with '-' - local tidy_branch - tidy_branch="$(tr './' '-' <<< "$branch")" - local branch_prefix="dev-${tidy_branch}" - local version - local short_hash - short_hash="$(git rev-parse --short=9 HEAD)" - version="${branch_prefix}-${short_hash}" - export docker_tags=("$version" "$branch_prefix" "$short_hash" "latest") -} - -# The Github workflow passes a ref of the form refs/heads/ or -# refs/tags/. This function sets an array of docker image tags based -# on either the given branch or tag name. -# -# If a tag name does not start with a "v" it is assumed to not be a release tag -# so the function sets an empty array. -# -# If the input does look like a release tag, set the tag name as the sole docker -# tag. -# -# If the input is a branch, set docker tags via `set_dev_tags`. -function set_docker_tags { - local input - input="$1" - if [[ $input =~ ^refs/tags/(v.*)$ ]]; then - local tag="${BASH_REMATCH[1]}" - export docker_tags=("$tag" "latest") - elif [[ $input =~ ^refs/heads/(.*)$ ]]; then - local branch="${BASH_REMATCH[1]}" - set_dev_tags "$branch" - else - export docker_tags=("latest") - fi -} - -function publish_multi_arch { - local input - local image_archive - local image_path_for_arch - - architectures=('aarch64' 'x86_64') - - input="$1" - set_docker_tags "$input" - - # do nothing if no tags found - if [[ ${#docker_tags[@]} == 0 ]]; then - echo "The given ref, ${input}, was not a release tag or a branch - will not publish a docker image" - exit - fi - - # build and push the individual images for each architecture - for arch in "${architectures[@]}"; do - # build the docker image - image_archive="docker-archive://$(nix build --print-out-paths ".#docker-${arch}-linux")" - - echo "Will publish docker image with tags: ${docker_tags[*]}" - skopeo inspect 
"$image_archive" - - image_path_for_arch="${image}-${arch}" - for tag in "${docker_tags[@]}"; do - echo - echo "Pushing docker://${image_path_for_arch}:${tag}" - run skopeo copy "$image_archive" "docker://${image_path_for_arch}:${tag}" - done - done - - # now create and push the manifest - for tag in "${docker_tags[@]}"; do - echo "Creating manifest for ${image}:${tag}" - # create a manifest referencing both architectures - # i did not use a loop here, forgive me - run docker manifest create \ - "$image:$tag" \ - --amend "${image}-aarch64:${tag}" \ - --amend "${image}-x86_64:${tag}" - - # push manifest as the main image - run docker manifest push "${image}:${tag}" - done -} - -publish_multi_arch "$github_ref" diff --git a/config-templates/.env.local b/config-templates/.env.local new file mode 100644 index 0000000..afdaddc --- /dev/null +++ b/config-templates/.env.local @@ -0,0 +1,4 @@ +RUST_LOG=info +OTEL_LOGS_EXPORTER=console +OTEL_LOG_LEVEL=trace +OTEL_EXPORTER_OTLP_ENDPOINT=http://local.hasura.dev:4317 diff --git a/config-templates/configuration.json b/config-templates/configuration.json new file mode 100644 index 0000000..fdc10dd --- /dev/null +++ b/config-templates/configuration.json @@ -0,0 +1,6 @@ +{ + "version": "4", + "$schema": "schema.json", + "model_file_path": "/etc/connector/model.json", + "fixes": true +} diff --git a/config-templates/data/chinook.db b/config-templates/data/chinook.db new file mode 100644 index 0000000..e69de29 diff --git a/config-templates/data/files/ARCHERS.json b/config-templates/data/files/ARCHERS.json new file mode 100644 index 0000000..65298e3 --- /dev/null +++ b/config-templates/data/files/ARCHERS.json @@ -0,0 +1,42 @@ +[ + { + "id": 19990101, + "a": "Friday", + "b": "New Years Day", + "c": "Tractor trouble.", + "d": [ + "Alice", + "Bob", + "Xavier" + ], + "e": "Julian Hyde", + "f": "", + "g": [ + "Bob's tractor got stuck in a field.", + "Alice and Xavier hatch a plan to surprise Charlie." 
+ ], + "object": { + "count": 1 + } + }, + { + "id": 19990103, + "a": "Sunday", + "b": "Sunday 3rd January", + "c": "Charlie's surprise.", + "d": [ + "Alice", + "Zebedee", + "Charlie", + "Xavier" + ], + "e": "William Shakespeare", + "f": "", + "g": [ + "Charlie is very surprised by Alice and Xavier's surprise plan." + ], + "object": { + "count": 2 + } + } +] diff --git a/config-templates/data/files/DATES.csv b/config-templates/data/files/DATES.csv new file mode 100644 index 0000000..9ba816e --- /dev/null +++ b/config-templates/data/files/DATES.csv @@ -0,0 +1,9 @@ +EMPNO:int,JOINEDAT:date,JOINTIME:time,JOINTIMES:timestamp +100,"1996-08-03","00:01:02","1996-08-03 00:01:02" +110,"2001-01-01","00:00:00","2001-01-01 00:00:00" +110,"2002-05-03","00:00:00","2002-05-03 00:00:00" +120,"2005-09-07","00:00:00","2005-09-07 00:00:00" +130,"2007-01-01","00:00:00","2007-01-01 00:00:00" +140,"2015-12-31","07:15:56","2015-12-31 07:15:56" +200,,,, +150,"2015-12-31","13:31:21","2015-12-31 13:31:21" diff --git a/config-templates/data/files/DEPTS.csv b/config-templates/data/files/DEPTS.csv new file mode 100644 index 0000000..628f2d8 --- /dev/null +++ b/config-templates/data/files/DEPTS.csv @@ -0,0 +1,4 @@ +DEPTNO:int,NAME:string +10,"Sales" +20,"Marketing" +30,"Accounts" diff --git a/config-templates/data/files/EMPS.csv.gz b/config-templates/data/files/EMPS.csv.gz new file mode 100644 index 0000000..294bad4 Binary files /dev/null and b/config-templates/data/files/EMPS.csv.gz differ diff --git a/config-templates/data/files/LONG_EMPS.csv b/config-templates/data/files/LONG_EMPS.csv new file mode 100644 index 0000000..f69e0c5 --- /dev/null +++ b/config-templates/data/files/LONG_EMPS.csv @@ -0,0 +1,6 @@ +EMPNO:long,NAME:string,DEPTNO:int,GENDER:string,CITY:string,EMPID:int,AGE:int,SLACKER:boolean,MANAGER:boolean,JOINEDAT:date +100,"Fred",10,,,30,25,true,false,"1996-08-03" +110,"Eric",20,"M","San Francisco",3,80,,false,"2001-01-01" +110,"John",40,"M","Vancouver",2,,false,true,"2002-05-03" 
+120,"Wilma",20,"F",,1,5,,true,"2005-09-07" +130,"Alice",40,"F","Vancouver",2,,false,true,"2007-01-01" diff --git a/config-templates/data/files/SDEPTS.csv b/config-templates/data/files/SDEPTS.csv new file mode 100644 index 0000000..b555c42 --- /dev/null +++ b/config-templates/data/files/SDEPTS.csv @@ -0,0 +1,7 @@ +DEPTNO:int,NAME:string +10,"Sales" +20,"Marketing" +30,"Accounts" +40,"40" +50,"50" +60,"60" diff --git a/config-templates/data/files/WACKY_COLUMN_NAMES.csv b/config-templates/data/files/WACKY_COLUMN_NAMES.csv new file mode 100644 index 0000000..453d961 --- /dev/null +++ b/config-templates/data/files/WACKY_COLUMN_NAMES.csv @@ -0,0 +1,6 @@ +EMPNO:int,naME:string,DEPTNO:Integer,2gender:string,CITY:string,EMPID:int,AGE:int,SLACKER:boolean,MANAGER:boolean,joined at:date +100,"Fred",10,,,30,25,true,false,"1996-08-03" +110,"Eric",20,"M","San Francisco",3,80,,false,"2001-01-01" +110,"John",40,"M","Vancouver",2,,false,true,"2002-05-03" +120,"Wilma",20,"F",,1,5,,true,"2005-09-07" +130,"Alice",40,"F","Vancouver",2,,false,true,"2007-01-01" diff --git a/config-templates/docker-compose.file.calcite.yaml b/config-templates/docker-compose.file.calcite.yaml index c4ccd56..1407046 100644 --- a/config-templates/docker-compose.file.calcite.yaml +++ b/config-templates/docker-compose.file.calcite.yaml @@ -1,10 +1,9 @@ services: - ${SUBGRAPH}_${CONNECTOR}: + meta_connector: build: context: . 
dockerfile_inline: |- - FROM kstott/calcite_connector:latest - COPY ./ /etc/connector + FROM docker.io/kstott/meta_connector:latest develop: watch: - path: ./ @@ -20,5 +19,5 @@ services: published: ${HASURA_CONNECTOR_PORT:8080} protocol: tcp volumes: - - ${MODEL}:/model.json - - ./configuration.json:/update/connector/configuration.json + - .:/etc/connector + - ${MODEL_FILE:-./models/model.json}:/etc/connector/model.json diff --git a/config-templates/model.json b/config-templates/model.json new file mode 100644 index 0000000..969d428 --- /dev/null +++ b/config-templates/model.json @@ -0,0 +1,14 @@ +{ + "version": "1.0", + "defaultSchema": "test", + "schemas": [ + { + "type": "jdbc", + "name": "test", + "jdbcUser": "", + "jdbcPassword": "", + "jdbcUrl": "jdbc:postgresql://ep-yellow-salad-961725.us-west-2.aws.neon.tech/crisp-sheepdog-47_db_3216533?sslmode=require", + "jdbcCatalog": "public" + } + ] + } \ No newline at end of file diff --git a/config-templates/models/model.files.json b/config-templates/models/model.files.json new file mode 100644 index 0000000..9022167 --- /dev/null +++ b/config-templates/models/model.files.json @@ -0,0 +1,14 @@ +{ + "version": "1.0", + "defaultSchema": "sales", + "schemas": [ + { + "type": "custom", + "name": "sales", + "factory": "org.apache.calcite.adapter.file.FileSchemaFactory", + "operand": { + "directory": "/etc/connector/data/files" + } + } + ] +} \ No newline at end of file diff --git a/config-templates/models/model.json b/config-templates/models/model.json new file mode 100644 index 0000000..bb3559a --- /dev/null +++ b/config-templates/models/model.json @@ -0,0 +1,12 @@ +{ + "version": "1.0", + "defaultSchema": "test", + "schemas": [ + { + "type": "jdbc", + "name": "test", + "sqlDialectFactory": "org.kenstott.SQLiteSqlDialectFactory", + "jdbcUrl": "jdbc:sqlite:/etc/connector/data/chinook.db" + } + ] + } \ No newline at end of file diff --git a/config-templates/models/model.pg.json 
b/config-templates/models/model.pg.json new file mode 100644 index 0000000..63baee2 --- /dev/null +++ b/config-templates/models/model.pg.json @@ -0,0 +1,14 @@ +{ + "version": "1.0", + "defaultSchema": "test", + "schemas": [ + { + "type": "jdbc", + "name": "test", + "jdbcUser": "", + "jdbcPassword": "", + "jdbcUrl": "jdbc:postgresql:///?sslmode=require", + "jdbcCatalog": "public" + } + ] + } \ No newline at end of file diff --git a/config-templates/schema.json b/config-templates/schema.json new file mode 100644 index 0000000..4ee9e59 --- /dev/null +++ b/config-templates/schema.json @@ -0,0 +1,133 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "ParsedConfiguration", + "type": "object", + "required": [ + "version" + ], + "properties": { + "version": { + "$ref": "#/definitions/Version" + }, + "$schema": { + "type": "string" + }, + "model": { + "type": "object", + "properties": { + "version": { + "type": "string" + }, + "defaultSchema": { + "type": "string" + }, + "schemas": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "name": { + "type": "string" + }, + "jdbcUser": { + "type": "string" + }, + "jdbcPassword": { + "type": "string" + }, + "jdbcUrl": { + "type": "string" + }, + "sqlDialectFactory": { + "type": "string" + }, + "jdbcCatalog": { + "type": "string" + }, + "jdbcSchema": {}, + "factory": { + "type": "string" + }, + "operand": { + "type": "object", + "properties": { + "directory": { + "type": "string" + }, + "host": { + "type": "string" + }, + "port": { + "type": "number" + }, + "database": { + "type": "string" + }, + "password": { + "type": "string" + } + } + }, + "tables": { + "type": "array", + "items": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "factory": { + "type": "string" + }, + "operand": { + "type": "object", + "properties": { + "dataFormat": { + "type": "string" + }, + "fields": { + "type": "array", + "items": { + "type": 
"object", + "properties": { + "name": { + "type": "string" + }, + "type": { + "type": "string" + }, + "mapping": {} + }, + "required": [ + "name", + "type", + "mapping" + ] + } + }, + "keyDelimiter": { + "type": "string" + } + } + } + } + } + } + } + } + } + } + } + }, + "definitions": { + "Version": { + "type": "string", + "enum": [ + "4" + ] + } + } +} \ No newline at end of file diff --git a/connector-definition.tgz b/connector-definition.tgz new file mode 100644 index 0000000..fa18772 Binary files /dev/null and b/connector-definition.tgz differ diff --git a/connector-metadata.yaml b/connector-definition/connector-metadata.yaml similarity index 58% rename from connector-metadata.yaml rename to connector-definition/connector-metadata.yaml index 8a82887..4d394f0 100644 --- a/connector-metadata.yaml +++ b/connector-definition/connector-metadata.yaml @@ -1,9 +1,9 @@ packagingDefinition: type: PrebuiltDockerImage - dockerImage: "docker.io/kstott/calcite_connector:latest" + dockerImage: "docker.io/kstott/meta_connector:latest" supportedEnvironmentVariables: - name: MODEL_FILE - description: The location of the calcite model file + description: The location of the calcite model file defaults to /etc/connector/models/model.json commands: update: hasura-calcite update cliPlugin: @@ -12,4 +12,4 @@ cliPlugin: dockerComposeWatch: - path: ./ target: /etc/connector - action: sync+restart \ No newline at end of file + action: sync+restart diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs index 537e77e..c9bd816 100644 --- a/crates/cli/src/lib.rs +++ b/crates/cli/src/lib.rs @@ -9,7 +9,8 @@ pub mod configuration; pub mod error; -use std::path::PathBuf; +use std::{env, io}; +use std::path::{Path, PathBuf}; use clap::Subcommand; use tokio::fs; @@ -62,6 +63,33 @@ pub async fn run(command: Command, context: Context) -> anyhow Ok(()) } +fn copy_files(input_dir: &str, output_dir: &str) -> io::Result<()> { + let input_path = Path::new(input_dir); + let output_path = 
Path::new(output_dir); + + if !output_path.exists() { + std::fs::create_dir_all(&output_path)?; + } + + if input_path.is_dir() { + for entry in std::fs::read_dir(input_path)? { + let entry = entry?; + let entry_path = entry.path(); + let output_file_path = output_path.join(entry_path.file_name().unwrap()); + if entry_path.is_dir() { + copy_files(entry_path.to_str().unwrap(), output_file_path.to_str().unwrap())?; + } else if entry_path.is_file() { + std::fs::copy(entry_path, output_file_path)?; + } + } + } + Ok(()) +} + +fn is_running_in_container() -> bool { + Path::new("/.dockerenv").exists() || env::var("KUBERNETES_SERVICE_HOST").is_ok() +} + /// Initialize an empty directory with an empty ndc-calcite configuration. /// /// An empty configuration contains default settings and options, and is expected to be filled with @@ -77,6 +105,15 @@ async fn initialize(with_metadata: bool, context: Context) -> Err(Error::DirectoryIsNotEmpty)?; } + let config_path = if is_running_in_container() { + Path::new("/config-templates") + } else { + Path::new("../config-templates") + }; + let context_path_str = context.context_path.to_str().ok_or(anyhow::anyhow!("Failed to convert PathBuf to &str"))?; + let config_path_str = config_path.to_str().ok_or(anyhow::anyhow!("Failed to convert PathBuf to &str"))?; + let _ = copy_files(config_path_str, context_path_str); + configuration::write_parsed_configuration( configuration::ParsedConfiguration::initial(), &context.context_path, @@ -92,7 +129,7 @@ async fn initialize(with_metadata: bool, context: Context) -> packaging_definition: metadata::PackagingDefinition::PrebuiltDockerImage( metadata::PrebuiltDockerImagePackaging { docker_image: format!( - "ghcr.io/hasura/ndc-calcite:{}", + "docker.io/kstott/meta_connector:{}", context.release_version.unwrap_or("latest") ), }, @@ -100,7 +137,7 @@ async fn initialize(with_metadata: bool, context: Context) -> supported_environment_variables: vec![metadata::EnvironmentVariableDefinition { name: 
"MODEL_FILE".to_string(), description: "The Calcite connection model".to_string(), - default_value: Some("./model.json".to_string()), + default_value: Some("/etc/connector/models/model.json".to_string()), }], commands: metadata::Commands { update: Some("hasura-ndc-calcite update".to_string()), @@ -112,7 +149,7 @@ }), docker_compose_watch: vec![metadata::DockerComposeWatchItem { path: "./".to_string(), - target: Some("/etc/ndc-calcite".to_string()), + target: Some(".".to_string()), action: metadata::DockerComposeWatchAction::SyncAndRestart, ignore: vec![], }], diff --git a/crates/connectors/ndc-calcite/src/calcite.rs b/crates/connectors/ndc-calcite/src/calcite.rs index d9b4d88..547bbaf 100644 --- a/crates/connectors/ndc-calcite/src/calcite.rs +++ b/crates/connectors/ndc-calcite/src/calcite.rs @@ -13,7 +13,7 @@ use jni::objects::JValueGen::Object; use ndc_models as models; use ndc_models::RowFieldValue; use ndc_sdk::connector::{InitializationError, QueryError}; -use serde_json::{Error, Value}; +use serde_json::{Value}; use tracing::{event, Level}; use crate::configuration::CalciteConfiguration; diff --git a/crates/connectors/ndc-calcite/src/connector/calcite.rs b/crates/connectors/ndc-calcite/src/connector/calcite.rs index f8ccbe5..8f1b09b 100644 --- a/crates/connectors/ndc-calcite/src/connector/calcite.rs +++ b/crates/connectors/ndc-calcite/src/connector/calcite.rs @@ -3,10 +3,8 @@ //! Provides HTTP server paths for required NDC functions. Connecting //! the request to the underlying code and providing the result. //! 
-use std::collections::{BTreeMap}; -use std::fs; -use std::fs::File; -use std::io::Write; +use std::collections::BTreeMap; +use std::{env, fs}; use std::path::Path; use async_trait::async_trait; @@ -19,9 +17,7 @@ use ndc_sdk::connector::{ }; use ndc_sdk::json_response::JsonResponse; use ndc_sdk::models; -use serde::de::Unexpected::Option; -use serde_json::{to_string_pretty}; -use tracing::{info_span}; +use tracing::info_span; use tracing::Instrument; use crate::{calcite, jvm, query, schema}; @@ -63,7 +59,7 @@ pub struct CalciteState { /// Returns `true` if the code is running inside a container, `false` otherwise. #[tracing::instrument] pub fn is_running_in_container() -> bool { - Path::new("/.dockerenv").exists() + Path::new("/.dockerenv").exists() || env::var("KUBERNETES_SERVICE_HOST").is_ok() } #[tracing::instrument] @@ -116,7 +112,7 @@ impl ConnectorSetup for Calcite { match fs::read_to_string(model_file_path) { Ok(models) => { println!("Configuration model content: {:?}", models); - let mut model_object: Model = serde_json::from_str(&models) + let model_object: Model = serde_json::from_str(&models) .map_err(|err| ParseError::Other(Box::from(err.to_string())))?; json_object.model = Some(model_object) }, diff --git a/crates/connectors/ndc-calcite/src/jvm.rs b/crates/connectors/ndc-calcite/src/jvm.rs index 3c0f762..3d44c4d 100644 --- a/crates/connectors/ndc-calcite/src/jvm.rs +++ b/crates/connectors/ndc-calcite/src/jvm.rs @@ -104,7 +104,7 @@ pub fn init_jvm(calcite_configuration: &CalciteConfiguration) { None => { /* handle None case if necessary */ } } - let otel_exporter_otlp_endpoint = env::var("OTEL_EXPORTER_OTLP_ENDPOINT").unwrap_or("".to_string()); + let otel_exporter_otlp_endpoint = env::var("OTEL_EXPORTER_OTLP_ENDPOINT").unwrap_or("http://local.hasura.dev:4317".to_string()); let otel_service_name = env::var("OTEL_SERVICE_NAME").unwrap_or("".to_string()); let otel_logs_exported = env::var("OTEL_LOGS_EXPORTER").unwrap_or("".to_string()); let otel_log_level 
= env::var("OTEL_LOG_LEVEL").unwrap_or("".to_string()); @@ -119,24 +119,31 @@ pub fn init_jvm(calcite_configuration: &CalciteConfiguration) { jvm_args = jvm_args.option( format!("-DOTEL_EXPORTER_OTLP_ENDPOINT={}", otel_exporter_otlp_endpoint) ); + println!("Added {} to JVM", format!("-DOTEL_EXPORTER_OTLP_ENDPOINT={}", otel_exporter_otlp_endpoint)) } if !otel_service_name.is_empty() { jvm_args = jvm_args.option( format!("-DOTEL_SERVICE_NAME={}", otel_service_name) ); + println!("Added {} to JVM", format!("-DOTEL_SERVICE_NAME={}", otel_service_name)); } if !otel_logs_exported.is_empty() { jvm_args = jvm_args.option( format!("-DOTEL_LOGS_EXPORTED={}", otel_logs_exported) ); + println!("Added {} to JVM", format!("-DOTEL_LOGS_EXPORTED={}", otel_logs_exported)); } if !otel_log_level.is_empty() { jvm_args = jvm_args.option( format!("-DOTEL_LOG_LEVEL={}", otel_log_level) ); + println!("Added {} to JVM", format!("-DOTEL_LOG_LEVEL={}", otel_log_level)); } if !expanded_paths.is_empty() { - jvm_args = jvm_args.option(["-Djava.class.path=", &expanded_paths].join("")) + jvm_args = jvm_args.option( + format!("-Djava.class.path={}", &expanded_paths) + ); + println!("Added {} to JVM", format!("-Djava.class.path={}", &expanded_paths)); } let jvm_args = jvm_args.build().unwrap(); let jvm = JavaVM::new(jvm_args).unwrap(); diff --git a/crates/connectors/ndc-calcite/src/schema.rs b/crates/connectors/ndc-calcite/src/schema.rs index 970e085..5a465af 100644 --- a/crates/connectors/ndc-calcite/src/schema.rs +++ b/crates/connectors/ndc-calcite/src/schema.rs @@ -79,14 +79,15 @@ pub fn get_schema(configuration: &CalciteConfiguration, calcite_ref: GlobalRef) procedures, }; let file_path = if is_running_in_container() { - Path::new("/update/ndc-calcite").join(CONFIG_FILE_NAME) + Path::new("/etc/connector").join(CONFIG_FILE_NAME) } else { Path::new(".").join(DEV_CONFIG_FILE_NAME) }; event!(Level::INFO, config_path = format!("Configuration file path: {}", file_path.display())); let mut 
new_configuration = configuration.clone(); new_configuration.metadata = Some(data_models.clone()); - let mut file = File::create(file_path); + let file_path_clone = file_path.clone(); + let file = File::create(file_path); match file { Ok(mut file) => { let serialized_json = serde_json::to_string_pretty(&new_configuration)?; @@ -96,7 +97,9 @@ pub fn get_schema(configuration: &CalciteConfiguration, calcite_ref: GlobalRef) schema = serde_json::to_string(&schema).unwrap() ); } - Err(_) => {} + Err(_err) => { + println!("Unable to create config file: {:?}", file_path_clone) + } } Ok(schema)