Merge pull request #3 from hasura/py/update-model-file
Update env vars present in model.json file
gneeri authored Oct 14, 2024
2 parents a471f3f + 162390a commit 9f13e9c
Showing 5 changed files with 191 additions and 30 deletions.
23 changes: 12 additions & 11 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -54,6 +54,7 @@ insta = "1"
jsonschema = "0.18.0"
log = "0.4.22"
prometheus = "0.13"
regex = "1.11.0"
reqwest = "0.11"
schemars = "0.8"
serde = "1"
1 change: 1 addition & 0 deletions crates/cli/Cargo.toml
@@ -23,6 +23,7 @@ tracing-subscriber = "0.3.18"
tracing = "0.1.40"
log = "0.4.22"
env_logger = "0.11.5"
regex = { workspace = true }

[build-dependencies]
build-data = { workspace = true }
193 changes: 175 additions & 18 deletions crates/cli/src/lib.rs
@@ -3,19 +3,27 @@
//! The CLI can do a few things. This provides a central point where those things are routed and
//! then done, making it easier to test this crate deterministically.

use std::path::{PathBuf};
use regex::Regex;
use std::path::PathBuf;

use anyhow::Ok;
use clap::Subcommand;
use include_dir::{DirEntry, include_dir};
use include_dir::Dir;
use include_dir::{include_dir, DirEntry};
use std::collections::BTreeMap;
use tokio::fs;

use ndc_calcite_schema::configuration::{has_configuration, introspect, parse_configuration, ParsedConfiguration, upgrade_to_latest_version, write_parsed_configuration};
use ndc_calcite_schema::environment::Environment;
use ndc_calcite_schema::configuration::{
has_configuration, introspect, parse_configuration, upgrade_to_latest_version,
write_parsed_configuration, ParsedConfiguration,
};
use ndc_calcite_schema::environment::{Environment, Variable};
use ndc_calcite_schema::jvm::init_jvm;
use ndc_calcite_schema::version5::CalciteRefSingleton;
use ndc_calcite_values::is_running_in_container::is_running_in_container;
use ndc_calcite_values::values::{DOCKER_CONNECTOR_DIR, DOCKER_CONNECTOR_RW, DOCKER_IMAGE_NAME, UNABLE_TO_WRITE_TO_FILE};
use ndc_calcite_values::values::{
DOCKER_CONNECTOR_DIR, DOCKER_CONNECTOR_RW, DOCKER_IMAGE_NAME, UNABLE_TO_WRITE_TO_FILE,
};

mod metadata;

@@ -56,8 +64,12 @@ pub enum Error {
}

/// Run a command in a given directory.
#[tracing::instrument(skip(context,calcite_ref_singleton))]
pub async fn run(command: Command, context: Context<impl Environment>, calcite_ref_singleton: CalciteRefSingleton) -> anyhow::Result<()> {
#[tracing::instrument(skip(context, calcite_ref_singleton))]
pub async fn run(
command: Command,
context: Context<impl Environment>,
calcite_ref_singleton: CalciteRefSingleton,
) -> anyhow::Result<()> {
match command {
Command::Initialize { with_metadata } => initialize(with_metadata, &context).await?,
Command::Update => update(context, &calcite_ref_singleton).await?,
@@ -78,7 +90,10 @@ const MODELS_DIR: Dir = include_dir!("$CARGO_MANIFEST_DIR/assets");
/// automatically work with this CLI as a plugin.

#[tracing::instrument(skip(context))]
async fn initialize(with_metadata: bool, context: &Context<impl Environment>) -> anyhow::Result<()> {
async fn initialize(
with_metadata: bool,
context: &Context<impl Environment>,
) -> anyhow::Result<()> {
let docker_config_path = &PathBuf::from(DOCKER_CONNECTOR_RW);
let config_path = if is_running_in_container() {
docker_config_path
@@ -89,7 +104,7 @@ async fn initialize(with_metadata: bool, context: &Context<impl Environment>) ->
Err(Error::DirectoryIsNotEmpty)?;
}

write_parsed_configuration(ParsedConfiguration::initial(), config_path, ).await?;
write_parsed_configuration(ParsedConfiguration::initial(), config_path).await?;

for entry in MODELS_DIR.find("**/*").unwrap() {
match entry {
@@ -123,7 +138,10 @@ async fn initialize(with_metadata: bool, context: &Context<impl Environment>) ->
supported_environment_variables: vec![metadata::EnvironmentVariableDefinition {
name: "MODEL_FILE".to_string(),
description: "The calcite connection model file path".to_string(),
default_value: Some(format!("{}/models/model.json", DOCKER_CONNECTOR_DIR).to_string()),
default_value: Some(
format!("{}/models/model.json", DOCKER_CONNECTOR_DIR).to_string(),
),
required: true,
}],
commands: metadata::Commands {
update: Some(update_command.to_string()),
@@ -147,8 +165,144 @@ async fn initialize(with_metadata: bool, context: &Context<impl Environment>) ->
/// Update the configuration in the current directory by introspecting the database.
///
/// If the directory is empty - it will initialize with the core files first.
#[tracing::instrument(skip(context,calcite_ref_singleton))]
async fn update(context: Context<impl Environment>, calcite_ref_singleton: &CalciteRefSingleton) -> anyhow::Result<()> {
#[tracing::instrument(skip(context, calcite_ref_singleton))]
async fn update(
context: Context<impl Environment>,
calcite_ref_singleton: &CalciteRefSingleton,
) -> anyhow::Result<()> {
let docker_config_path = &PathBuf::from(DOCKER_CONNECTOR_DIR);
let config_path = if is_running_in_container() {
docker_config_path
} else {
&context.context_path
};

// Read the `connector-metadata.yaml` file and create a map of supported environment variables
let metadata_yaml_file = config_path.join(".hasura-connector/connector-metadata.yaml");
let metadata = if metadata_yaml_file.exists() {
let metadata_yaml = fs::read_to_string(metadata_yaml_file).await?;
Ok(Some(serde_yaml::from_str::<
metadata::ConnectorMetadataDefinition,
>(&metadata_yaml)?))
} else {
Err(anyhow::Error::msg("Metadata file does not exist"))
}?;
let supported_env_vars = metadata
.as_ref()
.map(|m| m.supported_environment_variables.clone())
.unwrap_or_default();

let mut env_var_map = BTreeMap::new();
for env_var in supported_env_vars.iter() {
match (env_var.required, &env_var.default_value) {
// if required and no default value, throw an error
(true, None) => {
let variable_value = context
.environment
.read(&Variable::new(env_var.name.clone()))
.map_err(|err| match err {
ndc_calcite_schema::environment::Error::NonUnicodeValue(os_string) => {
anyhow::Error::msg(format!("Non-Unicode value: {:?}", os_string))
}
ndc_calcite_schema::environment::Error::VariableNotPresent(variable) => {
anyhow::Error::msg(format!("Variable not present: {:?}", variable))
}
})?;
env_var_map.insert(env_var.name.clone(), variable_value);
}
// if required and default value:
// 1. return the default value if the env var is not present
// 2. throw an error if there is a problem reading the env var
(true, Some(default)) => {
let variable_value = context
.environment
.read(&Variable::new(env_var.name.clone()));
let variable_value_result = match variable_value {
Result::Ok(value) => Result::Ok(value),
Err(err) => {
if err
== (ndc_calcite_schema::environment::Error::VariableNotPresent(
Variable::new(env_var.name.clone()),
))
{
Ok(default.to_string())
} else {
Err(anyhow::Error::msg(format!(
"Error reading the env var: {}",
env_var.name.clone()
)))
}
}
}?;
env_var_map.insert(env_var.name.clone(), variable_value_result);
}
// if not required and no default is present, return an
// empty value if the env var is not present. Note: if
// the type of the env var is not string, and the model
// file has a non-string placeholder (e.g. <$>FOO), it'll
// return just a space which will lead to a JSON parsing
// error. So, default values for non-string env vars are
// required.
(false, None) => {
let variable_value = context
.environment
.read(&Variable::new(env_var.name.clone()))
.unwrap_or_default();
env_var_map.insert(env_var.name.clone(), variable_value);
}
// if not required and default value is present, return the default value
(false, Some(default)) => {
let variable_value = context
.environment
.read(&Variable::new(env_var.name.clone()))
.unwrap_or(default.to_string());
env_var_map.insert(env_var.name.clone(), variable_value);
}
}
}

// Replace the placeholders in the model file with the environment variables

let model_file = config_path.join("model.json");
let mut model_file_value = if model_file.exists() {
let model_json_stringified = fs::read_to_string(model_file.clone()).await?;
Ok(model_json_stringified)
} else {
Err(anyhow::Error::msg("Model file does not exist"))
}?;

// for each env var present in the map from the metadata file, replace the placeholder in the model file
for (key, value) in &env_var_map {
// include the identifiers with the env var to avoid replacing the wrong value
let env_var_identifier = format!("{{{{{}}}}}", key);
model_file_value = model_file_value.replace(&env_var_identifier, value);
}

// Create a regex pattern to match `{{*}}`
let re = Regex::new(r"\{\{.*?\}\}").unwrap();

// check if there is any placeholder left in the model file, which means
// there is an extra env var which is not allowed in the metadata or there is
// a mismatch between the two files.
let final_model_string = if re.is_match(&model_file_value) {
Err(anyhow::Error::msg(
"Some environment variable placeholders are not updated in the model file",
))
} else {
Ok(model_file_value)
}?;
// convert the final model value to JSON value
let updated_model: serde_json::Value = serde_json::from_str(&final_model_string).map_err(|err|
anyhow::Error::msg(format!("Not a valid JSON (the default value of a non string env variable might be missing): {}", err))
)?;

fs::write(
model_file,
serde_json::to_string_pretty(&updated_model).unwrap(),
)
.await?;

// Introspect the database
let docker_config_path = &PathBuf::from(DOCKER_CONNECTOR_RW);
let config_path = if is_running_in_container() {
docker_config_path
@@ -163,16 +317,19 @@ async fn update(context: Context<impl Environment>, calcite_ref_singleton: &Calc
// We want to detect this scenario and retry, or fail if we are unable to.
// We do that with a few attempts.
for _attempt in 1..=UPDATE_ATTEMPTS {
let existing_configuration =
parse_configuration(config_path).await?;
let existing_configuration = parse_configuration(config_path).await?;
init_jvm(&existing_configuration);

let output =
introspect(existing_configuration.clone(), config_path, &context.environment, calcite_ref_singleton).await?;
let output = introspect(
existing_configuration.clone(),
config_path,
&context.environment,
calcite_ref_singleton,
)
.await?;

// Check that the input file did not change since we started introspecting,
let input_again_before_write =
parse_configuration(config_path).await?;
let input_again_before_write = parse_configuration(config_path).await?;

// and skip this attempt if it has.
if input_again_before_write == existing_configuration {
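As an aside, the placeholder substitution added to update() above can be illustrated with a small standalone sketch. Everything below is hypothetical — the helper name, the JSON snippet, and the SCHEMA_NAME variable are invented for illustration; only the `{{KEY}}` identifier format and the `\{\{.*?\}\}` leftover check mirror the diff:

use regex::Regex;
use std::collections::BTreeMap;

/// Replace `{{KEY}}` placeholders in a model.json string with env var values,
/// then fail if any placeholder is left unresolved, as update() does.
fn substitute_placeholders(
    model_json: &str,
    env_vars: &BTreeMap<String, String>,
) -> anyhow::Result<String> {
    let mut result = model_json.to_string();
    for (key, value) in env_vars {
        // Same identifier format as the diff: the key wrapped in double braces.
        let placeholder = format!("{{{{{}}}}}", key);
        result = result.replace(&placeholder, value);
    }
    // Same leftover check as the diff: any remaining `{{...}}` is an error.
    let re = Regex::new(r"\{\{.*?\}\}")?;
    if re.is_match(&result) {
        anyhow::bail!("Some environment variable placeholders are not updated in the model file");
    }
    Ok(result)
}

fn main() -> anyhow::Result<()> {
    let model = r#"{ "version": "1.0", "defaultSchema": "{{SCHEMA_NAME}}" }"#;
    let mut env_vars = BTreeMap::new();
    env_vars.insert("SCHEMA_NAME".to_string(), "sales".to_string());
    println!("{}", substitute_placeholders(model, &env_vars)?);
    Ok(())
}

Using a BTreeMap, as the diff does, also keeps the replacement order deterministic, which makes failures reproducible across runs.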
3 changes: 2 additions & 1 deletion crates/cli/src/metadata.rs
@@ -29,13 +29,14 @@ pub struct PrebuiltDockerImagePackaging {
pub docker_image: String,
}

#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct EnvironmentVariableDefinition {
pub name: String,
pub description: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub default_value: Option<String>,
pub required: bool
}

#[derive(Debug, Serialize, Deserialize)]
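Since update() now deserializes these definitions from .hasura-connector/connector-metadata.yaml (and clones them), a quick sketch of how a single entry serializes under the camelCase rename may help. The struct is copied from the metadata.rs diff above so the sketch is self-contained; the example values are assumptions, not taken from the repository:

use serde::{Deserialize, Serialize};

// Copied from the metadata.rs diff above.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct EnvironmentVariableDefinition {
    pub name: String,
    pub description: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub default_value: Option<String>,
    pub required: bool,
}

fn main() {
    let var = EnvironmentVariableDefinition {
        name: "MODEL_FILE".to_string(),
        description: "The calcite connection model file path".to_string(),
        // Hypothetical path; the real default is built from DOCKER_CONNECTOR_DIR.
        default_value: Some("/some/dir/models/model.json".to_string()),
        required: true,
    };
    // The camelCase rename means the keys come out as name, description,
    // defaultValue, required — the field names update() reads back from
    // the connector metadata file.
    println!("{}", serde_yaml::to_string(&var).unwrap());
}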
