Add github actions for tf lint check (#296)
umeshkumhar committed Mar 6, 2024
1 parent 13be33c commit ffd5a19
Showing 12 changed files with 54 additions and 19 deletions.
35 changes: 35 additions & 0 deletions .github/workflows/ci.yaml
@@ -0,0 +1,35 @@
name: Terraform CI
on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
jobs:
  Terraform-Lint-Check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: hashicorp/setup-terraform@v3
        with:
          terraform_version: "1.5.7"

      - name: Terraform fmt
        id: fmt
        run: terraform fmt -check -recursive

      - name: Terraform Init
        id: init
        run: |
          terraform -chdir=applications/rag init
          terraform -chdir=applications/ray init
          terraform -chdir=applications/jupyter init
      - name: Terraform Validate
        id: validate
        run: |
          terraform -chdir=applications/rag validate -no-color
          terraform -chdir=applications/ray validate -no-color
          terraform -chdir=applications/jupyter validate -no-color
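
The same checks can be reproduced locally before pushing; this is a minimal sketch that mirrors the CI job above, assuming Terraform 1.5.7 (or newer) is installed and the commands run from the repository root:

# Mirrors the Terraform CI job: format check, then init and validate each application.
terraform fmt -check -recursive   # exits non-zero if any file is not canonically formatted

for app in rag ray jupyter; do
  terraform -chdir="applications/$app" init
  terraform -chdir="applications/$app" validate -no-color
done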
8 changes: 4 additions & 4 deletions applications/jupyter/workloads-without-iap.example.tfvars
@@ -26,10 +26,10 @@ cluster_membership_id = "" # required only for private clusters, default: cluste
#######################################################

## JupyterHub variables
-namespace = "jupyter"
-gcs_bucket = "<gcs-bucket>"
-create_gcs_bucket = true
-workload_identity_service_account = "jupyter-service-account"
+namespace                         = "jupyter"
+gcs_bucket                        = "<gcs-bucket>"
+create_gcs_bucket                 = true
+workload_identity_service_account = "jupyter-service-account"

# Jupyterhub without IAP
add_auth = false
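
The remaining hunks in this commit are whitespace-only fixes of the same kind: terraform fmt normalizes indentation and aligns the = signs of consecutive single-line arguments to the longest name in each block. A minimal illustration, using hypothetical attribute names and values:

# Before: terraform fmt -check -recursive flags this block
bucket_name = "my-bucket"
workload_identity_service_account = "jupyter-sa"

# After terraform fmt: "=" aligned to the longest argument name in the block
bucket_name                       = "my-bucket"
workload_identity_service_account = "jupyter-sa"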
4 changes: 2 additions & 2 deletions applications/rag/variables.tf
@@ -265,9 +265,9 @@ variable "autopilot_cluster" {
}

variable "cloudsql_instance" {
-type = string
+type        = string
description = "Name of the CloudSQL instance for RAG VectorDB"
-default = "pgvector-instance"
+default     = "pgvector-instance"
}

variable "cpu_pools" {
2 changes: 1 addition & 1 deletion applications/rag/workloads.tfvars
@@ -38,7 +38,7 @@ rag_service_account = "rag-system-account"

# Creates a google service account & k8s service account & configures workload identity with appropriate permissions.
# Set to false & update the variable `jupyter_service_account` to use an existing IAM service account.
-jupyter_service_account = "jupyter-system-account"
+jupyter_service_account = "jupyter-system-account"

## Embeddings table name - change this to the TABLE_NAME used in the notebook.
dataset_embeddings_table_name = "googlemaps_reviews_db"
@@ -15,7 +15,7 @@
terraform {
required_providers {
google = {
-source = "hashicorp/google"
+source = "hashicorp/google"
}
kubernetes = {
source = "hashicorp/kubernetes"
4 changes: 2 additions & 2 deletions applications/ray/versions.tf
@@ -15,10 +15,10 @@
terraform {
required_providers {
google = {
-source = "hashicorp/google"
+source = "hashicorp/google"
}
google-beta = {
-source = "hashicorp/google-beta"
+source = "hashicorp/google-beta"
}
helm = {
source = "hashicorp/helm"
@@ -21,5 +21,5 @@ tokenizer = "tiiuae/falcon-7b"
# Benchmark configuration for triggering single test via Locust Runner
test_duration = 60
# Increase test_users to allow more parallelism (especially when testing HPA)
-test_users = 1
-test_rate = 5
+test_users = 1
+test_rate = 5
@@ -66,8 +66,8 @@ gpu_pools = [{
accelerator_count = 2
accelerator_type = "nvidia-tesla-t4"
gpu_driver_version = "LATEST"
-},
-{
+},
+{
name = "gpu-pool-l4"
machine_type = "g2-standard-24"
node_locations = "us-central1-a"
4 changes: 2 additions & 2 deletions modules/iap/iap.tf
@@ -36,7 +36,7 @@ resource "helm_release" "iap_jupyter" {
name = "iap-jupyter"
chart = "${path.module}/charts/iap/"
namespace = var.namespace
-create_namespace = true
+create_namespace = true
# timeout increased to support autopilot scaling resources, and give enough time to complete the deployment
timeout = 1200
set {
@@ -108,7 +108,7 @@ resource "helm_release" "iap_frontend" {
name = "iap-frontend"
chart = "${path.module}/charts/iap/"
namespace = var.namespace
-create_namespace = true
+create_namespace = true
# timeout increased to support autopilot scaling resources, and give enough time to complete the deployment
timeout = 1200
set {
2 changes: 1 addition & 1 deletion modules/iap/variables.tf
@@ -138,7 +138,7 @@ variable "jupyter_k8s_backend_service_name" {
variable "jupyter_k8s_backend_service_port" {
type = number
description = "Name of the Backend Service Port"
-default = 80
+default     = 80
}

variable "jupyter_url_domain_addr" {
2 changes: 1 addition & 1 deletion modules/kuberay-monitoring/main.tf
@@ -47,7 +47,7 @@ resource "helm_release" "grafana" {
}

data "kubernetes_service" "example" {
-count = var.enable_grafana_on_ray_dashboard ? 1 : 0
+count = var.enable_grafana_on_ray_dashboard ? 1 : 0
metadata {
name = "grafana"
namespace = var.namespace
2 changes: 1 addition & 1 deletion tutorials/hf-tgi/outputs.tf
@@ -24,5 +24,5 @@ output "inference_service_namespace" {

output "inference_service_endpoint" {
description = "Endpoint of model inference service"
-value = kubernetes_service.inference_service.status != null ? (kubernetes_service.inference_service.status[0].load_balancer != null ? "${kubernetes_service.inference_service.status[0].load_balancer[0].ingress[0].ip}" : "") : ""
+value       = kubernetes_service.inference_service.status != null ? (kubernetes_service.inference_service.status[0].load_balancer != null ? "${kubernetes_service.inference_service.status[0].load_balancer[0].ingress[0].ip}" : "") : ""
}
