Commit 9e96442

Merge pull request #330 from unoplat/329-docker-compose-for-prod-testing

feat: Enhance Docker Compose Configuration for Code Confluence Flow B…

JayGhiya authored Feb 8, 2025
2 parents d5d1281 + dcdc313 commit 9e96442

Showing 4 changed files with 186 additions and 18 deletions.
@@ -0,0 +1,150 @@
version: "3.5"

# Add default config
configs:
  temporal-dynamic-config:
    content: |
      system.forceSearchAttributesCacheRefreshOnRead:
        - value: true
          constraints: {}
      limit.maxIDLength:
        - value: 255
          constraints: {}

services:
  elasticsearch:
    container_name: temporal-elasticsearch
    environment:
      - cluster.routing.allocation.disk.threshold_enabled=true
      - cluster.routing.allocation.disk.watermark.low=512mb
      - cluster.routing.allocation.disk.watermark.high=256mb
      - cluster.routing.allocation.disk.watermark.flood_stage=128mb
      - discovery.type=single-node
      - ES_JAVA_OPTS=-Xms256m -Xmx256m
      - xpack.security.enabled=false
    image: elasticsearch:7.17.27
    networks:
      - temporal-network
    expose:
      - 9200
    volumes:
      - /var/lib/elasticsearch/data
  postgresql:
    container_name: temporal-postgresql
    environment:
      POSTGRES_PASSWORD: temporal
      POSTGRES_USER: temporal
    image: postgres:13
    networks:
      - temporal-network
    expose:
      - 5432
    volumes:
      - /var/lib/postgresql/data
  temporal:
    container_name: temporal
    depends_on:
      - postgresql
      - elasticsearch
    environment:
      - DB=postgres12
      - DB_PORT=5432
      - POSTGRES_USER=temporal
      - POSTGRES_PWD=temporal
      - POSTGRES_SEEDS=postgresql
      - DYNAMIC_CONFIG_FILE_PATH=config/dynamicconfig/development-sql.yaml
      - ENABLE_ES=true
      - ES_SEEDS=elasticsearch
      - ES_VERSION=v7
      - TEMPORAL_CLI_ADDRESS=temporal:7233
    image: temporalio/auto-setup:1.26.2
    networks:
      - temporal-network
    ports:
      - 7233:7233
    configs:
      - source: temporal-dynamic-config
        target: /etc/temporal/config/dynamicconfig/development-sql.yaml
    healthcheck:
      test: ["CMD-SHELL", "tctl cluster health | grep -i SERVING || exit 1"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 30s
  temporal-admin-tools:
    container_name: temporal-admin-tools
    depends_on:
      - temporal
    environment:
      - TEMPORAL_ADDRESS=temporal:7233
      - TEMPORAL_CLI_ADDRESS=temporal:7233
    image: temporalio/admin-tools:1.26.2
    networks:
      - temporal-network
    stdin_open: true
    tty: true
  temporal-ui:
    container_name: temporal-ui
    depends_on:
      - temporal
    environment:
      - TEMPORAL_ADDRESS=temporal:7233
      - TEMPORAL_CORS_ORIGINS=http://localhost:3000
      - TEMPORAL_CSRF_COOKIE_INSECURE=True
    image: temporalio/ui:2.34.0
    networks:
      - temporal-network
    ports:
      - 8080:8080
  neo4j:
    container_name: neo4j
    image: graphstack/dozerdb:5.25.1.0-alpha.1
    ports:
      - "7474:7474"
      - "7687:7687"
    volumes:
      - ${HOME}/neo4j/data:/data
      - ${HOME}/neo4j/logs:/logs
      - ${HOME}/neo4j/import:/var/lib/neo4j/import
      - ${HOME}/neo4j/plugins:/plugins
    environment:
      NEO4J_AUTH: neo4j/password
      NEO4J_PLUGINS: '["apoc"]'
      NEO4J_apoc_export_file_enabled: "true"
      NEO4J_apoc_import_file_enabled: "true"
      NEO4J_dbms_security_procedures_unrestricted: "*"
    networks:
      - temporal-network
    healthcheck:
      test: ["CMD-SHELL", "neo4j status | grep -q 'Neo4j is running' || exit 1"]
      interval: 5s
      timeout: 5s
      retries: 5
  code-confluence-flow-bridge:
    container_name: code-confluence-flow-bridge
    environment:
      - NEO4J_HOST=neo4j
      - NEO4J_PORT=7687
      - NEO4J_USERNAME=neo4j
      - NEO4J_PASSWORD=password
      - TEMPORAL_SERVER_ADDRESS=temporal:7233
    image: ghcr.io/unoplat/code-confluence-flow-bridge:0.8.0
    depends_on:
      temporal:
        condition: service_healthy
      neo4j:
        condition: service_healthy
    ports:
      - "8000:8000"
    networks:
      - temporal-network
    stdin_open: true
    tty: true

networks:
  temporal-network:
    driver: bridge
    name: temporal-network

# volumes:
#   temporal-shared-data:         # Define the shared volume
#     name: temporal-shared-data  # Optional: explicitly name the volume
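Reviewer note: a quick way to confirm the stack above is actually reachable from the host is a small connectivity check. The following is a minimal sketch, not part of this commit, assuming the stack is running (e.g. via docker compose up -d) and the temporalio and neo4j Python packages are installed; the address and credentials mirror the defaults in the file above.

# smoke_test.py -- hypothetical helper, not part of this PR
# Checks that the Temporal server and Neo4j (DozerDB) containers are reachable.
import asyncio

from neo4j import GraphDatabase
from temporalio.client import Client


async def main() -> None:
    # Temporal server is published on host port 7233 (see "ports" above);
    # Client.connect raises if the server is not reachable.
    temporal_client = await Client.connect("localhost:7233")
    print(f"Temporal reachable, namespace in use: {temporal_client.namespace}")

    # Neo4j bolt port 7687; credentials from NEO4J_AUTH=neo4j/password above.
    driver = GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "password"))
    driver.verify_connectivity()
    print("Neo4j is reachable")
    driver.close()


if __name__ == "__main__":
    asyncio.run(main())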
@@ -1,4 +1,16 @@
 version: "3.5"
 
+# Add default config
+configs:
+  temporal-dynamic-config:
+    content: |
+      system.forceSearchAttributesCacheRefreshOnRead:
+        - value: true
+          constraints: {}
+      limit.maxIDLength:
+        - value: 255
+          constraints: {}
 services:
   elasticsearch:
     container_name: temporal-elasticsearch
@@ -10,7 +22,7 @@ services:
       - discovery.type=single-node
       - ES_JAVA_OPTS=-Xms256m -Xmx256m
       - xpack.security.enabled=false
-    image: elasticsearch:${ELASTICSEARCH_VERSION}
+    image: elasticsearch:7.17.27
     networks:
       - temporal-network
     expose:
@@ -22,7 +34,7 @@ services:
     environment:
       POSTGRES_PASSWORD: temporal
       POSTGRES_USER: temporal
-    image: postgres:${POSTGRESQL_VERSION}
+    image: postgres:13
     networks:
       - temporal-network
     expose:
@@ -40,18 +52,18 @@ services:
       - POSTGRES_USER=temporal
       - POSTGRES_PWD=temporal
       - POSTGRES_SEEDS=postgresql
       - DYNAMIC_CONFIG_FILE_PATH=config/dynamicconfig/development-sql.yaml
       - ENABLE_ES=true
       - ES_SEEDS=elasticsearch
       - ES_VERSION=v7
       - TEMPORAL_CLI_ADDRESS=temporal:7233
-    image: temporalio/auto-setup:${TEMPORAL_VERSION}
+    image: temporalio/auto-setup:1.26.2
     networks:
       - temporal-network
     ports:
       - 7233:7233
-    volumes:
-      - ./dynamicconfig:/etc/temporal/config/dynamicconfig
+    configs:
+      - source: temporal-dynamic-config
+        target: /etc/temporal/config/dynamicconfig/development-sql.yaml
     healthcheck:
       test: ["CMD-SHELL", "tctl cluster health | grep -i SERVING || exit 1"]
       interval: 10s
@@ -65,7 +77,7 @@ services:
     environment:
       - TEMPORAL_ADDRESS=temporal:7233
       - TEMPORAL_CLI_ADDRESS=temporal:7233
-    image: temporalio/admin-tools:${TEMPORAL_ADMINTOOLS_VERSION}
+    image: temporalio/admin-tools:1.26.2
     networks:
       - temporal-network
     stdin_open: true
@@ -78,7 +90,7 @@ services:
       - TEMPORAL_ADDRESS=temporal:7233
       - TEMPORAL_CORS_ORIGINS=http://localhost:3000
       - TEMPORAL_CSRF_COOKIE_INSECURE=True
-    image: temporalio/ui:${TEMPORAL_UI_VERSION}
+    image: temporalio/ui:2.34.0
     networks:
       - temporal-network
     ports:
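Reviewer note: the substance of this file's change is pinning every ${...} version placeholder to an explicit image tag, and replacing the bind-mounted ./dynamicconfig directory with a top-level configs element using inline content (which removes the host-path dependency, but to my knowledge requires a fairly recent Docker Compose release that supports inline config content). A throwaway guard like the sketch below, my illustration rather than anything in this PR, assuming PyYAML and a file named docker-compose.yml, can keep unpinned images from creeping back in:

# check_pinned_images.py -- hypothetical guard, not part of this PR
import yaml

with open("docker-compose.yml") as f:
    compose = yaml.safe_load(f)

for name, service in compose.get("services", {}).items():
    image = service.get("image", "")
    # Fail on env-var placeholders and on images with no explicit tag
    assert "${" not in image, f"{name} still uses an env placeholder: {image}"
    assert ":" in image, f"{name} has no explicit tag: {image}"
    print(f"{name}: {image}")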
@@ -53,7 +53,7 @@ def clone_repository(self, repository_settings: RepositorySettings, github_token
         github_repo = github_client.get_repo(repo_path)
 
         # Create local directory if it doesn't exist
-        local_path: str = os.path.join(os.path.expanduser("~"), ".unoplat", "repositories")
+        local_path = os.path.join(os.path.expanduser("~"), ".unoplat", "repositories")
         os.makedirs(local_path, exist_ok=True)
         # Reassign repo_path to the local clone path
         repo_path = os.path.join(local_path, repo_name)
@@ -62,7 +62,9 @@ def clone_repository(self, repository_settings: RepositorySettings, github_token
         if not os.path.exists(repo_path):
             Repo.clone_from(repo_url, repo_path)
         # Activity-based logging: log the repository's local clone path
-        logger.info(f"Temporal Repository is available at local path: {repo_path} in activity {activity.info}")
+        activity.logger.info(f"[Temporal] Repository is available at local path: {repo_path}")
+        # Pass-through logging using Loguru's logger
+        logger.info(f"[Temporal-Python @Web] Repository is available at local path: {repo_path}")
 
         # Get repository metadata
         repo_metadata: Dict[str, Any] = {
@@ -102,7 +104,9 @@ def clone_repository(self, repository_settings: RepositorySettings, github_token
             raise Exception("Root package should be specified for python codebases")
 
         # NEW: Log the computed local path for the codebase
-        logger.info(f"[Temporal] Codebase local path computed as: {local_path}")
+        activity.logger.info(f"[Temporal] Codebase local path computed as: {local_path}")
+        # Pass-through logging using Loguru's logger
+        logger.info(f"[Temporal-Python @Web] Codebase local path computed as: {local_path}")
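Reviewer note: the change above swaps a bare Loguru call for activity.logger, the context-aware logger the temporalio SDK exposes inside activities; it enriches each record with activity and workflow metadata that a plain logger cannot see. A minimal sketch of the pattern, using the real temporalio API but a hypothetical activity name:

from temporalio import activity


@activity.defn
async def clone_example(repo_url: str) -> str:
    # activity.logger tags records with the running activity/workflow context
    activity.logger.info(f"Cloning {repo_url}")
    local_path = "/tmp/example"  # placeholder for the real clone step
    activity.logger.info(f"Repository available at {local_path}")
    return local_path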
@@ -1,6 +1,5 @@
from datetime import timedelta

from loguru import logger
from temporalio import workflow

with workflow.unsafe.imports_passed_through():
Expand All @@ -10,6 +9,9 @@
from src.code_confluence_flow_bridge.processor.package_metadata_activity.package_manager_metadata_activity import PackageMetadataActivity
from src.code_confluence_flow_bridge.processor.package_metadata_activity.package_manager_metadata_ingestion import PackageManagerMetadataIngestion





@workflow.defn(name="child-codebase-workflow")
class CodebaseChildWorkflow:
Expand All @@ -19,23 +21,23 @@ def __init__(self):
@workflow.run
async def run(self, repository_qualified_name: str, codebase_qualified_name: str, local_path: str, source_directory: str, package_manager_metadata: UnoplatPackageManagerMetadata) -> None:
"""Execute the codebase workflow"""
logger.info(f"Starting codebase workflow for {codebase_qualified_name}")
workflow.logger.info(f"Starting codebase workflow for {codebase_qualified_name}")

# 1. Parse package metadata
logger.info(f"Creating programming language metadata for {package_manager_metadata.programming_language}")
workflow.logger.info(f"Creating programming language metadata for {package_manager_metadata.programming_language}")
programming_language_metadata = ProgrammingLanguageMetadata(language=ProgrammingLanguage(package_manager_metadata.programming_language.lower()), package_manager=PackageManagerType(package_manager_metadata.package_manager.lower()), language_version=package_manager_metadata.programming_language_version)

logger.info(f"Parsing package metadata for {codebase_qualified_name}")
workflow.logger.info(f"Parsing package metadata for {codebase_qualified_name}")
parsed_metadata: UnoplatPackageManagerMetadata = await workflow.execute_activity(activity=PackageMetadataActivity.get_package_metadata, args=[source_directory, programming_language_metadata], start_to_close_timeout=timedelta(minutes=10))

# 2. Ingest package metadata into graph
logger.info(f"Ingesting package metadata for {codebase_qualified_name} into graph")
workflow.logger.info(f"Ingesting package metadata for {codebase_qualified_name} into graph")
await workflow.execute_activity(activity=PackageManagerMetadataIngestion.insert_package_manager_metadata, args=[codebase_qualified_name, parsed_metadata], start_to_close_timeout=timedelta(minutes=10))

programming_language_metadata.language_version = parsed_metadata.programming_language_version

# 3. Process codebase (linting, AST generation, parsing)
logger.info(f"Processing codebase for {codebase_qualified_name}")
workflow.logger.info(f"Processing codebase for {codebase_qualified_name}")
await workflow.execute_activity(
activity=CodebaseProcessingActivity.process_codebase,
args=[
Expand All @@ -49,6 +51,6 @@ async def run(self, repository_qualified_name: str, codebase_qualified_name: str
start_to_close_timeout=timedelta(minutes=30)
)

logger.info(f"Codebase workflow completed successfully for {codebase_qualified_name}")
workflow.logger.info(f"Codebase workflow completed successfully for {codebase_qualified_name}")

