diff --git a/Dockerfile b/Dockerfile
index bb3eef6..07c68d7 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,33 +1,20 @@
 # Use an official Python runtime as a parent image
-FROM python:3.11-slim
+FROM python:3.11

 RUN apt-get update
 RUN apt-get install -y cron

-# Install Poetry
-RUN pip install poetry
-
-ENV POETRY_NO_INTERACTION=1 \
-    POETRY_VIRTUALENVS_IN_PROJECT=1 \
-    POETRY_VIRTUALENVS_CREATE=1 \
-    POETRY_CACHE_DIR=/tmp/poetry_cache
-
-# Set the working directory in the container to /app
+# Set the working directory inside the container
 WORKDIR /app

-# Copy the Python script and poetry files (pyproject.toml, poetry.lock) into the container at /app
-COPY main.py pyproject.toml poetry.lock crontab /app/
+# Copy the requirements file to the working directory
+COPY requirements.txt .

 # Install project dependencies
-RUN poetry install && rm -rf $POETRY_CACHE_DIR
+RUN pip install --no-cache-dir -r requirements.txt

-### Enable this if you want to run this as a one off process
-## Run ecs_service_discovery.py when the container launches
-#CMD ["python", "./main.py"]
-
-COPY main.py crontab /app/
-
-RUN poetry install
+# Copy the Python script and the crontab into the container
+COPY main.py crontab ./

 # Add crontab file in the cron directory
 ADD crontab /etc/cron.d/ecs-service-discovery-cron
diff --git a/crontab b/crontab
index 97d5c8e..a563892 100644
--- a/crontab
+++ b/crontab
@@ -1,4 +1,4 @@
-# Run ecs_service_discovery.py every 3 minutes
-*/3 * * * * python /app/main.py --cluster_name="" --output_dir /shared_volume/ --scrape_port 9097 >> /var/log/cron.log 2>&1
+# Run ecs_service_discovery.py every minute
+* * * * * /usr/local/bin/python /app/main.py --cluster_name="" --output_dir /shared_volume/ --scrape_port 9097 --region "ap-south-1" >> /var/log/cron.log 2>&1

 # An empty line is required at the end of this file for a valid cron file.
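Note: the new crontab hard-codes both the region ("ap-south-1") and an empty --cluster_name, so changing either means rebuilding the image. A minimal sketch of an alternative, assuming main.py may be extended (the resolve_region helper below is hypothetical, not part of this diff):

```python
import os

def resolve_region(cli_region=None):
    """Hypothetical fallback: prefer the --region flag, then the standard
    AWS_DEFAULT_REGION environment variable, which boto3 honours natively."""
    return cli_region or os.environ.get("AWS_DEFAULT_REGION")
```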
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 220c64e..b9dd84c 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -8,6 +8,7 @@ services:
       context: .
       dockerfile: Dockerfile
     volumes:
+      - ~/.aws:/root/.aws:ro
       - ./shared_volume:/shared_volume
     network_mode: host
@@ -18,8 +19,8 @@ services:
       - "8429:8429"
     volumes:
       - ./vmagentdata:/vmagentdata
-      - ./prometheus.yml:/etc/vmagent/vmagent.yaml
-      - ./shared_volume:/shared_volume/ecs_file_sd_config.json
+      - ./vmagent.yaml:/etc/vmagent/vmagent.yaml
+      - ./shared_volume:/shared_volume/
     command:
       - "--promscrape.config=/etc/vmagent/vmagent.yaml"
       - "--remoteWrite.url="
diff --git a/main.py b/main.py
index e2c8ee9..cf69ccc 100644
--- a/main.py
+++ b/main.py
@@ -2,14 +2,14 @@
 import json
 import argparse
 import os
-from botocore.exceptions import ClientError
+from botocore.exceptions import ClientError, NoCredentialsError


-def assume_role(arn, session_name):
+def assume_role(arn, session_name, region):
     if not arn:
         # If no role ARN provided, return None to use default credentials
         return None
-    sts_client = boto3.client('sts')
+    sts_client = boto3.client('sts', region_name=region)
     try:
         assumed_role = sts_client.assume_role(RoleArn=arn, RoleSessionName=session_name)
         credentials = assumed_role['Credentials']
@@ -19,32 +19,34 @@ def assume_role(arn, session_name):
         return None


-def get_ecs_services(cluster_name, credentials=None):
+def get_ecs_services(cluster_name, region, credentials=None):
     if credentials:
         ecs = boto3.client(
             'ecs',
+            region_name=region,
             aws_access_key_id=credentials['AccessKeyId'],
             aws_secret_access_key=credentials['SecretAccessKey'],
             aws_session_token=credentials['SessionToken']
         )
     else:
-        ecs = boto3.client('ecs')
+        ecs = boto3.client('ecs', region_name=region)
     services = ecs.list_services(cluster=cluster_name)['serviceArns']
     detailed_services = ecs.describe_services(cluster=cluster_name, services=services)
     return detailed_services['services']


-def get_task_ips(cluster_name, service_name, credentials=None):
+def get_task_ips(cluster_name, service_name, region, credentials=None):
     if credentials:
         ecs = boto3.client(
             'ecs',
+            region_name=region,
             aws_access_key_id=credentials['AccessKeyId'],
             aws_secret_access_key=credentials['SecretAccessKey'],
             aws_session_token=credentials['SessionToken']
         )
     else:
-        ecs = boto3.client('ecs')
+        ecs = boto3.client('ecs', region_name=region)

     # List tasks for the given service
     task_arns = ecs.list_tasks(cluster=cluster_name, serviceName=service_name)['taskArns']
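Note: get_ecs_services and get_task_ips now duplicate the same region-aware client construction. A small factory, sketched below from the code in this diff, could remove the duplication (ecs_client is a hypothetical name, not part of the PR):

```python
import boto3

def ecs_client(region, credentials=None):
    """Build an ECS client for the given region, using assumed-role
    credentials when provided and the default credential chain otherwise."""
    if credentials:
        return boto3.client(
            'ecs',
            region_name=region,
            aws_access_key_id=credentials['AccessKeyId'],
            aws_secret_access_key=credentials['SecretAccessKey'],
            aws_session_token=credentials['SessionToken'],
        )
    return boto3.client('ecs', region_name=region)
```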
@@ -68,29 +71,39 @@ def main():
     parser.add_argument('--output_dir', type=str, help='Directory to output the JSON file', required=True)
     parser.add_argument('--role_arn', type=str, default=None, help='ARN of the role to assume (optional)')
     parser.add_argument('--scrape_port', type=str, default=None, help='Port number of the Scrape service', required=True)
+    parser.add_argument('--region', type=str, default=None, help='AWS Region', required=True)
+
     args = parser.parse_args()

-    credentials = assume_role(args.role_arn, 'ecs_sd_script') if args.role_arn else None
+    credentials = assume_role(args.role_arn, 'ecs_sd_script', args.region) if args.role_arn else None

     file_sd_config = []
-    for service in get_ecs_services(args.cluster_name, credentials):
-        service_name = service['serviceName']
-        ips = get_task_ips(args.cluster_name, service_name, credentials)
-        targets = [f"{ip}:{args.scrape_port}" for ip in ips]
-
-        file_sd_config.append({
-            "targets": targets,
-            "labels": {
-                "job": service_name,
-                "ecs_cluster": args.cluster_name,
-                "ecs_service_name": service_name
-            }
-        })
-
-    output_file = os.path.join(args.output_dir, 'ecs_file_sd_config.json')
-    with open(output_file, 'w') as file:
-        json.dump(file_sd_config, file, indent=4)
+    for service in get_ecs_services(args.cluster_name, args.region, credentials):
+        try:
+            service_name = service['serviceName']
+            ips = get_task_ips(args.cluster_name, service_name, args.region, credentials)
+            targets = [f"{ip}:{args.scrape_port}" for ip in ips]
+
+            file_sd_config.append({
+                "targets": targets,
+                "labels": {
+                    "job": service_name,
+                    "ecs_cluster": args.cluster_name,
+                    "ecs_service_name": service_name
+                }
+            })
+        except NoCredentialsError as e:
+            print(f"An error occurred: {e}")
+            file_sd_config.append({
+                "targets": [],
+                "labels": {}
+            })
+
+    # Write the config once, after all services have been processed
+    output_file = os.path.join(args.output_dir, 'ecs_file_sd_config.json')
+    with open(output_file, 'w') as file:
+        json.dump(file_sd_config, file, indent=4)


 if __name__ == "__main__":
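Note: the script emits a Prometheus file_sd-compatible JSON array; the shape below follows directly from the append calls above, with illustrative cluster, service, and IP values:

```python
# Illustrative contents of /shared_volume/ecs_file_sd_config.json
# (cluster, service, and IP values are made up):
example = [
    {
        "targets": ["10.0.1.23:9097"],
        "labels": {
            "job": "my-service",
            "ecs_cluster": "my-cluster",
            "ecs_service_name": "my-service"
        }
    }
]
```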
diff --git a/poetry.lock b/poetry.lock
deleted file mode 100644
index 0d8e80d..0000000
--- a/poetry.lock
+++ /dev/null
@@ -1,125 +0,0 @@
-# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
-
-[[package]]
-name = "argparse"
-version = "1.4.0"
-description = "Python command-line parsing library"
-optional = false
-python-versions = "*"
-files = [
-    {file = "argparse-1.4.0-py2.py3-none-any.whl", hash = "sha256:c31647edb69fd3d465a847ea3157d37bed1f95f19760b11a47aa91c04b666314"},
-    {file = "argparse-1.4.0.tar.gz", hash = "sha256:62b089a55be1d8949cd2bc7e0df0bddb9e028faefc8c32038cc84862aefdd6e4"},
-]
-
-[[package]]
-name = "boto3"
-version = "1.34.26"
-description = "The AWS SDK for Python"
-optional = false
-python-versions = ">= 3.8"
-files = [
-    {file = "boto3-1.34.26-py3-none-any.whl", hash = "sha256:881b07d0d55e5d85b62e6c965efcb2820bdfbd8f23a73a7bc9dac3a4997a1343"},
-    {file = "boto3-1.34.26.tar.gz", hash = "sha256:0491a65e55de999d07f42bb28ff6a38bad493934154b6304fcdfb4699a612d6c"},
-]
-
-[package.dependencies]
-botocore = ">=1.34.26,<1.35.0"
-jmespath = ">=0.7.1,<2.0.0"
-s3transfer = ">=0.10.0,<0.11.0"
-
-[package.extras]
-crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
-
-[[package]]
-name = "botocore"
-version = "1.34.26"
-description = "Low-level, data-driven core of boto 3."
-optional = false
-python-versions = ">= 3.8"
-files = [
-    {file = "botocore-1.34.26-py3-none-any.whl", hash = "sha256:4f3df0f6ed722e944d6f0eed964bc00b6489e50c6e8d5fdbbb68eb0c6c16c7c9"},
-    {file = "botocore-1.34.26.tar.gz", hash = "sha256:63543102467b3b5ba73903f11a14c3157ee442a360f3cb2f5316a8d6bc3e10e7"},
-]
-
-[package.dependencies]
-jmespath = ">=0.7.1,<2.0.0"
-python-dateutil = ">=2.1,<3.0.0"
-urllib3 = {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""}
-
-[package.extras]
-crt = ["awscrt (==0.19.19)"]
-
-[[package]]
-name = "jmespath"
-version = "1.0.1"
-description = "JSON Matching Expressions"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
-    {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
-]
-
-[[package]]
-name = "python-dateutil"
-version = "2.8.2"
-description = "Extensions to the standard Python datetime module"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
-files = [
-    {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
-    {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
-]
-
-[package.dependencies]
-six = ">=1.5"
-
-[[package]]
-name = "s3transfer"
-version = "0.10.0"
-description = "An Amazon S3 Transfer Manager"
-optional = false
-python-versions = ">= 3.8"
-files = [
-    {file = "s3transfer-0.10.0-py3-none-any.whl", hash = "sha256:3cdb40f5cfa6966e812209d0994f2a4709b561c88e90cf00c2696d2df4e56b2e"},
-    {file = "s3transfer-0.10.0.tar.gz", hash = "sha256:d0c8bbf672d5eebbe4e57945e23b972d963f07d82f661cabf678a5c88831595b"},
-]
-
-[package.dependencies]
-botocore = ">=1.33.2,<2.0a.0"
-
-[package.extras]
-crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"]
-
-[[package]]
-name = "six"
-version = "1.16.0"
-description = "Python 2 and 3 compatibility utilities"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
-files = [
-    {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
-    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
-]
-
-[[package]]
-name = "urllib3"
-version = "2.0.7"
-description = "HTTP library with thread-safe connection pooling, file post, and more."
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"},
-    {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"},
-]
-
-[package.extras]
-brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
-secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
-socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
-zstd = ["zstandard (>=0.18.0)"]
-
-[metadata]
-lock-version = "2.0"
-python-versions = "^3.11"
-content-hash = "0028fc1eaafb535a47bc426050248ad07b99bd3fa68a500ae93bad723328d659"
diff --git a/pyproject.toml b/pyproject.toml
deleted file mode 100644
index b131964..0000000
--- a/pyproject.toml
+++ /dev/null
@@ -1,17 +0,0 @@
-[tool.poetry]
-name = "prometheus-ecs-sd"
-version = "0.1.0"
-description = "This generates a Prometheus `file_sd_config` compatible yaml file with all the metadata required by Prometheus to iscrape ECS containers."
-authors = ["Aniket Rao "]
-license = "MIT"
-readme = "README.md"
-
-[tool.poetry.dependencies]
-python = "^3.11"
-boto3 = "^1.34.26"
-argparse = "^1.4.0"
-
-
-[build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..0fedf95
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,7 @@
+boto3==1.34.58
+botocore==1.34.58
+jmespath==1.0.1
+python-dateutil==2.9.0.post0
+s3transfer==0.10.0
+six==1.16.0
+urllib3==2.0.7
diff --git a/vmagent.yaml b/vmagent.yaml
new file mode 100644
index 0000000..da532d4
--- /dev/null
+++ b/vmagent.yaml
@@ -0,0 +1,12 @@
+global:
+  scrape_interval: 1m
+  scrape_timeout: 20s
+# Check https://prometheus.io/docs/prometheus/latest/configuration/configuration for more details
+scrape_configs:
+  - job_name: "ecs_service_scraper"
+    file_sd_configs:
+      - files:
+          - /shared_volume/ecs_file_sd_config.json
+  - job_name: "vmagent"
+    static_configs:
+      - targets: [ "localhost:8429" ]
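Note: vmagent re-reads file_sd target files on its own schedule, so a quick way to confirm the cron job and volume wiring is to validate the generated file from the host. A minimal sketch, assuming the ./shared_volume bind mount from docker-compose.yaml:

```python
import json

# Sanity-check the generated target file; the path assumes the
# ./shared_volume bind mount defined in docker-compose.yaml.
with open("shared_volume/ecs_file_sd_config.json") as f:
    config = json.load(f)

assert isinstance(config, list), "file_sd config must be a JSON array"
for entry in config:
    assert "targets" in entry and "labels" in entry, f"malformed entry: {entry}"

total = sum(len(entry["targets"]) for entry in config)
print(f"{total} scrape targets across {len(config)} services")
```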