diff --git a/.adr-dir b/.adr-dir new file mode 100644 index 00000000..0d38988a --- /dev/null +++ b/.adr-dir @@ -0,0 +1 @@ +doc/architecture/decisions diff --git a/.github/CODE_OF_CONDUCT.md b/.github/CODE_OF_CONDUCT.md index 92a0f068..33a68541 100644 --- a/.github/CODE_OF_CONDUCT.md +++ b/.github/CODE_OF_CONDUCT.md @@ -68,7 +68,7 @@ offensive, or harmful. This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of -representing a project or community include using an official project e-mail +representing a project or community include using an official project email address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. @@ -76,9 +76,9 @@ further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported by contacting the EGI Foundation team at contact@egi.eu. The team will -review and investigate all complaints, and will respond in a way that it deems -appropriate to the circumstances. The team is obligated to maintain +reported by contacting the [EGI Foundation team](mailto:contact@egi.eu). The +team will review and investigate all complaints, and will respond in a way that +it deems appropriate to the circumstances. The team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. diff --git a/.github/workflows/deploy.yml b/.github/workflows/cloud-info-deploy.yml similarity index 73% rename from .github/workflows/deploy.yml rename to .github/workflows/cloud-info-deploy.yml index e6f6a9ec..4e76eb64 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/cloud-info-deploy.yml @@ -7,7 +7,7 @@ on: - main pull_request: paths: - - "deploy/**" + - "/cloud-info/deploy/**" jobs: terraform: @@ -25,20 +25,20 @@ jobs: curl -L https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 > jq chmod +x jq pip install yq git+https://github.com/tdviet/fedcloudclient.git - curl -L https://github.com/oidc-mytoken/client/releases/download/v0.3.0/mytoken_0.3.0_Linux_x86_64.tar.gz \ - | tar -xzf - - mkdir ~/.mytoken - curl https://raw.githubusercontent.com/oidc-mytoken/client/master/config/example-config.yaml > ~/.mytoken/config.yaml - name: Configure providers access env: - MYTOKEN: ${{ secrets.MYTOKEN }} REFRESH_TOKEN: ${{ secrets.REFRESH_TOKEN }} + ANSIBLE_SECRETS: ${{ secrets.ANSIBLE_SECRETS }} run: | + # using parametric scopes to only have access to cloud.egi.eu VO + SCOPE="openid%20email%20profile%20voperson_id" + SCOPE="$SCOPE%20eduperson_entitlement:urn:mace:egi.eu:group:cloud.egi.eu:role=vm_operator#aai.egi.eu" + SCOPE="$SCOPE%20eduperson_entitlement:urn:mace:egi.eu:group:cloud.egi.eu:role=member#aai.egi.eu" OIDC_TOKEN=$(curl -X POST "https://aai.egi.eu/auth/realms/egi/protocol/openid-connect/token" \ - -d "grant_type=refresh_token&refresh_token=$REFRESH_TOKEN&client_id=token-portal&scope=openid%20email%20profile%20voperson_id%20eduperson_entitlement" \ + -d "grant_type=refresh_token&client_id=token-portal&scope=$SCOPE&refresh_token=$REFRESH_TOKEN" \ | jq -r ".access_token") echo "::add-mask::$OIDC_TOKEN" - cd deploy + cd cloud-info/deploy BACKEND_SITE="$(yq -r .clouds.backend.site clouds.yaml)" BACKEND_VO="$(yq -r .clouds.backend.vo clouds.yaml)" 
EGI_SITE="$(yq -r .clouds.deploy.site clouds.yaml)" @@ -54,6 +54,12 @@ jobs: sed -i -e "s/deploy_secret/$DEPLOY_OS_TOKEN/" clouds.yaml mkdir -p ~/.config/openstack touch ~/.config/openstack/secure.yaml + FEDCLOUD_LOCKER_TOKEN="$(fedcloud secret locker create \ + --oidc-access-token "$OIDC_TOKEN" \ + --ttl 1h --num-uses 2)" + echo "::add-mask::$FEDCLOUD_LOCKER_TOKEN" + fedcloud secret put --locker-token "$FEDCLOUD_LOCKER_TOKEN" deploy "data=$ANSIBLE_SECRETS" + echo "FEDCLOUD_LOCKER_TOKEN=$FEDCLOUD_LOCKER_TOKEN" >> "$GITHUB_ENV" - name: Setup Terraform uses: hashicorp/setup-terraform@v3 with: @@ -61,32 +67,29 @@ jobs: - name: Terraform Format id: fmt run: | - cd deploy + cd cloud-info/deploy terraform fmt -check - name: Terraform init id: init run: | - cd deploy + cd cloud-info/deploy terraform init - name: Adjust cloud-init file env: SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} - ANSIBLE_SECRETS: ${{ secrets.ANSIBLE_SECRETS }} run: | - cd deploy + cd cloud-info/deploy sed -i -e "s/%TOKEN%/${{ secrets.GITHUB_TOKEN }}/" cloud-init.yaml sed -i -e "s/%REF%/${{ github.sha }}/" cloud-init.yaml sed -i -e "s/%SHORT_REF%/$(git rev-parse --short HEAD)/" cloud-init.yaml sed -i -e "s#%SLACK_WEBHOOK_URL%#$SLACK_WEBHOOK_URL#" cloud-init.yaml - ANSIBLE_ENCODED_SECRETS="$(echo "$ANSIBLE_SECRETS" | base64 -w 0)" - echo "::add-mask::$ANSIBLE_ENCODED_SECRETS" - sed -i -e "s/%ANSIBLE_SECRETS%/$ANSIBLE_ENCODED_SECRETS/" cloud-init.yaml + sed -i -e "s/%FEDCLOUD_LOCKER_TOKEN%/$FEDCLOUD_LOCKER_TOKEN/" cloud-init.yaml sed -i -e "s/%CLOUDS_YAML%/$(base64 -w 0 < clouds.yaml)/" cloud-init.yaml - name: terraform plan id: plan if: github.event_name == 'pull_request' run: | - cd deploy + cd cloud-info/deploy terraform plan -no-color -var-file="$EGI_SITE.tfvars" continue-on-error: true - name: Update Pull Request @@ -122,29 +125,34 @@ jobs: id: terraform-apply if: github.ref == 'refs/heads/main' && github.event_name == 'push' run: | - cd deploy + cd cloud-info/deploy terraform apply -auto-approve -var-file="$EGI_SITE.tfvars" - name: Get VM ID id: terraform-vm-id if: github.ref == 'refs/heads/main' && github.event_name == 'push' run: | - cd deploy + cd cloud-info/deploy terraform output -raw instance-id - name: Re-configure providers access env: - MYTOKEN: ${{ secrets.MYTOKEN }} REFRESH_TOKEN: ${{ secrets.REFRESH_TOKEN }} run: | + # using parametric scopes to only have access to cloud.egi.eu VO + SCOPE="openid%20email%20profile%20voperson_id" + SCOPE="$SCOPE%20eduperson_entitlement:urn:mace:egi.eu:group:cloud.egi.eu:role=vm_operator#aai.egi.eu" + SCOPE="$SCOPE%20eduperson_entitlement:urn:mace:egi.eu:group:cloud.egi.eu:role=member#aai.egi.eu" OIDC_TOKEN=$(curl -X POST "https://aai.egi.eu/auth/realms/egi/protocol/openid-connect/token" \ - -d "grant_type=refresh_token&refresh_token=$REFRESH_TOKEN&client_id=token-portal&scope=openid%20email%20profile%20voperson_id%20eduperson_entitlement" \ + -d "grant_type=refresh_token&refresh_token=$REFRESH_TOKEN&client_id=token-portal&scope=$SCOPE" \ | jq -r ".access_token") echo "::add-mask::$OIDC_TOKEN" - cd deploy + cd cloud-info/deploy + git checkout -- clouds.yaml BACKEND_SITE="$(yq -r .clouds.backend.site clouds.yaml)" BACKEND_VO="$(yq -r .clouds.backend.vo clouds.yaml)" BACKEND_OS_TOKEN="$(fedcloud openstack token issue --oidc-access-token "$OIDC_TOKEN" \ --site "$BACKEND_SITE" --vo "$BACKEND_VO" -j | jq -r '.[0].Result.id')" echo "::add-mask::$BACKEND_OS_TOKEN" + echo "BACKEND_OS_TOKEN=$BACKEND_OS_TOKEN" >> "$GITHUB_ENV" sed -i -e "s/backend_secret/$BACKEND_OS_TOKEN/" 
clouds.yaml mkdir -p ~/.config/openstack touch ~/.config/openstack/secure.yaml @@ -156,14 +164,13 @@ jobs: max_attempts: 20 retry_wait_seconds: 40 command: > - set -x && - pushd deploy && - openstack --os-cloud backend object save fedcloud-catchall "${{ steps.terraform-vm-id.outputs.stdout }}" && - openstack --os-cloud backend object delete fedcloud-catchall "${{ steps.terraform-vm-id.outputs.stdout }}" + pushd cloud-info/deploy && + openstack --os-cloud backend --os-token "$BACKEND_OS_TOKEN" object save fedcloud-catchall "${{ steps.terraform-vm-id.outputs.stdout }}" && + openstack --os-cloud backend --os-token "$BACKEND_OS_TOKEN" object delete fedcloud-catchall "${{ steps.terraform-vm-id.outputs.stdout }}" - name: Look for errors if: github.ref == 'refs/heads/main' && github.event_name == 'push' run: | - cd deploy + cd cloud-info/deploy # show the status in the build log cat "${{ steps.terraform-vm-id.outputs.stdout }}" grep -v "error" "${{ steps.terraform-vm-id.outputs.stdout }}" diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 32afcb45..d6c4db37 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -17,6 +17,7 @@ jobs: image: - cloud-info - caso + - atrope steps: - name: Checkout @@ -40,7 +41,7 @@ jobs: password: ${{ secrets.GITHUB_TOKEN }} if: github.event_name != 'pull_request' - name: Build - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: push: ${{ github.event_name != 'pull_request' }} tags: ${{ steps.docker_meta.outputs.tags }} diff --git a/.github/workflows/image-sync-deploy.yml b/.github/workflows/image-sync-deploy.yml new file mode 100644 index 00000000..3948e0d9 --- /dev/null +++ b/.github/workflows/image-sync-deploy.yml @@ -0,0 +1,176 @@ +--- +name: 'Deploy' + +on: + push: + branches: + - main + pull_request: + paths: + - "/image-sync/deploy/**" + +jobs: + terraform: + name: 'Terraform' + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup python + uses: actions/setup-python@v5 + with: + python-version: 3.x + - name: Install environment + run: | + curl -L https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 > jq + chmod +x jq + pip install yq git+https://github.com/tdviet/fedcloudclient.git + - name: Configure providers access + env: + REFRESH_TOKEN: ${{ secrets.REFRESH_TOKEN }} + ANSIBLE_SECRETS: ${{ secrets.ANSIBLE_SECRETS }} + run: | + # using parametric scopes to only have access to cloud.egi.eu VO + SCOPE="openid%20email%20profile%20voperson_id" + SCOPE="$SCOPE%20eduperson_entitlement:urn:mace:egi.eu:group:cloud.egi.eu:role=vm_operator#aai.egi.eu" + SCOPE="$SCOPE%20eduperson_entitlement:urn:mace:egi.eu:group:cloud.egi.eu:role=member#aai.egi.eu" + OIDC_TOKEN=$(curl -X POST "https://aai.egi.eu/auth/realms/egi/protocol/openid-connect/token" \ + -d "grant_type=refresh_token&client_id=token-portal&scope=$SCOPE&refresh_token=$REFRESH_TOKEN" \ + | jq -r ".access_token") + echo "::add-mask::$OIDC_TOKEN" + cd image-sync/deploy + BACKEND_SITE="$(yq -r .clouds.backend.site clouds.yaml)" + BACKEND_VO="$(yq -r .clouds.backend.vo clouds.yaml)" + EGI_SITE="$(yq -r .clouds.deploy.site clouds.yaml)" + DEPLOY_VO="$(yq -r .clouds.deploy.vo clouds.yaml)" + echo "EGI_SITE=$EGI_SITE" >> "$GITHUB_ENV" + BACKEND_OS_TOKEN="$(fedcloud openstack token issue --oidc-access-token "$OIDC_TOKEN" \ + --site "$BACKEND_SITE" --vo "$BACKEND_VO" -j | jq -r '.[0].Result.id')" + echo "::add-mask::$BACKEND_OS_TOKEN" + sed -i -e "s/backend_secret/$BACKEND_OS_TOKEN/" 
clouds.yaml + DEPLOY_OS_TOKEN="$(fedcloud openstack token issue --oidc-access-token "$OIDC_TOKEN" \ + --site "$EGI_SITE" --vo "$DEPLOY_VO" -j | jq -r '.[0].Result.id')" + echo "::add-mask::$DEPLOY_OS_TOKEN" + sed -i -e "s/deploy_secret/$DEPLOY_OS_TOKEN/" clouds.yaml + mkdir -p ~/.config/openstack + touch ~/.config/openstack/secure.yaml + FEDCLOUD_LOCKER_TOKEN="$(fedcloud secret locker create \ + --oidc-access-token "$OIDC_TOKEN" \ + --ttl 1h --num-uses 2)" + echo "::add-mask::$FEDCLOUD_LOCKER_TOKEN" + fedcloud secret put --locker-token "$FEDCLOUD_LOCKER_TOKEN" deploy "data=$ANSIBLE_SECRETS" + echo "FEDCLOUD_LOCKER_TOKEN=$FEDCLOUD_LOCKER_TOKEN" >> "$GITHUB_ENV" + - name: Setup Terraform + uses: hashicorp/setup-terraform@v3 + with: + terraform_version: 1.2.9 + - name: Terraform Format + id: fmt + run: | + cd image-sync/deploy + terraform fmt -check + - name: Terraform init + id: init + run: | + cd image-sync/deploy + terraform init + - name: Adjust cloud-init file + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + run: | + cd image-sync/deploy + sed -i -e "s/%TOKEN%/${{ secrets.GITHUB_TOKEN }}/" cloud-init.yaml + sed -i -e "s/%REF%/${{ github.sha }}/" cloud-init.yaml + sed -i -e "s/%SHORT_REF%/$(git rev-parse --short HEAD)/" cloud-init.yaml + sed -i -e "s#%SLACK_WEBHOOK_URL%#$SLACK_WEBHOOK_URL#" cloud-init.yaml + sed -i -e "s/%FEDCLOUD_LOCKER_TOKEN%/$FEDCLOUD_LOCKER_TOKEN/" cloud-init.yaml + sed -i -e "s/%CLOUDS_YAML%/$(base64 -w 0 < clouds.yaml)/" cloud-init.yaml + - name: terraform plan + id: plan + if: github.event_name == 'pull_request' + run: | + cd image-sync/deploy + terraform plan -no-color -var-file="$EGI_SITE.tfvars" + continue-on-error: true + - name: Update Pull Request + uses: actions/github-script@v7 + if: github.event_name == 'pull_request' + env: + PLAN: "terraform\n${{ steps.plan.outputs.stdout }}" + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const output = `#### Terraform Format and Style 🖌\`${{ steps.fmt.outcome }}\` + #### Terraform Initialization ⚙️\`${{ steps.init.outcome }}\` + #### Terraform Plan 📖\`${{ steps.plan.outcome }}\` +
<details><summary>Show Plan</summary> + + \`\`\` + ${process.env.PLAN} + \`\`\` + + </details>
+ + *Pusher: @${{ github.actor }}, Action: \`${{ github.event_name }}\`*`; + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: output + }) + - name: Terraform Plan Status + if: steps.plan.outcome == 'failure' + run: exit 1 + - name: Terraform Apply + id: terraform-apply + if: github.ref == 'refs/heads/main' && github.event_name == 'push' + run: | + cd image-sync/deploy + terraform apply -auto-approve -var-file="$EGI_SITE.tfvars" + - name: Get VM ID + id: terraform-vm-id + if: github.ref == 'refs/heads/main' && github.event_name == 'push' + run: | + cd image-sync/deploy + terraform output -raw instance-id + - name: Re-configure providers access + env: + REFRESH_TOKEN: ${{ secrets.REFRESH_TOKEN }} + run: | + # using parametric scopes to only have access to cloud.egi.eu VO + SCOPE="openid%20email%20profile%20voperson_id" + SCOPE="$SCOPE%20eduperson_entitlement:urn:mace:egi.eu:group:cloud.egi.eu:role=vm_operator#aai.egi.eu" + SCOPE="$SCOPE%20eduperson_entitlement:urn:mace:egi.eu:group:cloud.egi.eu:role=member#aai.egi.eu" + OIDC_TOKEN=$(curl -X POST "https://aai.egi.eu/auth/realms/egi/protocol/openid-connect/token" \ + -d "grant_type=refresh_token&refresh_token=$REFRESH_TOKEN&client_id=token-portal&scope=$SCOPE" \ + | jq -r ".access_token") + echo "::add-mask::$OIDC_TOKEN" + cd image-sync/deploy + git checkout -- clouds.yaml + BACKEND_SITE="$(yq -r .clouds.backend.site clouds.yaml)" + BACKEND_VO="$(yq -r .clouds.backend.vo clouds.yaml)" + BACKEND_OS_TOKEN="$(fedcloud openstack token issue --oidc-access-token "$OIDC_TOKEN" \ + --site "$BACKEND_SITE" --vo "$BACKEND_VO" -j | jq -r '.[0].Result.id')" + echo "::add-mask::$BACKEND_OS_TOKEN" + echo "BACKEND_OS_TOKEN=$BACKEND_OS_TOKEN" >> "$GITHUB_ENV" + sed -i -e "s/backend_secret/$BACKEND_OS_TOKEN/" clouds.yaml + mkdir -p ~/.config/openstack + touch ~/.config/openstack/secure.yaml + - name: Get the status file from swift + if: github.ref == 'refs/heads/main' && github.event_name == 'push' + uses: nick-fields/retry@v3 + with: + timeout_minutes: 10 + max_attempts: 20 + retry_wait_seconds: 40 + command: > + pushd image-sync/deploy && + openstack --os-cloud backend --os-token "$BACKEND_OS_TOKEN" object save fedcloud-catchall "${{ steps.terraform-vm-id.outputs.stdout }}" && + openstack --os-cloud backend --os-token "$BACKEND_OS_TOKEN" object delete fedcloud-catchall "${{ steps.terraform-vm-id.outputs.stdout }}" + - name: Look for errors + if: github.ref == 'refs/heads/main' && github.event_name == 'push' + run: | + cd image-sync/deploy + # show the status in the build log + cat "${{ steps.terraform-vm-id.outputs.stdout }}" + grep -v "error" "${{ steps.terraform-vm-id.outputs.stdout }}" diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index b5640659..d0ae11ab 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -1,7 +1,7 @@ --- name: Lint -on: [pull_request, push] +on: pull_request jobs: super-lint: @@ -12,9 +12,15 @@ jobs: # Checks out a copy of your repository on the ubuntu-latest machine - name: Checkout code uses: actions/checkout@v4 + with: + # Full git history needed to get proper list of changed files + fetch-depth: 0 # Runs the Super-Linter action - name: Run Super-Linter - uses: github/super-linter/slim@v5 + uses: github/super-linter/slim@v6 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # Disabling these two for the moment, should be enabled later + VALIDATE_CHECKOV: false + VALIDATE_PYTHON_PYLINT: false diff --git 
a/.github/workflows/molecule.yml b/.github/workflows/molecule.yml new file mode 100644 index 00000000..45453d56 --- /dev/null +++ b/.github/workflows/molecule.yml @@ -0,0 +1,26 @@ +--- +name: Test role + +on: [push, pull_request] + +jobs: + molecule: + name: Runs molecule for the ansible role + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '>=3.9' + - name: Install dependencies + run: | + pip install molecule molecule-plugins[docker] pytest pytest-testinfra + - name: Test Ansible Bootstrap + run: | + cd deploy/roles/catchall + molecule test + env: + PY_COLORS: 1 diff --git a/cloud-info/Dockerfile b/cloud-info/Dockerfile index c2d9569c..d66c6c8c 100644 --- a/cloud-info/Dockerfile +++ b/cloud-info/Dockerfile @@ -1,33 +1,65 @@ +FROM python:3 as build + +SHELL ["/bin/bash", "-o", "pipefail", "-c"] + +# hadolint ignore=DL3008 +RUN curl -s https://dist.eugridpma.info/distribution/igtf/current/GPG-KEY-EUGridPMA-RPM-3 \ + | apt-key add - \ + && echo "deb https://repository.egi.eu/sw/production/cas/1/current egi-igtf core" > /etc/apt/sources.list.d/igtf.list \ + && apt-get update \ + && apt-get install -y ca-policy-egi-core \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /cloud-info + +RUN python -m venv /cloud-info/venv +ENV PATH="/cloud-info/venv/bin:$PATH" + +COPY requirements.txt . + +RUN pip install --no-cache-dir -r requirements.txt \ + && cat /etc/grid-security/certificates/*.pem >> "$(python -m requests.certs)" + +COPY . . + +RUN pip install --no-cache-dir . + +# The actual image FROM python:3 LABEL org.opencontainers.image.source=https://github.com/EGI-Federation/fedcloud-catchall-operations SHELL ["/bin/bash", "-o", "pipefail", "-c"] -RUN mkdir /cloud-info -COPY requirements.txt /cloud-info/requirements.txt -RUN pip install --no-cache-dir -r /cloud-info/requirements.txt - -# CA certificates: install and add to python # hadolint ignore=DL3015, DL3008 -RUN curl -Ls \ - https://dist.eugridpma.info/distribution/igtf/current/GPG-KEY-EUGridPMA-RPM-3 \ - | apt-key add - \ - && echo 'deb http://repository.egi.eu/sw/production/cas/1/current egi-igtf core' \ - > /etc/apt/sources.list.d/cas.list \ - && apt-get update \ - && apt-get install -y jq \ - && apt-get install -y ca-policy-egi-core \ - && rm -rf /var/lib/apt/lists/* \ - && cat /etc/grid-security/certificates/*.pem >> "$(python -m requests.certs)" +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + jq rclone \ + && rm -rf /var/lib/apt/lists/* -COPY . 
/cloud-info/ -RUN pip install --no-cache-dir /cloud-info +RUN mkdir /cloud-info \ + && groupadd -g 1999 python \ + && useradd -r -u 1999 -g python python \ + && chown -R python:python /cloud-info +WORKDIR /cloud-info + +# All the python code from the build image above +COPY --chown=python:python --from=build /cloud-info/venv ./venv +# Add the scripts that call the cloud-info-provider as needed for the site +# these create the configuration for the site by discovering the available +# projects for the credentials and will send the output to the AMS queue and +# upload to S3 COPY ams-wrapper.sh /usr/local/bin/ams-wrapper.sh COPY publisher.sh /usr/local/bin/publisher.sh +# These are sample configuration files for cloud-info-provider that can be used +# if the container is used outside of the catchall-operations as described in +# https://docs.egi.eu/providers/cloud-compute/openstack/cloud-info/#local-operations COPY openstack.rc /etc/cloud-info-provider/openstack.rc COPY openstack.yaml /etc/cloud-info-provider/openstack.yaml +USER 1999 + +ENV PATH="/cloud-info/venv/bin:$PATH" CMD ["publisher.sh"] diff --git a/cloud-info/ams-wrapper.sh b/cloud-info/ams-wrapper.sh index b28a758d..c217e1d9 100755 --- a/cloud-info/ams-wrapper.sh +++ b/cloud-info/ams-wrapper.sh @@ -10,9 +10,9 @@ GOCDB_ID=$(python -c "from __future__ import print_function; \ timeout=60)['gocdb_id'], end='')") if test "$AMS_TOKEN_FILE" != ""; then - AMS_TOKEN=$(cat "$AMS_TOKEN_FILE") -elif test "$HOSTCERT" != "" -a "$HOSTKEY" != ""; then - AMS_TOKEN=$(python -c "from argo_ams_library import ArgoMessagingService; \ + AMS_TOKEN=$(cat "$AMS_TOKEN_FILE") +elif test "$HOSTCERT" != "" -a "$HOSTKEY" != ""; then + AMS_TOKEN=$(python -c "from argo_ams_library import ArgoMessagingService; \ ams = ArgoMessagingService(endpoint='$AMS_HOST', \ project='$AMS_PROJECT', \ cert='$HOSTCERT', \ @@ -21,16 +21,18 @@ elif test "$HOSTCERT" != "" -a "$HOSTKEY" != ""; then fi if test "$SITE_NAME" = ""; then - SITE_NAME="$(yq -r .site.name "$CLOUD_INFO_CONFIG" | tr "." "-")" + SITE_NAME="$(yq -r .site.name "$CLOUD_INFO_CONFIG" | tr "." "-")" fi SITE_TOPIC=$(echo "$SITE_NAME" | tr "." "-") AMS_TOPIC="SITE_${SITE_TOPIC}_ENDPOINT_${GOCDB_ID}" # exit if TOPIC is not available. -curl -f "https://$AMS_HOST/v1/projects/$AMS_PROJECT/topics/$AMS_TOPIC?key=$AMS_TOKEN" > /dev/null 2>&1 \ - || (echo "Topic $AMS_TOPIC is not avaiable, aborting!"; false) - +curl -f "https://$AMS_HOST/v1/projects/$AMS_PROJECT/topics/$AMS_TOPIC?key=$AMS_TOKEN" >/dev/null 2>&1 || + ( + echo "Topic $AMS_TOPIC is not avaiable, aborting!" 
+ false + ) # Attempt to generate the site configuration AUTO_CONFIG_PATH="$(mktemp -d)" @@ -39,60 +41,80 @@ AUTO_CONFIG_PATH="$(mktemp -d)" export CHECKIN_SECRETS_FILE="$CHECKIN_SECRETS_PATH/secrets.yaml" # TODO(enolfc): avoid creating new tokens for every provider export ACCESS_TOKEN_FILE="$AUTO_CONFIG_PATH/token.yaml" -USE_ACCESS_TOKEN=0 if token-generator; then - # TODO(enolfc): even if this belows fails, we should use access token as it will provide - # access to more projects - if SECRETS_FILE="$ACCESS_TOKEN_FILE" config-generator > "$AUTO_CONFIG_PATH/site.yaml"; then - # this worked, let's update the env - export CHECKIN_SECRETS_PATH="$AUTO_CONFIG_PATH/vos" - export CLOUD_INFO_CONFIG="$AUTO_CONFIG_PATH/site.yaml" - USE_ACCESS_TOKEN=1 - fi + # TODO(enolfc): even if this belows fails, we should use access token as it will provide + # access to more projects + if SECRETS_FILE="$ACCESS_TOKEN_FILE" config-generator >"$AUTO_CONFIG_PATH/site.yaml"; then + # this worked, let's update the env + export CHECKIN_SECRETS_PATH="$AUTO_CONFIG_PATH/vos" + export CLOUD_INFO_CONFIG="$AUTO_CONFIG_PATH/site.yaml" + fi fi # Any OS related parameter should be available as env variables if test "$CHECKIN_SECRETS_PATH" = ""; then - # Case 1: manual config - cloud-info-provider-service --yaml-file "$CLOUD_INFO_CONFIG" \ - --middleware "$CLOUD_INFO_MIDDLEWARE" \ - --ignore-share-errors \ - --format glue21 > cloud-info.out -elif test "$USE_ACCESS_TOKEN" -eq 1; then - # Case 2: access token style - cloud-info-provider-service --yaml-file "$CLOUD_INFO_CONFIG" \ - --middleware "$CLOUD_INFO_MIDDLEWARE" \ - --ignore-share-errors \ - --auth-refresher accesstoken \ - --format glue21 > cloud-info.out + # Case 1: manual config + cloud-info-provider-service --yaml-file "$CLOUD_INFO_CONFIG" \ + --middleware "$CLOUD_INFO_MIDDLEWARE" \ + --ignore-share-errors \ + --format glue21 >cloud-info.out else - # Let's use the service account directly on the info provider - CHECKIN_DISCOVERY="https://aai.egi.eu/auth/realms/egi/.well-known/openid-configuration" - CLIENT_ID="$(yq -r '.fedcloudops.client_id' < "$CHECKIN_SECRETS_FILE")" - CLIENT_SECRET="$(yq -r '.fedcloudops.client_secret' < "$CHECKIN_SECRETS_FILE")" - cloud-info-provider-service --yaml-file "$CLOUD_INFO_CONFIG" \ - --middleware "$CLOUD_INFO_MIDDLEWARE" \ - --ignore-share-errors \ - --os-auth-type v3oidcclientcredentials \ - --os-discovery-endpoint "$CHECKIN_DISCOVERY" \ - --os-client-id "$CLIENT_ID" \ - --os-client-secret "$CLIENT_SECRET" \ - --os-access-token-type access_token \ - --os-openid-scope "openid profile eduperson_entitlement email" \ - --format glue21 > cloud-info.out + # use service account for everyone + export OS_DISCOVERY_ENDPOINT="https://aai.egi.eu/auth/realms/egi/.well-known/openid-configuration" + OS_CLIENT_ID="$(yq -r '.fedcloudops.client_id' <"$CHECKIN_SECRETS_FILE")" + export OS_CLIENT_ID + OS_CLIENT_SECRET="$(yq -r '.fedcloudops.client_secret' <"$CHECKIN_SECRETS_FILE")" + export OS_CLIENT_SECRET + export OS_ACCESS_TOKEN_TYPE="access_token" + export OS_AUTH_TYPE="v3oidcclientcredentials" + export OS_OPENID_SCOPE="openid profile eduperson_entitlement email" + cloud-info-provider-service --yaml-file "$CLOUD_INFO_CONFIG" \ + --middleware "$CLOUD_INFO_MIDDLEWARE" \ + --ignore-share-errors \ + --format glue21 >cloud-info.out + # Produce the json output also + RCLONE_CONFIG_S3="$(yq -r '.s3' <"$CHECKIN_SECRETS_FILE")" + if test "$RCLONE_CONFIG_S3" != "null"; then + cloud-info-provider-service --yaml-file "$CLOUD_INFO_CONFIG" \ + --middleware 
"$CLOUD_INFO_MIDDLEWARE" \ + --ignore-share-errors \ + --format glue21json >site.json + fi fi # Fail if there are no shares -grep -q GLUE2ShareID cloud-info.out \ - || (echo "No share information available, aborting!"; false) +grep -q GLUE2ShareID cloud-info.out || + ( + echo "No share information available, aborting!" + false + ) # Publishing on our own as message is too large for some providers ARGO_URL="https://$AMS_HOST/v1/projects/$AMS_PROJECT/topics/$AMS_TOPIC:publish?key=$AMS_TOKEN" -printf '{"messages":[{"attributes":{},"data":"' > ams-payload -grep -v "UNKNOWN" cloud-info.out | grep -v "^#" | grep -v ": $" | gzip | base64 -w 0 >> ams-payload -printf '"}]}' >> ams-payload +printf '{"messages":[{"attributes":{},"data":"' >ams-payload +grep -v "UNKNOWN" cloud-info.out | grep -v "^#" | grep -v ": $" | gzip | base64 -w 0 >>ams-payload +printf '"}]}' >>ams-payload curl -X POST "$ARGO_URL" -H "content-type: application/json" -d @ams-payload +if [ -f site.json ]; then + # Put this info into S3, configure rclone config with + # a provider named "s3" using env variables + export RCLONE_CONFIG_S3_TYPE=s3 + RCLONE_CONFIG_S3_ACCESS_KEY_ID="$(yq -r '.s3.access_key_id' <"$CHECKIN_SECRETS_FILE")" + export RCLONE_CONFIG_S3_ACCESS_KEY_ID + RCLONE_CONFIG_S3_SECRET_ACCESS_KEY="$(yq -r '.s3.secret_access_key' <"$CHECKIN_SECRETS_FILE")" + export RCLONE_CONFIG_S3_SECRET_ACCESS_KEY + RCLONE_CONFIG_S3_ENDPOINT="$(yq -r '.s3.endpoint' <"$CHECKIN_SECRETS_FILE")" + export RCLONE_CONFIG_S3_ENDPOINT + S3_BUCKET_NAME="$(yq -r '.s3.bucket' <"$CHECKIN_SECRETS_FILE")" + export S3_BUCKET_NAME + RCLONE_CONFIG_S3_PROVIDER="$(yq -r '.s3.provider' <"$CHECKIN_SECRETS_FILE")" + export RCLONE_CONFIG_S3_PROVIDER + export RCLONE_CONFIG_S3_ACL=private + export RCLONE_CONFIG_S3_NO_CHECK_BUCKET=true + rclone copy site.json "s3:$S3_BUCKET_NAME/$SITE_NAME" +fi + rm -rf "$VO_CONFIG_PATH" diff --git a/deploy/CESNET-MCC.tfvars b/cloud-info/deploy/CESNET-MCC.tfvars similarity index 100% rename from deploy/CESNET-MCC.tfvars rename to cloud-info/deploy/CESNET-MCC.tfvars diff --git a/deploy/NCG-INGRID-PT.tfvars b/cloud-info/deploy/NCG-INGRID-PT.tfvars similarity index 100% rename from deploy/NCG-INGRID-PT.tfvars rename to cloud-info/deploy/NCG-INGRID-PT.tfvars diff --git a/deploy/README.md b/cloud-info/deploy/README.md similarity index 100% rename from deploy/README.md rename to cloud-info/deploy/README.md diff --git a/deploy/backend.tf b/cloud-info/deploy/backend.tf similarity index 100% rename from deploy/backend.tf rename to cloud-info/deploy/backend.tf diff --git a/deploy/cloud-init.yaml b/cloud-info/deploy/cloud-init.yaml similarity index 92% rename from deploy/cloud-init.yaml rename to cloud-info/deploy/cloud-init.yaml index 06864b62..aa96451a 100644 --- a/deploy/cloud-init.yaml +++ b/cloud-info/deploy/cloud-init.yaml @@ -23,6 +23,8 @@ packages: - ansible - jq - python3-openstackclient + - python3-pip + - python3-venv - retry write_files: @@ -40,14 +42,14 @@ write_files: SLACK_WEBHOOK_URL="%SLACK_WEBHOOK_URL%" COMMIT_SHA="%REF%" SHORT_COMMIT_SHA="%SHORT_REF%" + FEDCLOUD_LOCKER_TOKEN="%FEDCLOUD_LOCKER_TOKEN%" # get the repo code and untar at cwd curl -L -H "Accept: application/vnd.github.v3+raw" \ "https://api.github.com/repos/EGI-Federation/fedcloud-catchall-operations/tarball/$COMMIT_SHA" | \ tar xz --strip=1 cd deploy - echo "%ANSIBLE_SECRETS%" | base64 -d > ./secrets.yaml - ./deploy.sh "$OAUTH_TOKEN" "$COMMIT_SHA" \ + ./deploy.sh "$OAUTH_TOKEN" "$COMMIT_SHA" "$FEDCLOUD_LOCKER_TOKEN" \ "$SHORT_COMMIT_SHA" "$SLACK_WEBHOOK_URL" 
path: /var/lib/cloud/scripts/per-boot/deploy.sh permissions: '0755' diff --git a/deploy/clouds.yaml b/cloud-info/deploy/clouds.yaml similarity index 100% rename from deploy/clouds.yaml rename to cloud-info/deploy/clouds.yaml diff --git a/cloud-info/deploy/deploy.sh b/cloud-info/deploy/deploy.sh new file mode 100755 index 00000000..e5f6c244 --- /dev/null +++ b/cloud-info/deploy/deploy.sh @@ -0,0 +1,110 @@ +#!/bin/sh +# Configure current host with ansible +# Expects as arguments: +# - a GitHub OAUTH_TOKEN to update the PR +# - the COMMIT_SHA +# - a locker for fedcloud secret to obtain the secrets +# - the SHORT_SHA used for pulling the docker image to use +# - a SLACK_WEBHOOK_URL to report on the status +set -e + +OAUTH_TOKEN="$1" +COMMIT_SHA="$2" +FEDCLOUD_SECRET_LOCKER="$3" +SHORT_SHA="$4" +SLACK_WEBHOOK_URL="$5" + +# create a virtual env for fedcloudclient +python3 -m venv "$PWD/.venv" +"$PWD/.venv/bin/pip" install fedcloudclient + +"$PWD/.venv/bin/fedcloud" secret get --locker-token "$FEDCLOUD_SECRET_LOCKER" \ + deploy data >secrets.yaml + +cat >>deploy-vars.yaml <ansible.log 2>&1; then + status_summary="success" + color="#6DBF59" + header="Successful deployment :rocket:" +else + status_summary="fail" + color="#EA4F47" + header="Failed deployment :boom:" +fi + +# This is a temporary way to get the auto discovery working while we transition for all sites +# copy the secrets to the /etc/egi/vos dir which is readable from the containers +cp secrets.yaml /etc/egi/vos/secrets.yaml + +# make sure the container user (1999) can access the files +chown -R 1999:1999 /etc/egi/ + +GITHUB_COMMIT_URL="https://api.github.com/repos/EGI-Federation/fedcloud-catchall-operations/commits/$COMMIT_SHA/pulls" + +# Find out PR we need to update +ISSUE_NUMBER=$(curl \ + -H "Accept: application/vnd.github.groot-preview+json" \ + "$GITHUB_COMMIT_URL" | jq .[0].number) + +GITHUB_ISSUE_URL="https://api.github.com/repos/EGI-Federation/fedcloud-catchall-operations/issues/$ISSUE_NUMBER/comments" + +{ + echo "### Ansible deployment: \`$status_summary\`" + echo '
<details><summary>Deployment log</summary>' + echo + echo '```' + cat ansible.log + echo '```' + echo + echo '</details>
' +} >github_body.txt +echo "{}" | jq --arg b "$(cat github_body.txt)" '{body: $b}' >github_body.json + +# Let GitHub know +comment_url=$(curl -X POST \ + -H "Authorization: token $OAUTH_TOKEN" \ + -H "Accept: application/vnd.github.v3+json" \ + "$GITHUB_ISSUE_URL" \ + --data @github_body.json | + jq -r .html_url) + +cat >slack_body.json < " + } + } + ] + } + ] +} +EOF + +# Let Slack know +curl -X POST -H 'Content-type: application/json' \ + --data @slack_body.json \ + "$SLACK_WEBHOOK_URL" diff --git a/deploy/extra-vars.yaml b/cloud-info/deploy/extra-vars.yaml similarity index 100% rename from deploy/extra-vars.yaml rename to cloud-info/deploy/extra-vars.yaml diff --git a/deploy/inventory.yaml b/cloud-info/deploy/inventory.yaml similarity index 100% rename from deploy/inventory.yaml rename to cloud-info/deploy/inventory.yaml diff --git a/deploy/main.tf b/cloud-info/deploy/main.tf similarity index 100% rename from deploy/main.tf rename to cloud-info/deploy/main.tf diff --git a/cloud-info/deploy/playbook.yaml b/cloud-info/deploy/playbook.yaml new file mode 100644 index 00000000..9cda45fd --- /dev/null +++ b/cloud-info/deploy/playbook.yaml @@ -0,0 +1,7 @@ +--- +- hosts: all + become: true + roles: + - role: catchall + vars: + site_config_dir: ../sites/ diff --git a/cloud-info/deploy/roles/catchall/defaults/main.yaml b/cloud-info/deploy/roles/catchall/defaults/main.yaml new file mode 100644 index 00000000..40bc762a --- /dev/null +++ b/cloud-info/deploy/roles/catchall/defaults/main.yaml @@ -0,0 +1,35 @@ +# AMS details +ams_project: egi_cloud_info +ams_host: msg.argo.grnet.gr +ams_token: secret + +# check-in endpoint +checkin: + token_endpoint: "https://aai.egi.eu/auth/realms/egi/protocol/openid-connect/token" + +# docker image for the cloud info provider +cloud_info_image: egifedcloud/ops-cloud-info:latest + +# site configuration location +site_config_dir: sites + +# default user for the containers +egi_user: "1999" +egi_group: "1999" + +# No site information as default +sites: [] + +cloud_info_cron: + minute: "4,34" + hour: "*" + weekday: "*" + timeout: "600" + +image_sync_image: egifedcloud/ops-image-sync:latest + +image_sync_cron: + minute: "5" + hour: "*/3" + weekday: "*" + timeout: "9000" # 2.5 hours diff --git a/cloud-info/deploy/roles/catchall/molecule/default/converge.yml b/cloud-info/deploy/roles/catchall/molecule/default/converge.yml new file mode 100644 index 00000000..2f4ad264 --- /dev/null +++ b/cloud-info/deploy/roles/catchall/molecule/default/converge.yml @@ -0,0 +1,28 @@ +--- +- name: Converge + hosts: all + tasks: + - name: "Include catchall role" + ansible.builtin.include_role: + name: "catchall" + vars: + sites: + - endpoint: https://example.com:5000/v3/ + gocdb: foo.bar + vos: + - auth: + project_id: a123456 + name: sample_vo + - auth: + project_id: b987659 + name: vo.example.com + - endpoint: https://site.org:5000/v3/ + gocdb: bar.foo + region: region1 + vos: + - auth: + project_id: a123456 + name: sample_vo + - auth: + project_id: b987659 + name: vo.example.com diff --git a/cloud-info/deploy/roles/catchall/molecule/default/molecule.yml b/cloud-info/deploy/roles/catchall/molecule/default/molecule.yml new file mode 100644 index 00000000..fcdd0e07 --- /dev/null +++ b/cloud-info/deploy/roles/catchall/molecule/default/molecule.yml @@ -0,0 +1,13 @@ +--- +dependency: + name: galaxy +driver: + name: docker +platforms: + - name: instance + image: ubuntu:latest +lint: ansible-lint --exclude .github/ +provisioner: + name: ansible +verifier: + name: testinfra diff --git 
a/cloud-info/deploy/roles/catchall/molecule/default/tests/test_default.py b/cloud-info/deploy/roles/catchall/molecule/default/tests/test_default.py new file mode 100644 index 00000000..5fb4704b --- /dev/null +++ b/cloud-info/deploy/roles/catchall/molecule/default/tests/test_default.py @@ -0,0 +1,30 @@ +import hashlib +import os + +import testinfra.utils.ansible_runner + +testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( + os.environ["MOLECULE_INVENTORY_FILE"] +).get_hosts("all") + + +def test_site_files(host): + endpoint_hash = hashlib.md5(b"https://example.com:5000/v3/").hexdigest() + filename = "foo-bar-%s" % endpoint_hash + assert host.file("/etc/egi/cloud-info/").is_directory + assert host.file("/etc/egi/cloud-info/%s.yaml" % filename).exists + assert not host.file("/etc/egi/cloud-info/%s.env" % filename).contains("OS_REGION") + assert host.file("/etc/egi/cloud-info/%s.env" % filename).exists + assert host.file("/etc/cron.d/cloud-info-%s" % filename).exists + + +def test_site_files_region(host): + endpoint_hash = hashlib.md5(b"https://site.org:5000/v3/").hexdigest() + filename = "bar-foo-%s" % endpoint_hash + assert host.file("/etc/egi/cloud-info/").is_directory + assert host.file("/etc/egi/cloud-info/%s.yaml" % filename).exists + assert host.file("/etc/egi/cloud-info/%s.env" % filename).exists + assert host.file("/etc/egi/cloud-info/%s.env" % filename).contains( + "OS_REGION=region1" + ) + assert host.file("/etc/cron.d/cloud-info-%s" % filename).exists diff --git a/cloud-info/deploy/roles/catchall/requirements.txt b/cloud-info/deploy/roles/catchall/requirements.txt new file mode 100644 index 00000000..c745c484 --- /dev/null +++ b/cloud-info/deploy/roles/catchall/requirements.txt @@ -0,0 +1,4 @@ +molecule +molecule-plugins[docker] +pytest-testinfra +ansible-lint diff --git a/cloud-info/deploy/roles/catchall/tasks/cloud-info.yml b/cloud-info/deploy/roles/catchall/tasks/cloud-info.yml new file mode 100644 index 00000000..5b2c8fe3 --- /dev/null +++ b/cloud-info/deploy/roles/catchall/tasks/cloud-info.yml @@ -0,0 +1,38 @@ +--- +- name: Cloud-info dirs + ansible.builtin.file: + path: "{{ item }}" + state: directory + mode: "755" + owner: "{{ egi_user }}" + group: "{{ egi_group }}" + loop: + - /etc/egi/cloud-info + - /var/lock/cloud-info + - /var/log/cloud-info + +- name: Cloud-info config directory + ansible.builtin.template: + src: site-info.yaml.j2 + dest: /etc/egi/cloud-info/{{ filename }}.yaml + mode: "600" + +- name: Cloud info env + ansible.builtin.template: + src: cloud-info.env.j2 + dest: /etc/egi/cloud-info/{{ filename }}.env + mode: "600" + +- name: Cloud info cron + ansible.builtin.cron: + name: cloud-info-provider {{ site.gocdb }} + weekday: "{{ cloud_info_cron.weekday }}" + minute: "{{ cloud_info_cron.minute }}" + hour: "{{ cloud_info_cron.hour }}" + user: root + job: > + flock -n -w {{ cloud_info_cron.timeout }} /var/lock/cloud-info/{{ filename }} + docker run --rm -v /etc/egi:/etc/egi:ro + --env-file /etc/egi/cloud-info/{{ filename }}.env + {{ cloud_info_image }} >> /var/log/cloud-info/{{ filename }}.log 2>&1 + cron_file: "cloud-info-{{ filename }}" diff --git a/cloud-info/deploy/roles/catchall/tasks/docker.yml b/cloud-info/deploy/roles/catchall/tasks/docker.yml new file mode 100644 index 00000000..15405be8 --- /dev/null +++ b/cloud-info/deploy/roles/catchall/tasks/docker.yml @@ -0,0 +1,59 @@ +--- +- name: Install dependencies + ansible.builtin.apt: + name: + - apt-transport-https + - ca-certificates + - curl + - gnupg-agent + - software-properties-common + 
state: present + update_cache: true + +- name: Docker repo key + ansible.builtin.apt_key: + id: 9DC858229FC7DD38854AE2D88D81803C0EBFCD88 + url: https://download.docker.com/linux/ubuntu/gpg + state: present + +- name: Add docker repo + ansible.builtin.apt_repository: + repo: "deb [arch=amd64] https://download.docker.com/linux/ubuntu {{ ansible_distribution_release }} stable" + state: present + +- name: Install docker + ansible.builtin.apt: + name: + - docker-ce + - docker-ce-cli + - containerd.io + state: present + update_cache: true + +- name: Ensure docker config dir is present + ansible.builtin.file: + path: /etc/docker + state: directory + mode: "775" + +- name: Configure docker + ansible.builtin.copy: + # this is very CESNET-MCC specific, may be better to move as configurable + content: | + { + "mtu": 1442, + "exec-opts": ["native.cgroupdriver=systemd"], + "log-driver": "json-file", + "log-opts": { + "max-size": "100m" + }, + "storage-driver": "overlay2" + } + dest: /etc/docker/daemon.json + mode: "660" + +- name: Restart docker + ansible.builtin.systemd: + name: docker + state: restarted + daemon_reload: true diff --git a/cloud-info/deploy/roles/catchall/tasks/main.yml b/cloud-info/deploy/roles/catchall/tasks/main.yml new file mode 100644 index 00000000..508ae366 --- /dev/null +++ b/cloud-info/deploy/roles/catchall/tasks/main.yml @@ -0,0 +1,59 @@ +--- +- name: Ensure cron is available + tags: ["cloud-info", "sync"] + ansible.builtin.apt: + name: cron + state: present + update_cache: true + +- name: Install docker + ansible.builtin.include_tasks: docker.yml + # this is only executed if explicity requested + tags: ['never', 'docker'] + +- name: Load site configuration + tags: ["cloud-info", "sync"] + ansible.builtin.include_vars: + file: "{{ item }}" + name: "{{ 'site_incl_vars_' ~ item | basename | splitext | first }}" + with_fileglob: + - "{{ site_config_dir }}/*.yaml" + +- name: Set site configuration variable + tags: ["cloud-info", "sync"] + ansible.builtin.set_fact: + sites: "{{ sites | default([]) + [lookup('vars', item)] }}" + loop: "{{ query('varnames', '^site_incl_vars_(.*)$') }}" + +- name: EGI configuration directories + tags: ["cloud-info", "sync"] + ansible.builtin.file: + path: "{{ item }}" + state: directory + mode: "755" + owner: "{{ egi_user }}" + group: "{{ egi_group }}" + loop: + - /etc/egi + - /etc/egi/vos + +- name: Site specific config + tags: ["cloud-info"] + ansible.builtin.include_tasks: + file: cloud-info.yml + apply: + tags: "cloud-info" + vars: + site: "{{ item }}" + filename: "{{ item.gocdb | replace('.', '-') }}-{{ item.endpoint | hash('md5') }}" + with_items: + - "{{ sites }}" + when: sites is iterable + + +- name: Image sync config + tags: ["sync"] + ansible.builtin.include_tasks: + file: sync.yml + apply: + tags: "sync" diff --git a/cloud-info/deploy/roles/catchall/tasks/sync.yml b/cloud-info/deploy/roles/catchall/tasks/sync.yml new file mode 100644 index 00000000..f0d547b9 --- /dev/null +++ b/cloud-info/deploy/roles/catchall/tasks/sync.yml @@ -0,0 +1,34 @@ +--- +- name: Sync dirs + ansible.builtin.file: + path: "{{ item }}" + state: directory + mode: "755" + owner: "{{ egi_user }}" + group: "{{ egi_group }}" + loop: + - /etc/egi/image_sync + - /var/cache/image_sync + +- name: sync configuration + ansible.builtin.template: + src: sync.conf.j2 + dest: /etc/egi/image_sync/sync.conf + mode: "600" + owner: "{{ egi_user }}" + group: "{{ egi_group }}" + +- name: Image sync cron + ansible.builtin.cron: + name: image sync + weekday: "{{ image_sync_cron.weekday 
}}" + minute: "{{ image_sync_cron.minute }}" + hour: "{{ image_sync_cron.hour }}" + user: root + job: > + flock -n -w {{ image_sync.timeout }} /var/lock/sync + docker run --rm -v /etc/egi:/etc/egi:ro + -v {{ site_config_dir }}:/sites:ro + {{ image_sync_image }} sites-sync + --config-dir /etc/egi/image_sync >> /var/log/sync.log 2>&1 + cron_file: "egi-image-sync" diff --git a/cloud-info/deploy/roles/catchall/templates/cloud-info.env.j2 b/cloud-info/deploy/roles/catchall/templates/cloud-info.env.j2 new file mode 100644 index 00000000..9c7c6e2a --- /dev/null +++ b/cloud-info/deploy/roles/catchall/templates/cloud-info.env.j2 @@ -0,0 +1,14 @@ +AMS_HOST={{ ams_host }} +AMS_PROJECT={{ ams_project }} +AMS_TOKEN={{ ams_token }} +CHECKIN_OIDC_TOKEN={{ checkin_token_endpoint }} +CHECKIN_SECRETS_PATH=/etc/egi/vos/ +CLOUD_INFO_CONFIG=/etc/egi/cloud-info/{{ filename }}.yaml +OS_AUTH_TYPE=v3oidcaccesstoken +OS_AUTH_URL={{ site.endpoint }} +OS_IDENTITY_PROVIDER=egi.eu +OS_PROTOCOL={{ site.protocol | default('openid') }} +{% if "region" in site %} +OS_REGION={{ site.region }} +{% endif %} +SITE_NAME={{ site.gocdb }} diff --git a/cloud-info/deploy/roles/catchall/templates/site-info.yaml.j2 b/cloud-info/deploy/roles/catchall/templates/site-info.yaml.j2 new file mode 100644 index 00000000..293e3ccf --- /dev/null +++ b/cloud-info/deploy/roles/catchall/templates/site-info.yaml.j2 @@ -0,0 +1,9 @@ +site: + name: {{ site.gocdb }} + +compute: + shares: +{% for vo in site.vos %} + {{ vo.name }}: + {{ vo | default({}) | to_nice_yaml(indent=2) | indent(6) }} +{% endfor %} diff --git a/cloud-info/deploy/roles/catchall/vars/main.yml b/cloud-info/deploy/roles/catchall/vars/main.yml new file mode 100644 index 00000000..44b09048 --- /dev/null +++ b/cloud-info/deploy/roles/catchall/vars/main.yml @@ -0,0 +1 @@ +# Role variables diff --git a/deploy/vars.tf b/cloud-info/deploy/vars.tf similarity index 100% rename from deploy/vars.tf rename to cloud-info/deploy/vars.tf diff --git a/deploy/versions.tf b/cloud-info/deploy/versions.tf similarity index 100% rename from deploy/versions.tf rename to cloud-info/deploy/versions.tf diff --git a/cloud-info/publisher.sh b/cloud-info/publisher.sh index fe89b88f..0ba9e979 100755 --- a/cloud-info/publisher.sh +++ b/cloud-info/publisher.sh @@ -10,9 +10,9 @@ export CLOUD_INFO_MIDDLEWARE=openstack ams-wrapper.sh if [ -n "$OCCI_ENDPOINT" ]; then - # OCCI - export GOCDB_URL="$OCCI_ENDPOINT" - export GOCDB_SERVICE_TYPE=eu.egi.cloud.vm-management.occi - export CLOUD_INFO_MIDDLEWARE=ooi - ams-wrapper.sh + # OCCI + export GOCDB_URL="$OCCI_ENDPOINT" + export GOCDB_SERVICE_TYPE=eu.egi.cloud.vm-management.occi + export CLOUD_INFO_MIDDLEWARE=ooi + ams-wrapper.sh fi diff --git a/cloud-info/requirements.txt b/cloud-info/requirements.txt index b0c673c5..bfd6f90f 100644 --- a/cloud-info/requirements.txt +++ b/cloud-info/requirements.txt @@ -1,5 +1,6 @@ -# Cloud info version is 9d4c4c516b9311c77564444cb9ecbb059b7f2192 -git+https://github.com/EGI-Federation/cloud-info-provider.git@9d4c4c516b9311c77564444cb9ecbb059b7f2192 +# Cloud info version is 43cefc204b3e07211c6c37df2ee20eab845c3428 +# 43cefc204b3e07211c6c37df2ee20eab845c3428 includes json glue support +git+https://github.com/EGI-Federation/cloud-info-provider.git@43cefc204b3e07211c6c37df2ee20eab845c3428 git+https://github.com/ARGOeu/argo-ams-library@devel python-glanceclient python-novaclient diff --git a/cloud-info/cloud_info_catchall/__init__.py b/cloud-info/src/cloud_info_catchall/__init__.py similarity index 100% rename from 
cloud-info/cloud_info_catchall/__init__.py rename to cloud-info/src/cloud_info_catchall/__init__.py diff --git a/cloud-info/cloud_info_catchall/config_generator.py b/cloud-info/src/cloud_info_catchall/config_generator.py similarity index 98% rename from cloud-info/cloud_info_catchall/config_generator.py rename to cloud-info/src/cloud_info_catchall/config_generator.py index f9c3d071..a7aa79b0 100755 --- a/cloud-info/cloud_info_catchall/config_generator.py +++ b/cloud-info/src/cloud_info_catchall/config_generator.py @@ -49,6 +49,8 @@ def generate_shares(config, secrets): discoverer = RefresherShareDiscovery(config, secrets[s]) elif "access_token" in secrets[s]: discoverer = AccessTokenShareDiscovery(config, secrets[s]) + else: + continue token_shares = discoverer.get_token_shares() shares.update(token_shares) if not shares: diff --git a/cloud-info/cloud_info_catchall/share_discovery.py b/cloud-info/src/cloud_info_catchall/share_discovery.py similarity index 95% rename from cloud-info/cloud_info_catchall/share_discovery.py rename to cloud-info/src/cloud_info_catchall/share_discovery.py index e9eb8b8f..57de67db 100644 --- a/cloud-info/cloud_info_catchall/share_discovery.py +++ b/cloud-info/src/cloud_info_catchall/share_discovery.py @@ -100,8 +100,3 @@ class AccessTokenShareDiscovery(ShareDiscovery): def get_token(self): return self.secret["access_token"] - - def build_share(self, project, access_token): - s = super().build_share(project, access_token) - s["auth"].update({"access_token": access_token}) - return s diff --git a/cloud-info/cloud_info_catchall/test_config_generator.py b/cloud-info/src/cloud_info_catchall/test_config_generator.py similarity index 100% rename from cloud-info/cloud_info_catchall/test_config_generator.py rename to cloud-info/src/cloud_info_catchall/test_config_generator.py diff --git a/cloud-info/cloud_info_catchall/test_share_discovery.py b/cloud-info/src/cloud_info_catchall/test_share_discovery.py similarity index 98% rename from cloud-info/cloud_info_catchall/test_share_discovery.py rename to cloud-info/src/cloud_info_catchall/test_share_discovery.py index 990fe881..ff5b5f70 100644 --- a/cloud-info/cloud_info_catchall/test_share_discovery.py +++ b/cloud-info/src/cloud_info_catchall/test_share_discovery.py @@ -162,7 +162,7 @@ def test_build_share(self): project = {"id": "foobar"} self.assertEqual( self.discoverer.build_share(project, "token"), - {"auth": {"project_id": "foobar", "access_token": "token"}}, + {"auth": {"project_id": "foobar"}}, ) diff --git a/cloud-info/cloud_info_catchall/test_token_generator.py b/cloud-info/src/cloud_info_catchall/test_token_generator.py similarity index 95% rename from cloud-info/cloud_info_catchall/test_token_generator.py rename to cloud-info/src/cloud_info_catchall/test_token_generator.py index 46a52ac3..6edff075 100644 --- a/cloud-info/cloud_info_catchall/test_token_generator.py +++ b/cloud-info/src/cloud_info_catchall/test_token_generator.py @@ -100,14 +100,17 @@ def test_valid_token_expired_exception(self, m_calendar, m_decode, m_header, m_a @patch("cloud_info_catchall.token_generator.get_access_token") def test_generate_tokens(self, m_get_access, m_valid_token): tokens = {"foo": {"access_token": "abc"}, "bar": {"access_token": "def"}} - secrets = {"foo": {}, "bar": {}} + secrets = { + "foo": {"client_id": "foo", "client_secret": "secfoo"}, + "bar": {"client_id": "bar", "client_secret": "secbar"}, + } m_valid_token.side_effect = [True, False] m_get_access.return_value = "xyz" tg.generate_tokens(self.OIDC_CONFIG, "abc", tokens, 
8, secrets) m_valid_token.assert_has_calls( [call("abc", self.OIDC_CONFIG, 8), call("def", self.OIDC_CONFIG, 8)] ) - m_get_access.assert_called_with("https://example.com", "abc", {}) + m_get_access.assert_called_with("https://example.com", "abc", secrets["bar"]) if __name__ == "__main__": diff --git a/cloud-info/cloud_info_catchall/token_generator.py b/cloud-info/src/cloud_info_catchall/token_generator.py similarity index 96% rename from cloud-info/cloud_info_catchall/token_generator.py rename to cloud-info/src/cloud_info_catchall/token_generator.py index aa56a0ca..c79ec964 100755 --- a/cloud-info/cloud_info_catchall/token_generator.py +++ b/cloud-info/src/cloud_info_catchall/token_generator.py @@ -71,6 +71,9 @@ def generate_tokens(oidc_config, scopes, tokens, token_ttl, secrets): # not our thing if not isinstance(secrets[s], dict): continue + if "client_id" not in secrets[s] or "client_secret" not in secrets[s]: + # not suitable for us + continue if "refresh_token" in secrets[s]: # ignore those that have refresh token continue diff --git a/deploy/deploy.sh b/deploy/deploy.sh deleted file mode 100755 index eaff5587..00000000 --- a/deploy/deploy.sh +++ /dev/null @@ -1,94 +0,0 @@ -#!/bin/sh -# Configure current host with ansible -# Expects as arguments the OAUTH_TOKEN, the COMMIT_SHA and the SLACK_WEBHOOK_URL - -set -e - -OAUTH_TOKEN="$1" -COMMIT_SHA="$2" -SHORT_SHA="$3" -SLACK_WEBHOOK_URL="$4" - -ansible-galaxy install git+https://github.com/EGI-Federation/ansible-role-fedcloud-ops.git - -echo "cloud_info_image: \"ghcr.io/egi-federation/fedcloud-cloud-info:sha-$SHORT_SHA\"" >> extra-vars.yaml - -# Configure! -if ansible-playbook -i inventory.yaml \ - --extra-vars @secrets.yaml \ - --extra-vars @extra-vars.yaml \ - --extra-vars @vos.yaml \ - playbook.yaml >ansible.log 2>&1 ; then - status_summary="success" - color="#6DBF59" - header="Successful deployment :rocket:" -else - status_summary="fail" - color="#EA4F47" - header="Failed deployment :boom:" -fi - -# This is a temporary way to get the auto discovery working while we transition for all sites -# copy the secrets to the /etc/egi/vos dir which is readable from the containers -cp secrets.yaml /etc/egi/vos/secrets.yaml - -GITHUB_COMMIT_URL="https://api.github.com/repos/EGI-Federation/fedcloud-catchall-operations/commits/$COMMIT_SHA/pulls" - -# Find out PR we need to update -ISSUE_NUMBER=$(curl \ - -H "Accept: application/vnd.github.groot-preview+json" \ - "$GITHUB_COMMIT_URL" | jq .[0].number) - -GITHUB_ISSUE_URL="https://api.github.com/repos/EGI-Federation/fedcloud-catchall-operations/issues/$ISSUE_NUMBER/comments" - -{ - echo "### Ansible deployment: \`$status_summary\`" - echo '
<details><summary>Deployment log</summary>' - echo - echo '```' - cat ansible.log - echo '```' - echo - echo '</details>
' -} > github_body.txt -echo "{}" | jq --arg b "$(cat github_body.txt)" '{body: $b}' > github_body.json - -# Let GitHub know -comment_url=$(curl -X POST \ - -H "Authorization: token $OAUTH_TOKEN" \ - -H "Accept: application/vnd.github.v3+json" \ - "$GITHUB_ISSUE_URL" \ - --data @github_body.json | \ - jq -r .html_url) - -cat > slack_body.json << EOF -{ - "attachments": [ - { - "color": "$color", - "blocks": [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": "$header", - "emoji": true - } - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "fedcloud-catchall-operations deployment was completed for <$comment_url| PR \`#$ISSUE_NUMBER\`> " - } - } - ] - } - ] -} -EOF - -# Let Slack know -curl -X POST -H 'Content-type: application/json' \ - --data @slack_body.json \ - "$SLACK_WEBHOOK_URL" diff --git a/deploy/playbook.yaml b/deploy/playbook.yaml deleted file mode 100644 index 7d9ab46f..00000000 --- a/deploy/playbook.yaml +++ /dev/null @@ -1,9 +0,0 @@ ---- -- hosts: all - become: true - roles: - - role: ansible-role-fedcloud-ops - tags: ["all", "docker"] - vars: - site_config_dir: ../sites/ - checkin_token_endpoint: https://aai.egi.eu/auth/realms/egi/protocol/openid-connect/token diff --git a/deploy/vos.yaml b/deploy/vos.yaml deleted file mode 100644 index 33b60ee8..00000000 --- a/deploy/vos.yaml +++ /dev/null @@ -1,212 +0,0 @@ ---- -vos: - # Co-manage service account - EOServices-vo.indra.es: - auth: "{{ fedcloud_sa }}" - biinsight.eosc-hub.eu: - auth: "{{ fedcloud_sa }}" - cesga.es: - auth: "{{ fedcloud_sa }}" - cloud.egi.eu: - auth: "{{ fedcloud_sa }}" - cos4cloud-eosc.eu: - auth: "{{ fedcloud_sa }}" - covid19.eosc-synergy.eu: - auth: "{{ fedcloud_sa }}" - cryoem.instruct-eric.eu: - auth: "{{ fedcloud_sa }}" - crystal_channelling_simulation.vo.egi.eu: - auth: "{{ fedcloud_sa }}" - culturalheritage.vo.egi.eu: - auth: "{{ fedcloud_sa }}" - d4science.org: - auth: "{{ fedcloud_sa }}" - datafurn.eosc-hub.eu: - auth: "{{ fedcloud_sa }}" - eosc-synergy.eu: - auth: "{{ fedcloud_sa }}" - fermi-lat.infn.it: - auth: "{{ fedcloud_sa }}" - icecube: - auth: "{{ fedcloud_sa }}" - ispravision.vo.egi.eu: - auth: "{{ fedcloud_sa }}" - kampal.eosc-hub.eu: - auth: "{{ fedcloud_sa }}" - med.semmelweis-univ.hu: - auth: "{{ fedcloud_sa }}" - mswss.ui.savba.sk: - auth: "{{ fedcloud_sa }}" - mteam.data.kit.edu: - auth: "{{ fedcloud_sa }}" - o3as.data.kit.edu: - auth: "{{ fedcloud_sa }}" - openrisknet.org: - auth: "{{ fedcloud_sa }}" - peachnote.com: - auth: "{{ fedcloud_sa }}" - saps-vo.i3m.upv.es: - auth: "{{ fedcloud_sa }}" - training.egi.eu: - auth: "{{ fedcloud_sa }}" - umsa.cerit-sc.cz: - auth: "{{ fedcloud_sa }}" - vo.access.egi.eu: - auth: "{{ fedcloud_sa }}" - vo.ai4eosc.eu: - auth: "{{ fedcloud_sa }}" - vo.aneris.eu: - auth: "{{ fedcloud_sa }}" - vo.bd4nrg.eu: - auth: "{{ fedcloud_sa }}" - vo.binare-oy.eu: - auth: "{{ fedcloud_sa }}" - vo.beamide.com: - auth: "{{ fedcloud_sa }}" - vo.builtrix.tech: - auth: "{{ fedcloud_sa }}" - vo.carouseldancing.org: - auth: "{{ fedcloud_sa }}" - vo.cite.gr: - auth: "{{ fedcloud_sa }}" - vo.decido-project.eu: - auth: "{{ fedcloud_sa }}" - vo.deltares.nl: - auth: "{{ fedcloud_sa }}" - vo.digitbrain.eu: - auth: "{{ fedcloud_sa }}" - vo.ebrain-health.eu: - auth: "{{ fedcloud_sa }}" - vo.emphasisproject.eu: - auth: "{{ fedcloud_sa }}" - vo.emso-eric.eu: - auth: "{{ fedcloud_sa }}" - vo.enes.org: - auth: "{{ fedcloud_sa }}" - vo.environmental.egi.eu: - auth: "{{ fedcloud_sa }}" - vo.eu-openscreen.eu: - auth: "{{ fedcloud_sa }}" - 
vo.eurogeoss.eu: - auth: "{{ fedcloud_sa }}" - vo.eurosea.marine.ie: - auth: "{{ fedcloud_sa }}" - vo.e-rihs.eu: - auth: "{{ fedcloud_sa }}" - vo.geoss.eu: - auth: "{{ fedcloud_sa }}" - vo.i-nergy.eu: - auth: "{{ fedcloud_sa }}" - vo.imagine-ai.eu: - auth: "{{ fedcloud_sa }}" - vo.inteligg.com: - auth: "{{ fedcloud_sa }}" - vo.latitudo40.com.eu: - auth: "{{ fedcloud_sa }}" - vo.lethe-project.eu: - auth: "{{ fedcloud_sa }}" - vo.matrycs.eu: - auth: "{{ fedcloud_sa }}" - vo.max-centre.eu: - auth: "{{ fedcloud_sa }}" - vo.mightee.idia.za: - auth: "{{ fedcloud_sa }}" - vo.nextgeoss.eu: - auth: "{{ fedcloud_sa }}" - vo.notebooks.egi.eu: - auth: "{{ fedcloud_sa }}" - vo.obsea.es: - auth: "{{ fedcloud_sa }}" - vo.oipub.com: - auth: "{{ fedcloud_sa }}" - vo.openrdm.eu: - auth: "{{ fedcloud_sa }}" - vo.operas-eu.org: - auth: "{{ fedcloud_sa }}" - vo.pangeo.eu: - auth: "{{ fedcloud_sa }}" - vo.panosc.eu: - auth: "{{ fedcloud_sa }}" - vo.phiri.eu: - auth: "{{ fedcloud_sa }}" - vo.pithia.eu: - auth: "{{ fedcloud_sa }}" - vo.plocan.eu: - auth: "{{ fedcloud_sa }}" - vo.projectescape.eu: - auth: "{{ fedcloud_sa }}" - vo.seadatanet.org: - auth: "{{ fedcloud_sa }}" - vo.sphinxsys.org: - auth: "{{ fedcloud_sa }}" - vo.stars4all.eu: - auth: "{{ fedcloud_sa }}" - vo.thepund.it: - auth: "{{ fedcloud_sa }}" - vo.ubora-biomedical.org: - auth: "{{ fedcloud_sa }}" - vo.usegalaxy.eu: - auth: "{{ fedcloud_sa }}" - worsica.vo.incd.pt: - auth: "{{ fedcloud_sa }}" - wp9-pilot2.eosc-hub.eu: - auth: "{{ fedcloud_sa }}" - vo.radiotracers4psma.eu: - auth: "{{ fedcloud_sa }}" - # Perun based - aquamonitor.c-scale.eu: - auth: "{{ fedcloud_sa }}" - demo.fedcloud.egi.eu: - auth: "{{ fedcloud_sa }}" - eval.c-scale.eu: - auth: "{{ fedcloud_sa }}" - waterwatch.c-scale.eu: - auth: "{{ fedcloud_sa }}" - - # certificate based VOs - biomed: - auth: "{{ robot_sa }}" - cesga: - auth: "{{ robot_sa }}" - dteam: - auth: "{{ robot_sa }}" - eiscat.se: - auth: "{{ robot_sa }}" - enmr.eu: - auth: "{{ robot_sa }}" - fedcloud.egi.eu: - auth: "{{ robot_sa }}" - fusion: - auth: "{{ robot_sa }}" - ops: - auth: "{{ robot_sa }}" - vo.nbis.se: - auth: "{{ robot_sa }}" - - # Eduteams based - vo.eoscfuture-sp.panosc.eu: - auth: "{{ fedcloud_sa }}" - vo.europlanet-vespa.eu: - auth: "{{ fedcloud_sa }}" - lagoproject.net: - auth: "{{ fedcloud_sa }}" - - # Missing membership - chipster.csc.fi: - cms: - belle: - bioisi: - drihm.eu: - opencoast.eosc-hub.eu: - vo.clarin.eu: - vo.complex-systems.eu: - vo.envri-fair.eu: - vo.envrihub.eu: - vo.indigo-datacloud.eu: - vo.lifewatch.eu: - vo.inactive-sarscov2.eu: - university.eosc-synergy.eu: - # unclear - vo.elixir-europe.org: - # VOMS but down - blazarmonitoring.asi.it: diff --git a/doc/architecture/decisions/0001-record-architecture-decisions.md b/doc/architecture/decisions/0001-record-architecture-decisions.md new file mode 100644 index 00000000..910a18ed --- /dev/null +++ b/doc/architecture/decisions/0001-record-architecture-decisions.md @@ -0,0 +1,19 @@ +# 1. Record architecture decisions + +Date: 2024-05-24 + +## Status + +Accepted + +## Context + +We need to record the architectural decisions made on this project. + +## Decision + +We will use Architecture Decision Records, as [described by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions). + +## Consequences + +See Michael Nygard's article, linked above. For a lightweight ADR toolset, see Nat Pryce's [adr-tools](https://github.com/npryce/adr-tools). 
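The `.adr-dir` file added at the top of this diff is the marker that adr-tools reads to locate the records directory. A minimal sketch of how the next record would be created with adr-tools (assuming the tool is installed locally; command behaviour may vary between versions):

```sh
# .adr-dir already contains "doc/architecture/decisions", so adr-tools
# numbers and places new records there automatically.
adr new "JSON rendering"
# -> creates doc/architecture/decisions/0002-json-rendering.md from the template
```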
diff --git a/doc/architecture/decisions/0002-json-rendering.md b/doc/architecture/decisions/0002-json-rendering.md new file mode 100644 index 00000000..c454542c --- /dev/null +++ b/doc/architecture/decisions/0002-json-rendering.md @@ -0,0 +1,36 @@ +# 2. JSON rendering + +Date: 2024-05-24 + +## Status + +Accepted + +## Context + +The information published by cloud-info-provider has used the GLUE2.1 LDIF +rendering, as this was initially meant for publication into an LDAP-based BDII. +Since we moved to the AppDB IS, BDII is no longer needed; however, as the AppDB IS +implementation was created with BDII in mind, it was ready to parse LDIF files +and it was easier and faster to keep producing information in that format. + +Now, with the redesign of the system, LDIF is no longer a requirement and formats +that are easier to handle and query can be used. Besides +[LDIF](https://github.com/OGF-GLUE/LDAP), GLUE2.1 has [XML](https://github.com/OGF-GLUE/XSD), +[SQL](https://github.com/OGF-GLUE/SQL), and [JSON](https://github.com/OGF-GLUE/JSON). + +## Decision + +Start producing information using the GLUE JSON rendering, getting it ready for +consumption by the new tools that will eventually replace the AppDB IS. +Publish this information into an S3 bucket with a directory per available site. +For uploading the JSON objects, use [rclone](https://rclone.org/), as this is a +generic tool that works with the potential S3 storage providers we may use +(CloudFlare/MinIO/Swift); a minimal upload example is sketched further below. + +Keep publishing the LDIF rendering through the AMS for the AppDB IS. + +## Consequences + +No changes for the AppDB IS, as we keep publishing the LDIF rendering, but this +will enable the development of a replacement using a new source of information. diff --git a/image-sync/Dockerfile b/image-sync/Dockerfile new file mode 100644 index 00000000..7310df28 --- /dev/null +++ b/image-sync/Dockerfile @@ -0,0 +1,53 @@ +FROM python:3-slim as build + +SHELL ["/bin/bash", "-o", "pipefail", "-c"] + +# hadolint ignore=DL3008 +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + build-essential gcc git curl gnupg2 \ + && curl -s https://dist.eugridpma.info/distribution/igtf/current/GPG-KEY-EUGridPMA-RPM-3 \ + | apt-key add - \ + && echo "deb https://repository.egi.eu/sw/production/cas/1/current egi-igtf core" > /etc/apt/sources.list.d/igtf.list \ + && apt-get update \ + && apt-get install -y ca-policy-egi-core \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /image-sync + +RUN python -m venv /image-sync/venv +ENV PATH="/image-sync/venv/bin:$PATH" + +COPY requirements.txt . + +RUN pip install --no-cache-dir -r requirements.txt \ + && cat /etc/grid-security/certificates/*.pem >> "$(/image-sync/venv/bin/python -m requests.certs)" + +COPY . . + +RUN pip install --no-cache-dir .
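Referring back to ADR 0002 above: a minimal, hypothetical sketch of the per-site S3 upload it describes. The remote name, bucket name, endpoint and file name are illustrative assumptions, not values taken from this repository; it only relies on rclone's standard environment-variable configuration.

```
# configure an S3 remote named "s3" purely via environment variables
export RCLONE_CONFIG_S3_TYPE=s3
export RCLONE_CONFIG_S3_PROVIDER=Other
export RCLONE_CONFIG_S3_ACCESS_KEY_ID="<access key>"
export RCLONE_CONFIG_S3_SECRET_ACCESS_KEY="<secret key>"
export RCLONE_CONFIG_S3_ENDPOINT="https://object-store.example.org"

# one directory per available site, holding that site's GLUE 2.1 JSON rendering
rclone copy CESNET-MCC.glue.json s3:cloud-info/CESNET-MCC/
```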
+ +# The actual image +FROM python:3-slim + +LABEL org.opencontainers.image.source=https://github.com/EGI-Federation/fedcloud-catchall-operations + +SHELL ["/bin/bash", "-o", "pipefail", "-c"] + +# hadolint ignore=DL3015, DL3008 +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + gnupg2 qemu-utils \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /image-sync + +RUN groupadd -g 1999 python \ + && useradd -r -u 1999 -g python python + +COPY --chown=python:python --from=build /image-sync/venv ./venv + +USER 1999 + +ENV PATH="/image-sync/venv/bin:$PATH" +CMD ["image-sync"] diff --git a/image-sync/deploy/.main.tf.swp b/image-sync/deploy/.main.tf.swp new file mode 100644 index 00000000..0f6f7377 Binary files /dev/null and b/image-sync/deploy/.main.tf.swp differ diff --git a/image-sync/deploy/CESNET-MCC.tfvars b/image-sync/deploy/CESNET-MCC.tfvars new file mode 100644 index 00000000..19eeb820 --- /dev/null +++ b/image-sync/deploy/CESNET-MCC.tfvars @@ -0,0 +1,8 @@ +# Network +net_id = "5b8fbb48-461d-4907-ac79-7f7b2ebd17f9" + +# Flavor: standard.medium 2cores/4GB RAM +flavor_id = "4c153ce3-a163-4668-baa7-2cbcb57e2dd8" + +# Image: ubuntu bionic +image_id = "e8d75fc1-ac32-4851-90b5-b4c925e9e6f8" diff --git a/image-sync/deploy/NCG-INGRID-PT.tfvars b/image-sync/deploy/NCG-INGRID-PT.tfvars new file mode 100644 index 00000000..97a34dff --- /dev/null +++ b/image-sync/deploy/NCG-INGRID-PT.tfvars @@ -0,0 +1,8 @@ +# Network +net_id = "f15a0e1f-570e-4135-9739-a59b8c2b3e8e" + +# Flavor: svc1.m 2cores/4GB RAM +flavor_id = "737f8483-8063-4567-a8e5-e09a4bcbdb49" + +# Image: ubuntu 22.04 +image_id = "966f2e5a-7b48-4cb2-be92-6e2132413cf2" diff --git a/image-sync/deploy/README.md b/image-sync/deploy/README.md new file mode 100644 index 00000000..ecea8f66 --- /dev/null +++ b/image-sync/deploy/README.md @@ -0,0 +1,25 @@ +# EGI Cloud Catch-all operations deployment + +This directory manages the automated deployment of the infrastructure on +selected providers. + +## Deployment + +Deployment is performed in 2 phases: + +1. Terraforming the VM where the cloud-info-provider is run +1. Configuring the VM with ansible to run the cloud-info-provider + +Everything is managed automatically via GitHub Actions: on pull requests +the terraform plan is updated and, when merging, it is applied and +ansible is run on the resulting infrastructure. + +### Secrets + +Secrets are stored in GitHub.
These include: +- `ANSIBLE_SECRETS`: `yaml` file with robot account credentials and AMS token + for pushing messages +- `APP_ID` and `APP_PRIVATE_KEY`: credentials for GitHub app capable of + getting a token to pull the repository at the deployed VM +- `CHECKIN_CLIENT_ID`, `CHECKIN_CLIENT_SECRET` and `CHECKIN_REFRESH_TOKEN` with + valid Check-in credentials for deployment of the VM on the provider diff --git a/image-sync/deploy/backend.tf b/image-sync/deploy/backend.tf new file mode 100644 index 00000000..1fabdd85 --- /dev/null +++ b/image-sync/deploy/backend.tf @@ -0,0 +1,12 @@ +# This is where the info about the deployment is to be stored +terraform { + backend "swift" { + container = "terraform" + cloud = "backend" + } +} + +# The provider where the deployment is actually performed +provider "openstack" { + cloud = "deploy" +} diff --git a/image-sync/deploy/cloud-init.yaml b/image-sync/deploy/cloud-init.yaml new file mode 100644 index 00000000..12696fb2 --- /dev/null +++ b/image-sync/deploy/cloud-init.yaml @@ -0,0 +1,95 @@ +#cloud-config +--- +disk_setup: + ephemeral0: + table_type: 'mbr' + layout: true + overwrite: false +fs_setup: + - label: ephemeral0 + filesystem: ext4 + device: ephemeral0 + partition: any + overwrite: false +mounts: + - [ ephemeral0, /var/cache/image-sync ] + +users: + - name: egi + gecos: EGI + primary_group: egi + groups: users + shell: /bin/bash + sudo: ALL=(ALL) NOPASSWD:ALL + ssh_import_id: + - gh:enolfc + - gh:gwarf + +apt: + sources: + ansible-ppa.list: + source: "deb http://ppa.launchpad.net/ansible/ansible/ubuntu xenial main" + # this is not a secret + keyid: 6125E2A8C77F2818FB7BD15B93C4A3FD7BB9C367 # gitleaks:allow + +packages: + - git + - ansible + - jq + - python3-openstackclient + - python3-pip + - python3-venv + - retry + +write_files: + - content: | + #!/bin/sh + set -e + + mkdir -p /var/tmp/egi + cd /var/tmp/egi || exit + + systemctl start notify + + # Valid GitHub token to access the repo + OAUTH_TOKEN="%TOKEN%" + SLACK_WEBHOOK_URL="%SLACK_WEBHOOK_URL%" + COMMIT_SHA="%REF%" + SHORT_COMMIT_SHA="%SHORT_REF%" + FEDCLOUD_LOCKER_TOKEN="%FEDCLOUD_LOCKER_TOKEN%" + + # get the repo code and untar at cwd + curl -L -H "Accept: application/vnd.github.v3+raw" \ + "https://api.github.com/repos/EGI-Federation/fedcloud-catchall-operations/tarball/$COMMIT_SHA" | \ + tar xz --strip=1 + cd deploy + ./deploy.sh "$OAUTH_TOKEN" "$COMMIT_SHA" "$FEDCLOUD_LOCKER_TOKEN" \ + "$SHORT_COMMIT_SHA" "$SLACK_WEBHOOK_URL" + path: /var/lib/cloud/scripts/per-boot/deploy.sh + permissions: '0755' + - content: | + #!/bin/sh + + mkdir -p /var/tmp/egi + VM_ID="$(cloud-init query instance_id)" + + cloud-init status --wait -l > "/var/tmp/egi/$VM_ID" + # try 10 times, otherwise just die + retry -t 10 -d 90 -- openstack --os-cloud backend object create \ + --name "$VM_ID" fedcloud-catchall "/var/tmp/egi/$VM_ID" + path: /usr/local/bin/notify.sh + permissions: '0755' + - content: | + [Unit] + Description=Notify the github action + + [Service] + ExecStart=/usr/local/bin/notify.sh + + [Install] + WantedBy=multi-user.target + path: /etc/systemd/system/notify.service + - content: "%CLOUDS_YAML%" + encoding: base64 + path: /etc/openstack/clouds.yaml + permissions: '0644' diff --git a/image-sync/deploy/clouds.yaml b/image-sync/deploy/clouds.yaml new file mode 100644 index 00000000..53a61d66 --- /dev/null +++ b/image-sync/deploy/clouds.yaml @@ -0,0 +1,21 @@ +--- +backend_token: &bt "backend_secret" +deploy_token: &dt "deploy_secret" + +clouds: + backend: + site: IISAS-FedCloud + vo: cloud.egi.eu + 
auth_type: token + auth: + auth_url: https://cloud.ui.savba.sk:5000/v3/ + token: *bt + project_id: 6b042927bcfa466cb9eb56d3ea679987 + deploy: + site: IISAS-FedCloud + vo: cloud.egi.eu + auth_type: token + auth: + auth_url: https://cloud.ui.savba.sk:5000/v3/ + token: *dt + project_id: 6b042927bcfa466cb9eb56d3ea679987 diff --git a/image-sync/deploy/deploy.sh b/image-sync/deploy/deploy.sh new file mode 100755 index 00000000..dd9371a0 --- /dev/null +++ b/image-sync/deploy/deploy.sh @@ -0,0 +1,111 @@ +#!/bin/sh +# Configure current host with ansible +# Expects as arguments: +# - a GitHub OAUTH_TOKEN to update the PR +# - the COMMIT_SHA +# - a locker for fedcloud secret to obtain the secrets +# - the SHORT_SHA used for pulling the docker image to use +# - a SLACK_WEBHOOK_URL to report on the status +set -e + +OAUTH_TOKEN="$1" +COMMIT_SHA="$2" +FEDCLOUD_SECRET_LOCKER="$3" +SHORT_SHA="$4" +SLACK_WEBHOOK_URL="$5" + +# create a virtual env for fedcloudclient +python3 -m venv "$PWD/.venv" +"$PWD/.venv/bin/pip" install fedcloudclient + +TMP_SECRETS="$(mktemp)" +"$PWD/.venv/bin/fedcloud" secret get --locker-token "$FEDCLOUD_SECRET_LOCKER" \ + deploy data >"$TMP_SECRETS" && mv "$TMP_SECRETS" secrets.yaml + +cat >>extra-vars.yaml <ansible.log 2>&1; then + status_summary="success" + color="#6DBF59" + header="Successful deployment :rocket:" +else + status_summary="fail" + color="#EA4F47" + header="Failed deployment :boom:" +fi + +# This is a temporary way to get the auto discovery working while we transition for all sites +# copy the secrets to the /etc/egi/vos dir which is readable from the containers +cp secrets.yaml /etc/egi/vos/secrets.yaml + +# make sure the container user (1999) can access the files +chown -R 1999:1999 /etc/egi/ + +GITHUB_COMMIT_URL="https://api.github.com/repos/EGI-Federation/fedcloud-catchall-operations/commits/$COMMIT_SHA/pulls" + +# Find out PR we need to update +ISSUE_NUMBER=$(curl \ + -H "Accept: application/vnd.github.groot-preview+json" \ + "$GITHUB_COMMIT_URL" | jq .[0].number) + +GITHUB_ISSUE_URL="https://api.github.com/repos/EGI-Federation/fedcloud-catchall-operations/issues/$ISSUE_NUMBER/comments" + +{ + echo "### Ansible deployment: \`$status_summary\`" + echo '
<details><summary>Deployment log</summary>' + echo + echo '```' + cat ansible.log + echo '```' + echo + echo '</details>
' +} >github_body.txt +echo "{}" | jq --arg b "$(cat github_body.txt)" '{body: $b}' >github_body.json + +# Let GitHub know +comment_url=$(curl -X POST \ + -H "Authorization: token $OAUTH_TOKEN" \ + -H "Accept: application/vnd.github.v3+json" \ + "$GITHUB_ISSUE_URL" \ + --data @github_body.json | + jq -r .html_url) + +cat >slack_body.json < " + } + } + ] + } + ] +} +EOF + +# Let Slack know +curl -X POST -H 'Content-type: application/json' \ + --data @slack_body.json \ + "$SLACK_WEBHOOK_URL" diff --git a/image-sync/deploy/extra-vars.yaml b/image-sync/deploy/extra-vars.yaml new file mode 100644 index 00000000..3d0b7106 --- /dev/null +++ b/image-sync/deploy/extra-vars.yaml @@ -0,0 +1,3 @@ +--- +# extra configuration variables that we may need for deployment should be +# defined in this file diff --git a/image-sync/deploy/inventory.yaml b/image-sync/deploy/inventory.yaml new file mode 100644 index 00000000..38404062 --- /dev/null +++ b/image-sync/deploy/inventory.yaml @@ -0,0 +1,5 @@ +--- +all: + hosts: + localhost: + ansible_connection: local diff --git a/image-sync/deploy/main.tf b/image-sync/deploy/main.tf new file mode 100644 index 00000000..16efa219 --- /dev/null +++ b/image-sync/deploy/main.tf @@ -0,0 +1,26 @@ +resource "openstack_blockstorage_volume_v3" "image-cache" { + name = "image-cache" + size = 200 +} + +resource "openstack_compute_instance_v2" "image-sync" { + name = "image-sync" + image_id = var.image_id + flavor_id = var.flavor_id + security_groups = ["default"] + user_data = file("cloud-init.yaml") + network { + uuid = var.net_id + } +} + +resource "openstack_compute_volume_attach_v2" "attached" { + instance_id = openstack_compute_instance_v2.image-sync.id + volume_id = openstack_blockstorage_volume_v3.image-cache.id +} + + + +output "instance-id" { + value = openstack_compute_instance_v2.image-sync.id +} diff --git a/image-sync/deploy/playbook.yaml b/image-sync/deploy/playbook.yaml new file mode 100644 index 00000000..9cda45fd --- /dev/null +++ b/image-sync/deploy/playbook.yaml @@ -0,0 +1,7 @@ +--- +- hosts: all + become: true + roles: + - role: catchall + vars: + site_config_dir: ../sites/ diff --git a/image-sync/deploy/roles/catchall/defaults/main.yaml b/image-sync/deploy/roles/catchall/defaults/main.yaml new file mode 100644 index 00000000..decad17e --- /dev/null +++ b/image-sync/deploy/roles/catchall/defaults/main.yaml @@ -0,0 +1,36 @@ +# AMS details +ams_project: egi_cloud_info +ams_host: msg.argo.grnet.gr +ams_token: secret + +# check-in endpoint +checkin: + token_endpoint: "https://aai.egi.eu/auth/realms/egi/protocol/openid-connect/token" + +# docker image for the cloud info provider +cloud_info_image: egifedcloud/ops-cloud-info:latest + +# site configuration location +site_config_dir: sites +site_config_mountpoint: /sites + +# default user for the containers +egi_user: "1999" +egi_group: "1999" + +# No site information as default +sites: [] + +cloud_info_cron: + minute: "4,34" + hour: "*" + weekday: "*" + timeout: "600" + +image_sync_image: egifedcloud/ops-image-sync:latest + +image_sync_cron: + minute: "5" + hour: "*/3" + weekday: "*" + timeout: "9000" # 2.5 hours diff --git a/image-sync/deploy/roles/catchall/molecule/default/converge.yml b/image-sync/deploy/roles/catchall/molecule/default/converge.yml new file mode 100644 index 00000000..2f4ad264 --- /dev/null +++ b/image-sync/deploy/roles/catchall/molecule/default/converge.yml @@ -0,0 +1,28 @@ +--- +- name: Converge + hosts: all + tasks: + - name: "Include catchall role" + ansible.builtin.include_role: + name: 
"catchall" + vars: + sites: + - endpoint: https://example.com:5000/v3/ + gocdb: foo.bar + vos: + - auth: + project_id: a123456 + name: sample_vo + - auth: + project_id: b987659 + name: vo.example.com + - endpoint: https://site.org:5000/v3/ + gocdb: bar.foo + region: region1 + vos: + - auth: + project_id: a123456 + name: sample_vo + - auth: + project_id: b987659 + name: vo.example.com diff --git a/image-sync/deploy/roles/catchall/molecule/default/molecule.yml b/image-sync/deploy/roles/catchall/molecule/default/molecule.yml new file mode 100644 index 00000000..fcdd0e07 --- /dev/null +++ b/image-sync/deploy/roles/catchall/molecule/default/molecule.yml @@ -0,0 +1,13 @@ +--- +dependency: + name: galaxy +driver: + name: docker +platforms: + - name: instance + image: ubuntu:latest +lint: ansible-lint --exclude .github/ +provisioner: + name: ansible +verifier: + name: testinfra diff --git a/image-sync/deploy/roles/catchall/molecule/default/tests/test_default.py b/image-sync/deploy/roles/catchall/molecule/default/tests/test_default.py new file mode 100644 index 00000000..5fb4704b --- /dev/null +++ b/image-sync/deploy/roles/catchall/molecule/default/tests/test_default.py @@ -0,0 +1,30 @@ +import hashlib +import os + +import testinfra.utils.ansible_runner + +testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( + os.environ["MOLECULE_INVENTORY_FILE"] +).get_hosts("all") + + +def test_site_files(host): + endpoint_hash = hashlib.md5(b"https://example.com:5000/v3/").hexdigest() + filename = "foo-bar-%s" % endpoint_hash + assert host.file("/etc/egi/cloud-info/").is_directory + assert host.file("/etc/egi/cloud-info/%s.yaml" % filename).exists + assert not host.file("/etc/egi/cloud-info/%s.env" % filename).contains("OS_REGION") + assert host.file("/etc/egi/cloud-info/%s.env" % filename).exists + assert host.file("/etc/cron.d/cloud-info-%s" % filename).exists + + +def test_site_files_region(host): + endpoint_hash = hashlib.md5(b"https://site.org:5000/v3/").hexdigest() + filename = "bar-foo-%s" % endpoint_hash + assert host.file("/etc/egi/cloud-info/").is_directory + assert host.file("/etc/egi/cloud-info/%s.yaml" % filename).exists + assert host.file("/etc/egi/cloud-info/%s.env" % filename).exists + assert host.file("/etc/egi/cloud-info/%s.env" % filename).contains( + "OS_REGION=region1" + ) + assert host.file("/etc/cron.d/cloud-info-%s" % filename).exists diff --git a/image-sync/deploy/roles/catchall/requirements.txt b/image-sync/deploy/roles/catchall/requirements.txt new file mode 100644 index 00000000..c745c484 --- /dev/null +++ b/image-sync/deploy/roles/catchall/requirements.txt @@ -0,0 +1,4 @@ +molecule +molecule-plugins[docker] +pytest-testinfra +ansible-lint diff --git a/image-sync/deploy/roles/catchall/tasks/cloud-info.yml b/image-sync/deploy/roles/catchall/tasks/cloud-info.yml new file mode 100644 index 00000000..5b2c8fe3 --- /dev/null +++ b/image-sync/deploy/roles/catchall/tasks/cloud-info.yml @@ -0,0 +1,38 @@ +--- +- name: Cloud-info dirs + ansible.builtin.file: + path: "{{ item }}" + state: directory + mode: "755" + owner: "{{ egi_user }}" + group: "{{ egi_group }}" + loop: + - /etc/egi/cloud-info + - /var/lock/cloud-info + - /var/log/cloud-info + +- name: Cloud-info config directory + ansible.builtin.template: + src: site-info.yaml.j2 + dest: /etc/egi/cloud-info/{{ filename }}.yaml + mode: "600" + +- name: Cloud info env + ansible.builtin.template: + src: cloud-info.env.j2 + dest: /etc/egi/cloud-info/{{ filename }}.env + mode: "600" + +- name: Cloud info cron + ansible.builtin.cron: + 
name: cloud-info-provider {{ site.gocdb }} + weekday: "{{ cloud_info_cron.weekday }}" + minute: "{{ cloud_info_cron.minute }}" + hour: "{{ cloud_info_cron.hour }}" + user: root + job: > + flock -n -w {{ cloud_info_cron.timeout }} /var/lock/cloud-info/{{ filename }} + docker run --rm -v /etc/egi:/etc/egi:ro + --env-file /etc/egi/cloud-info/{{ filename }}.env + {{ cloud_info_image }} >> /var/log/cloud-info/{{ filename }}.log 2>&1 + cron_file: "cloud-info-{{ filename }}" diff --git a/image-sync/deploy/roles/catchall/tasks/docker.yml b/image-sync/deploy/roles/catchall/tasks/docker.yml new file mode 100644 index 00000000..15405be8 --- /dev/null +++ b/image-sync/deploy/roles/catchall/tasks/docker.yml @@ -0,0 +1,59 @@ +--- +- name: Install dependencies + ansible.builtin.apt: + name: + - apt-transport-https + - ca-certificates + - curl + - gnupg-agent + - software-properties-common + state: present + update_cache: true + +- name: Docker repo key + ansible.builtin.apt_key: + id: 9DC858229FC7DD38854AE2D88D81803C0EBFCD88 + url: https://download.docker.com/linux/ubuntu/gpg + state: present + +- name: Add docker repo + ansible.builtin.apt_repository: + repo: "deb [arch=amd64] https://download.docker.com/linux/ubuntu {{ ansible_distribution_release }} stable" + state: present + +- name: Install docker + ansible.builtin.apt: + name: + - docker-ce + - docker-ce-cli + - containerd.io + state: present + update_cache: true + +- name: Ensure docker config dir is present + ansible.builtin.file: + path: /etc/docker + state: directory + mode: "775" + +- name: Configure docker + ansible.builtin.copy: + # this is very CESNET-MCC specific, may be better to move as configurable + content: | + { + "mtu": 1442, + "exec-opts": ["native.cgroupdriver=systemd"], + "log-driver": "json-file", + "log-opts": { + "max-size": "100m" + }, + "storage-driver": "overlay2" + } + dest: /etc/docker/daemon.json + mode: "660" + +- name: Restart docker + ansible.builtin.systemd: + name: docker + state: restarted + daemon_reload: true diff --git a/image-sync/deploy/roles/catchall/tasks/main.yml b/image-sync/deploy/roles/catchall/tasks/main.yml new file mode 100644 index 00000000..1da087af --- /dev/null +++ b/image-sync/deploy/roles/catchall/tasks/main.yml @@ -0,0 +1,62 @@ +--- +- name: Ensure cron is available + tags: ["cloud-info", "image-sync"] + ansible.builtin.apt: + name: cron + state: present + update_cache: true + +- name: Install docker + ansible.builtin.include_tasks: + file: docker.yml + apply: + tags: "docker" + # this is only executed if explicity requested + tags: ['never', 'docker'] + +- name: Load site configuration + tags: ["cloud-info", "image-sync"] + ansible.builtin.include_vars: + file: "{{ item }}" + name: "{{ 'site_incl_vars_' ~ item | basename | splitext | first }}" + with_fileglob: + - "{{ site_config_dir }}/*.yaml" + +- name: Set site configuration variable + tags: ["cloud-info", "image-sync"] + ansible.builtin.set_fact: + sites: "{{ sites | default([]) + [lookup('vars', item)] }}" + loop: "{{ query('varnames', '^site_incl_vars_(.*)$') }}" + +- name: EGI configuration directories + tags: ["cloud-info", "image-sync"] + ansible.builtin.file: + path: "{{ item }}" + state: directory + mode: "755" + owner: "{{ egi_user }}" + group: "{{ egi_group }}" + loop: + - /etc/egi + - /etc/egi/vos + +- name: Site specific config + tags: ["cloud-info"] + ansible.builtin.include_tasks: + file: cloud-info.yml + apply: + tags: "cloud-info" + vars: + site: "{{ item }}" + filename: "{{ item.gocdb | replace('.', '-') }}-{{ item.endpoint 
| hash('md5') }}" + with_items: + - "{{ sites }}" + when: sites is iterable + + +- name: Image sync config + tags: ["image-sync"] + ansible.builtin.include_tasks: + file: sync.yml + apply: + tags: "image-sync" diff --git a/image-sync/deploy/roles/catchall/tasks/sync.yml b/image-sync/deploy/roles/catchall/tasks/sync.yml new file mode 100644 index 00000000..6e27ede3 --- /dev/null +++ b/image-sync/deploy/roles/catchall/tasks/sync.yml @@ -0,0 +1,35 @@ +--- +- name: Sync dirs + ansible.builtin.file: + path: "{{ item }}" + state: directory + mode: "755" + owner: "{{ egi_user }}" + group: "{{ egi_group }}" + loop: + - /etc/egi/image_sync + - /var/cache/image_sync + +- name: sync configuration + ansible.builtin.template: + src: sync.conf.j2 + dest: /etc/egi/image_sync/sync.conf + mode: "600" + owner: "{{ egi_user }}" + group: "{{ egi_group }}" + +- name: Image sync cron + ansible.builtin.cron: + name: image sync + weekday: "{{ image_sync_cron.weekday }}" + minute: "{{ image_sync_cron.minute }}" + hour: "{{ image_sync_cron.hour }}" + user: root + job: > + flock -n -w {{ image_sync_cron.timeout }} /var/lock/sync + docker run --rm -v /etc/egi:/etc/egi:ro + -v {{ site_config_dir }}:{{ site_config_mountpoint }}:ro + -v /var/cache/image_sync:/atrope-state + {{ image_sync_image }} sites-sync + --config-dir /etc/egi/image_sync >> /var/log/sync.log 2>&1 + cron_file: "egi-image-sync" diff --git a/image-sync/deploy/roles/catchall/templates/cloud-info.env.j2 b/image-sync/deploy/roles/catchall/templates/cloud-info.env.j2 new file mode 100644 index 00000000..9c7c6e2a --- /dev/null +++ b/image-sync/deploy/roles/catchall/templates/cloud-info.env.j2 @@ -0,0 +1,14 @@ +AMS_HOST={{ ams_host }} +AMS_PROJECT={{ ams_project }} +AMS_TOKEN={{ ams_token }} +CHECKIN_OIDC_TOKEN={{ checkin_token_endpoint }} +CHECKIN_SECRETS_PATH=/etc/egi/vos/ +CLOUD_INFO_CONFIG=/etc/egi/cloud-info/{{ filename }}.yaml +OS_AUTH_TYPE=v3oidcaccesstoken +OS_AUTH_URL={{ site.endpoint }} +OS_IDENTITY_PROVIDER=egi.eu +OS_PROTOCOL={{ site.protocol | default('openid') }} +{% if "region" in site %} +OS_REGION={{ site.region }} +{% endif %} +SITE_NAME={{ site.gocdb }} diff --git a/image-sync/deploy/roles/catchall/templates/site-info.yaml.j2 b/image-sync/deploy/roles/catchall/templates/site-info.yaml.j2 new file mode 100644 index 00000000..293e3ccf --- /dev/null +++ b/image-sync/deploy/roles/catchall/templates/site-info.yaml.j2 @@ -0,0 +1,9 @@ +site: + name: {{ site.gocdb }} + +compute: + shares: +{% for vo in site.vos %} + {{ vo.name }}: + {{ vo | default({}) | to_nice_yaml(indent=2) | indent(6) }} +{% endfor %} diff --git a/image-sync/deploy/roles/catchall/templates/sync.conf.j2 b/image-sync/deploy/roles/catchall/templates/sync.conf.j2 new file mode 100644 index 00000000..73dd10e3 --- /dev/null +++ b/image-sync/deploy/roles/catchall/templates/sync.conf.j2 @@ -0,0 +1,7 @@ +[sync] +appdb_token = {{ appdb_token }} +site_config_dir = {{ site_config_mountpoint }} + +[checkin] +client_id = {{ checkin.client_id }} +client_secret = {{checkin.client_secret }} diff --git a/image-sync/deploy/roles/catchall/vars/main.yml b/image-sync/deploy/roles/catchall/vars/main.yml new file mode 100644 index 00000000..44b09048 --- /dev/null +++ b/image-sync/deploy/roles/catchall/vars/main.yml @@ -0,0 +1 @@ +# Role variables diff --git a/image-sync/deploy/vars.tf b/image-sync/deploy/vars.tf new file mode 100644 index 00000000..b214778b --- /dev/null +++ b/image-sync/deploy/vars.tf @@ -0,0 +1,14 @@ +variable "net_id" { + type = string + description = "The id of the network" +} + 
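These Terraform inputs (net_id here, plus image_id and flavor_id just below) are filled per provider by the *.tfvars files earlier in this directory. As a rough sketch of the manual equivalent of what the GitHub Actions workflow drives, and assuming the clouds.yaml credentials for the "backend" and "deploy" clouds are already in place, a plan for one provider might be produced like this:

```
cd image-sync/deploy
terraform init
terraform plan -var-file=CESNET-MCC.tfvars -out=plan.out
terraform apply plan.out
```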
+variable "image_id" { + type = string + description = "VM image id" +} + +variable "flavor_id" { + type = string + description = "VM flavor id" +} diff --git a/image-sync/deploy/versions.tf b/image-sync/deploy/versions.tf new file mode 100644 index 00000000..1f1e7d48 --- /dev/null +++ b/image-sync/deploy/versions.tf @@ -0,0 +1,9 @@ +terraform { + required_providers { + openstack = { + source = "terraform-provider-openstack/openstack" + version = "~> 1.48" + } + } + required_version = ">= 0.13" +} diff --git a/image-sync/pyproject.toml b/image-sync/pyproject.toml new file mode 100644 index 00000000..541b4834 --- /dev/null +++ b/image-sync/pyproject.toml @@ -0,0 +1,24 @@ +[build-system] +requires = ["setuptools>=61"] +build-backend = "setuptools.build_meta" + +[project] +name = "image_sync" +version = "0.0.1" +description = "Sync images with atrope" +authors = [ + { name = "Enol Fernandez", email = "enol.fernandez@egi.eu" }, +] +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", +] +requires-python = ">=3.10" + +[project.scripts] +image-sync = "image_sync.sync:main" + +[tool.setuptools.dynamic] +dependencies = {file = ["requirements.txt"]} + diff --git a/image-sync/requirements.txt b/image-sync/requirements.txt new file mode 100644 index 00000000..1500363a --- /dev/null +++ b/image-sync/requirements.txt @@ -0,0 +1,4 @@ +git+https://github.com/enolfc/atrope@catchall +requests +oslo.config +PyYAML diff --git a/image-sync/src/image_sync/__init__.py b/image-sync/src/image_sync/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/image-sync/src/image_sync/sync.py b/image-sync/src/image_sync/sync.py new file mode 100644 index 00000000..7e4aea71 --- /dev/null +++ b/image-sync/src/image_sync/sync.py @@ -0,0 +1,174 @@ +import glob +import logging +import os +import os.path +import subprocess +import sys +import tempfile + +import requests +import yaml +from oslo_config import cfg + +# Configuraion +CONF = cfg.CONF +CONF.register_opts( + [ + cfg.StrOpt("site_config_dir", default="."), + cfg.StrOpt("graphql_url", default="https://is.appdb.egi.eu/graphql"), + cfg.ListOpt("formats", default=[]), + cfg.StrOpt("appdb_token"), + ], + group="sync", +) + +# Check-in config +checkin_grp = cfg.OptGroup("checkin") +CONF.register_opts( + [ + cfg.StrOpt("client_id"), + cfg.StrOpt("client_secret"), + cfg.StrOpt("scopes", default="openid profile eduperson_entitlement email"), + cfg.StrOpt( + "discovery_endpoint", + default="https://aai.egi.eu/auth/realms/egi/.well-known/openid-configuration", + ), + ], + group="checkin", +) + + +def fetch_site_info(): + logging.debug("Fetching site info from AppDB") + query = """ + { + siteCloudComputingEndpoints{ + items{ + endpointURL + site { + name + } + shares: shareList { + VO + entityCreationTime + projectID + } + } + } + } + """ + params = {"query": query} + r = requests.get( + CONF.sync.graphql_url, params=params, headers={"accept": "application/json"} + ) + r.raise_for_status() + data = r.json()["data"]["siteCloudComputingEndpoints"]["items"] + return data + + +def dump_atrope_config(site, share, hepix_file): + config_template = """ +[DEFAULT] +state_path = /atrope-state/ + +[glance] +auth_type = v3oidcclientcredentials +auth_url = {auth_url} +protocol = openid +identity_provider = egi.eu +client_id = {client_id} +client_secret = {client_secret} +scope = {scopes} +discovery_endpoint = {discovery_endpoint} +project_id = {project_id} +access_token_type = access_token 
+formats = {formats} + +[dispatchers] +dispatcher = glance + +[cache] +formats = {formats} + +[sources] +hepix_sources = {hepix_file} + """ + formats = site.get("formats", CONF.sync.formats) + return config_template.format( + auth_url=site["endpointURL"], + client_id=CONF.checkin.client_id, + client_secret=CONF.checkin.client_secret, + scopes=CONF.checkin.scopes, + discovery_endpoint=CONF.checkin.discovery_endpoint, + project_id=share["projectID"], + formats=",".join(formats), + hepix_file=hepix_file, + ) + + +def dump_hepix_config(share): + hepix = { + share["VO"]: { + "enabled": True, + "endorser": { + "ca": "/DC=ORG/DC=SEE-GRID/CN=SEE-GRID CA 2013", + "dn": "/DC=EU/DC=EGI/C=NL/O=Hosts/O=EGI.eu/CN=appdb.egi.eu", + }, + "prefix": "EGI ", + "project": share["projectID"], + "token": CONF.sync.appdb_token, + "url": f"https://vmcaster.appdb.egi.eu/store/vo/{share['VO']}/image.list", + } + } + return yaml.dump(hepix) + + +def do_sync(sites_config): + sites_info = fetch_site_info() + for site in sites_info: + site_name = site["site"]["name"] + # filter out those sites that are not part of the centralised ops + if site_name not in sites_config: + logging.debug(f"Discarding site {site_name}, not in config.") + continue + site_image_config = sites_config[site_name].get("images", {}) + if not site_image_config.get("sync", False): + logging.debug(f"Discarding site {site_name}, no sync set.") + continue + site.update(site_image_config) + logging.info(f"Configuring site {site_name}") + for share in site["shares"]: + logging.info(f"Configuring {share['VO']}") + with tempfile.TemporaryDirectory() as tmpdirname: + hepix_file = os.path.join(tmpdirname, "hepix.yaml") + with open(os.path.join(tmpdirname, "atrope.conf"), "w+") as f: + f.write(dump_atrope_config(site, share, hepix_file)) + with open(hepix_file, "w+") as f: + f.write(dump_hepix_config(share)) + cmd = [ + "atrope", + "--config-dir", + tmpdirname, + "sync", + ] + logging.debug(f"Running {' '.join(cmd)}") + subprocess.call(cmd) + + +def load_sites(): + sites = {} + for site_file in glob.iglob("*.yaml", root_dir=CONF.sync.site_config_dir): + with open(os.path.join(CONF.sync.site_config_dir, site_file), "r") as f: + site = yaml.safe_load(f.read()) + sites[site["gocdb"]] = site + return sites + + +def main(): + CONF(sys.argv[1:]) + logging.basicConfig(level=logging.DEBUG) + do_sync(load_sites()) + + +if __name__ == "__main__": + main() diff --git a/sites/100IT.yaml b/sites/100IT.yaml deleted file mode 100644 index 6c5804ec..00000000 --- a/sites/100IT.yaml +++ /dev/null @@ -1,16 +0,0 @@ ---- -gocdb: 100IT -endpoint: https://cloud-egi.100percentit.com:5000/v3/ -vos: -- name: dteam - auth: - project_id: 0785b78c70ce4e9f91bf851a4fd156b2 -- name: fedcloud.egi.eu - auth: - project_id: 7cfb9087632a4df5b738fbd0b795f2ad -- name: ops - auth: - project_id: 24426f375b494df5b47f1efebc272e5c -- name: vo.digitbrain.eu - auth: - project_id: 3d181dc16e654078a9cbbe1a9df6228f diff --git a/sites/CESGA.yaml b/sites/CESGA.yaml index 13c294ad..404e2a2a 100644 --- a/sites/CESGA.yaml +++ b/sites/CESGA.yaml @@ -2,9 +2,6 @@ gocdb: CESGA endpoint: https://fedcloud-osservices.egi.cesga.es:5000/v3 vos: -- name: blazarmonitoring.asi.it - auth: - project_id: 835c2dd1be6f4e319dfce60b34c1b1c3 - name: covid19.eosc-synergy.eu auth: project_id: 972298c557184a2192ebc861f3184da8 diff --git a/sites/CESNET-MCC.yaml b/sites/CESNET-MCC.yaml index fb29e17b..42810c89 100644 --- a/sites/CESNET-MCC.yaml +++ b/sites/CESNET-MCC.yaml @@ -5,9 +5,6 @@ vos: - name: biomed auth: project_id: 
eca73ad6a84d4c0088063505c36349ab -- name: chipster.csc.fi - auth: - project_id: 2ce965f162c9434cb21ac25f3b05bd9f - name: covid19.eosc-synergy.eu auth: project_id: 192e87f713474117a2a22704ac4da1a2 @@ -87,9 +84,6 @@ vos: - name: waterwatch.c-scale.eu auth: project_id: acf5d12568914e65a80150efb087cbb8 -- name: vo.carouseldancing.org - auth: - project_id: 7ace5ca998b849619bd34e922fbd2cb7 - name: vo.pangeo.eu auth: project_id: 05e0ff6e03774082aadacc75bfc1d783 diff --git a/sites/GSI-LCG2.yaml b/sites/GSI-LCG2.yaml deleted file mode 100644 index 85d95e3b..00000000 --- a/sites/GSI-LCG2.yaml +++ /dev/null @@ -1,25 +0,0 @@ ---- -gocdb: GSI-LCG2 -endpoint: https://egiosc.gsi.de:5000/v3 -vos: -- name: dteam - auth: - project_id: b761864a53df410d949c87a958a36619 -- name: fedcloud.egi.eu - auth: - project_id: 83ce3056c13c4dc4a5763e9fd1aa34ad -- name: ops - auth: - project_id: c585e9ae796649f2be13d113a43749aa -- name: vo.access.egi.eu - auth: - project_id: 1392719e6e4c4bf7ba0fce8c0acbbd22 -- name: vo.envrihub.eu - auth: - project_id: 09ea02290dcb4aa0b3bde9b179a45c43 -- name: vo.complex-systems.eu - auth: - project_id: 260beefcea224be6a278ccb8e89668dd -- name: vo.inteligg.com - auth: - project_id: 9fe5330bec51464bad89d368d73b88fc diff --git a/sites/IISAS-FedCloud-cloud.yaml b/sites/IISAS-FedCloud-cloud.yaml index 76f2be5a..36eaea93 100644 --- a/sites/IISAS-FedCloud-cloud.yaml +++ b/sites/IISAS-FedCloud-cloud.yaml @@ -26,27 +26,15 @@ vos: - name: vo.access.egi.eu auth: project_id: 71dc9c3785cc4876bfb1a4bfc681e0f3 -- name: vo.matrycs.eu - auth: - project_id: 6c2453d04ee544868b70684928d96337 - name: vo.bd4nrg.eu auth: project_id: 74128e581e6941ad91c7fce06f14e6c8 - name: vo.oipub.com auth: project_id: e188494f39564812987a4ee5f58084a7 -- name: vo.e-rihs.eu - auth: - project_id: 6eb56eb83eb54aad9c3226cdeb716c8c -- name: vo.beamide.com - auth: - project_id: 5139b731becd4f0b8fbf5fc19b909d58 - name: vo.ai4eosc.eu auth: project_id: 4d40037774784f22907d537070e85f42 -- name: vo.latitudo40.com.eu - auth: - project_id: 8cab8753df654e92a763d756905715fd - name: vo.usegalaxy.eu auth: project_id: 7a23c4fa956a499c854eb215f3a95218 diff --git a/sites/INFN-CLOUD-BARI.yaml b/sites/INFN-CLOUD-BARI.yaml index d1cd3b1e..4dc95cd0 100644 --- a/sites/INFN-CLOUD-BARI.yaml +++ b/sites/INFN-CLOUD-BARI.yaml @@ -2,6 +2,11 @@ gocdb: INFN-CLOUD-BARI endpoint: https://keystone.recas.ba.infn.it/v3 protocol: openid +images: + sync: true + format: + - qcow2 + - raw vos: - name: ops auth: diff --git a/sites/NCG-INGRID-PT.yaml b/sites/NCG-INGRID-PT.yaml index 80a12aa6..501b1c5e 100644 --- a/sites/NCG-INGRID-PT.yaml +++ b/sites/NCG-INGRID-PT.yaml @@ -5,33 +5,45 @@ vos: - name: aquamonitor.c-scale.eu auth: project_id: 8258f24c93b14473ba58892f5f2748f4 +- name: bioisi + auth: + project_id: fa764d911b1d4e5eab8e51a186d813ee - name: cloud.egi.eu auth: project_id: 6b042927bcfa466cb9eb56d3ea679987 -- name: covid19.eosc-synergy.eu +- name: dev.intertwin.eu auth: - project_id: 05e52356addc44e18ef2bd14f2e2f67d + project_id: 7e08ed7db02847a1858ce8ed0f93be3c - name: eosc-synergy.eu auth: project_id: ddf0c468c8af4e0bbb9808bfc0288381 - name: fedcloud.egi.eu auth: project_id: bd5a81e1670b48f18af33b05512a9d77 -- name: lagoproject.net - auth: - project_id: 14e8f964db4c4a8ea286b0886ab51e7c - name: opencoast.eosc-hub.eu auth: project_id: b0cea6bd85844b0693ceda70d9f94a09 - name: ops auth: - project_id: f9b54a925f8a467bba107f577dae7479 + project_id: bd5a81e1670b48f18af33b05512a9d77 - name: training.egi.eu auth: project_id: e51b8a89b30945adbf52a5d568912e4c - name: 
vo.access.egi.eu auth: project_id: bd5a81e1670b48f18af33b05512a9d77 +- name: vo.ai4eosc.eu + auth: + project_id: c61c1bb323414a248cb142eb6183d4b2 +- name: vo.envrihub.eu + auth: + project_id: e6d00f696beb482c8d6900f8a88ddd69 +- name: vo.imagine-ai.eu + auth: + project_id: 009f77df459b4a6389910e0fb20ddcaf +- name: vo.lifewatch.eu + auth: + project_id: f0c662a2d69d4072bdd9334ff9852c2d - name: worsica.vo.incd.pt auth: project_id: a53ca78c534046e5b13f4537ae698411 diff --git a/sites/UNIV-LILLE.yaml b/sites/UNIV-LILLE.yaml deleted file mode 100644 index 8c0b92bb..00000000 --- a/sites/UNIV-LILLE.yaml +++ /dev/null @@ -1,16 +0,0 @@ ---- -gocdb: UNIV-LILLE -endpoint: https://thor.univ-lille.fr:5000/v3 -vos: -- name: dteam - auth: - project_id: aa893955520a4a98a04453808d6f1232 -- name: fedcloud.egi.eu - auth: - project_id: def49ef0ac58495f90d8aa2463b33b10 -- name: ops - auth: - project_id: bee15053597542e3bdd4d7c281eef650 -- name: vo.access.egi.eu - auth: - project_id: e57db5c1e0b24bcbac9904f4ac22dcaa diff --git a/sites/UPV-GRyCAP.yaml b/sites/UPV-GRyCAP.yaml deleted file mode 100644 index f9688baf..00000000 --- a/sites/UPV-GRyCAP.yaml +++ /dev/null @@ -1,16 +0,0 @@ ---- -gocdb: UPV-GRyCAP -endpoint: https://menoscloud.i3m.upv.es:5000/v3 -vos: -- name: eosc-synergy.eu - auth: - project_id: 6f84e31391024330b16d29d6ccd26932 -- name: fedcloud.egi.eu - auth: - project_id: db929e9034f04d1698c1a0d58283366e -- name: ops - auth: - project_id: 292568ead7454709a17f19189d5a840a -- name: saps-vo.i3m.upv.es - auth: - project_id: e7608e969cfd4f49907cff17d1774898 diff --git a/sites/WALTON-CLOUD.yaml b/sites/WALTON-CLOUD.yaml index 9c7138e2..e68a0a87 100644 --- a/sites/WALTON-CLOUD.yaml +++ b/sites/WALTON-CLOUD.yaml @@ -1,6 +1,11 @@ --- gocdb: WALTON-CLOUD endpoint: https://horizon.waltoncloud.eu:5000/v3 +images: + sync: true + formats: + - qcow2 + - raw vos: - name: ops auth: diff --git a/validate.sh b/validate.sh index 873aeaa2..46f40543 100755 --- a/validate.sh +++ b/validate.sh @@ -6,82 +6,72 @@ exit_value=0 # Get all VOs names VO_LIST=$(mktemp) -curl --silent "http://cclavoisier01.in2p3.fr:8080/lavoisier/VoList?accept=json" \ - | jq -r ".data[].name" > "$VO_LIST" +curl --silent "http://cclavoisier01.in2p3.fr:8080/lavoisier/VoList?accept=json" | + jq -r ".data[].name" >"$VO_LIST" # Get fedcloudclient sites FEDCLOUD_CLI_SITES=$(mktemp) curl "https://raw.githubusercontent.com/tdviet/fedcloudclient/master/config/sites.yaml" \ - > "$FEDCLOUD_CLI_SITES" + >"$FEDCLOUD_CLI_SITES" # Temp file for nova endpoint NOVA_ENDPOINT=$(mktemp) -for f in sites/*.yaml -do - goc_site=$(grep "^gocdb:" "$f" | cut -f2 -d":" | tr -d "[:space:]") - endpoint=$(grep "^endpoint:" "$f" | cut -f2- -d":" | tr -d "[:space:]") - printf "Searching for endpoint %s in %s site (%s)\n" \ - "$endpoint" "$goc_site" "$f" - curl --silent "$goc_method&sitename=$goc_site&service_type=org.openstack.nova" \ - > "$NOVA_ENDPOINT" - if ! grep -q "$goc_site" "$NOVA_ENDPOINT" - then - printf "\033[0;31m[ERROR] Site %s not found in GOC\033[0m\n" "$goc_site" - exit_value=1 - continue - fi - if ! 
grep -q "$endpoint" "$NOVA_ENDPOINT" - then - printf "\033[0;31m[ERROR] URL %s for %s not found in GOC\033[0m\n" \ - "$endpoint" "$goc_site" - exit_value=1 - else - printf "\033[0;32m[OK]\033[0m\n" - fi - # check if all VOs configured do exist - # Try to use FQAN - # So the VO that comes from the file, it will be either: - # - just the name of the VO - # - //some more extra/ - # - /VO=/some more stuff/ - for vo in $(yq -r ".vos[].name" < "$f" | cut -f2 -d"/" | sed "s/^VO=//") - do - if ! grep -q "^$vo\$" "$VO_LIST" - then - printf "\033[0;31m[ERROR] VO %s not found in ops portal\033[0m\n" \ - "$vo" - exit_value=1 - fi - done +for f in sites/*.yaml; do + goc_site=$(grep "^gocdb:" "$f" | cut -f2 -d":" | tr -d "[:space:]") + endpoint=$(grep "^endpoint:" "$f" | cut -f2- -d":" | tr -d "[:space:]") + printf "Searching for endpoint %s in %s site (%s)\n" \ + "$endpoint" "$goc_site" "$f" + curl --silent "$goc_method&sitename=$goc_site&service_type=org.openstack.nova" \ + >"$NOVA_ENDPOINT" + if ! grep -q "$goc_site" "$NOVA_ENDPOINT"; then + printf "\033[0;31m[ERROR] Site %s not found in GOC\033[0m\n" "$goc_site" + exit_value=1 + continue + fi + if ! grep -q "$endpoint" "$NOVA_ENDPOINT"; then + printf "\033[0;31m[ERROR] URL %s for %s not found in GOC\033[0m\n" \ + "$endpoint" "$goc_site" + exit_value=1 + else + printf "\033[0;32m[OK]\033[0m\n" + fi + # check if all VOs configured do exist + # Try to use FQAN + # So the VO that comes from the file, it will be either: + # - just the name of the VO + # - //some more extra/ + # - /VO=/some more stuff/ + for vo in $(yq -r ".vos[].name" <"$f" | cut -f2 -d"/" | sed "s/^VO=//"); do + if ! grep -q "^$vo\$" "$VO_LIST"; then + printf "\033[0;31m[ERROR] VO %s not found in ops portal\033[0m\n" \ + "$vo" + exit_value=1 + fi + done - # check if site is also on: - # https://github.com/tdviet/fedcloudclient/blob/master/config/sites.yaml - if ! grep -q "$f" "$FEDCLOUD_CLI_SITES" - then - printf "\033[0;31m[ERROR] Site %s not found in fedcloudclient\033[0m\n" "$goc_site" - exit_value=1 - fi + # check if site is also on: + # https://github.com/tdviet/fedcloudclient/blob/master/config/sites.yaml + if ! grep -q "$f" "$FEDCLOUD_CLI_SITES"; then + printf "\033[0;31m[ERROR] Site %s not found in fedcloudclient\033[0m\n" "$goc_site" + exit_value=1 + fi done -for site in $(yq -r '.[]' < "$FEDCLOUD_CLI_SITES") -do - if ! test -s "sites/$(basename "$site")" - then - printf "\033[0;31m[ERROR] Site %s not found in fedcloud-catchall-operations\033[0m\n" "$(basename "$site")" - exit_value=1 - fi +for site in $(yq -r '.[]' <"$FEDCLOUD_CLI_SITES"); do + if ! test -s "sites/$(basename "$site")"; then + printf "\033[0;31m[ERROR] Site %s not found in fedcloud-catchall-operations\033[0m\n" "$(basename "$site")" + exit_value=1 + fi done # check that the VO mappings are up to date according to ops portal -for vo in $(yq -r '.vos | keys[]' < vo-mappings.yaml | cut -f2 -d"/" | sed "s/^VO=//") -do - if ! grep -q "^$vo\$" "$VO_LIST" - then - printf "\033[0;31m[ERROR] VO %s not found in ops portal\033[0m\n" \ - "$vo" - exit_value=1 - fi +for vo in $(yq -r '.vos | keys[]'