diff --git a/cloud-info/ams-wrapper.sh b/cloud-info/ams-wrapper.sh
index 202c9fc9..c2f05714 100755
--- a/cloud-info/ams-wrapper.sh
+++ b/cloud-info/ams-wrapper.sh
@@ -51,7 +51,6 @@ if token-generator; then
   fi
 fi
 
-
 # Any OS related parameter should be available as env variables
 if test "$CHECKIN_SECRETS_PATH" = ""; then
   # Case 1: manual config
@@ -62,8 +61,10 @@ if test "$CHECKIN_SECRETS_PATH" = ""; then
 else
   # use service account for everyone
   export OS_DISCOVERY_ENDPOINT="https://aai.egi.eu/auth/realms/egi/.well-known/openid-configuration"
-  export OS_CLIENT_ID="$(yq -r '.fedcloudops.client_id' <"$CHECKIN_SECRETS_FILE")"
-  export OS_CLIENT_SECRET="$(yq -r '.fedcloudops.client_secret' <"$CHECKIN_SECRETS_FILE")"
+  OS_CLIENT_ID="$(yq -r '.fedcloudops.client_id' <"$CHECKIN_SECRETS_FILE")"
+  export OS_CLIENT_ID
+  OS_CLIENT_SECRET="$(yq -r '.fedcloudops.client_secret' <"$CHECKIN_SECRETS_FILE")"
+  export OS_CLIENT_SECRET
   export OS_ACCESS_TOKEN_TYPE="access_token"
   export OS_AUTH_TYPE="v3oidcclientcredentials"
   export OS_OPENID_SCOPE="openid profile eduperson_entitlement email"
@@ -101,10 +102,14 @@ if [ -f site.json ]; then
   # Put this info into S3, configure rclone config with
   # a provider named "s3" using env variables
   export RCLONE_CONFIG_S3_TYPE=s3
-  export RCLONE_CONFIG_S3_ACCESS_KEY_ID="$(yq -r '.s3.access_key_id' <"$CHECKIN_SECRETS_FILE")"
-  export RCLONE_CONFIG_S3_SECRET_ACCESS_KEY="$(yq -r '.s3.secret_access_key' <"$CHECKIN_SECRETS_FILE")"
-  export RCLONE_CONFIG_S3_ENDPOINT="$(yq -r '.s3.endpoint' <"$CHECKIN_SECRETS_FILE")"
-  export S3_BUCKET_NAME="$(yq -r '.s3.bucket' <"$CHECKIN_SECRETS_FILE")"
+  RCLONE_CONFIG_S3_ACCESS_KEY_ID="$(yq -r '.s3.access_key_id' <"$CHECKIN_SECRETS_FILE")"
+  export RCLONE_CONFIG_S3_ACCESS_KEY_ID
+  RCLONE_CONFIG_S3_SECRET_ACCESS_KEY="$(yq -r '.s3.secret_access_key' <"$CHECKIN_SECRETS_FILE")"
+  export RCLONE_CONFIG_S3_SECRET_ACCESS_KEY
+  RCLONE_CONFIG_S3_ENDPOINT="$(yq -r '.s3.endpoint' <"$CHECKIN_SECRETS_FILE")"
+  export RCLONE_CONFIG_S3_ENDPOINT
+  S3_BUCKET_NAME="$(yq -r '.s3.bucket' <"$CHECKIN_SECRETS_FILE")"
+  export S3_BUCKET_NAME
   export RCLONE_CONFIG_S3_ACL=private
   export RCLONE_CONFIG_S3_NO_CHECK_BUCKET=true
   rclone copy site.json "s3:$S3_BUCKET_NAME/$SITE_NAME"
diff --git a/cloud-info/cloud_info_catchall/test_token_generator.py b/cloud-info/cloud_info_catchall/test_token_generator.py
index 46a52ac3..e2b6e50d 100644
--- a/cloud-info/cloud_info_catchall/test_token_generator.py
+++ b/cloud-info/cloud_info_catchall/test_token_generator.py
@@ -99,7 +99,10 @@ def test_valid_token_expired_exception(self, m_calendar, m_decode, m_header, m_a
     @patch("cloud_info_catchall.token_generator.valid_token")
     @patch("cloud_info_catchall.token_generator.get_access_token")
     def test_generate_tokens(self, m_get_access, m_valid_token):
-        tokens = {"foo": {"access_token": "abc"}, "bar": {"access_token": "def"}}
+        tokens = {
+            "foo": {"client_id": "x", "client_secret": "y", "access_token": "abc"},
+            "bar": {"client_id": "y", "client_secret": "f", "access_token": "def"},
+        }
         secrets = {"foo": {}, "bar": {}}
         m_valid_token.side_effect = [True, False]
         m_get_access.return_value = "xyz"
diff --git a/deploy/deploy.sh b/deploy/deploy.sh
index bd030f50..b283ad06 100755
--- a/deploy/deploy.sh
+++ b/deploy/deploy.sh
@@ -34,7 +34,6 @@ cp secrets.yaml /etc/egi/vos/secrets.yaml
 # make sure the container user (999) can access the files
 chown -R 999:999 /etc/egi/
 
-
 GITHUB_COMMIT_URL="https://api.github.com/repos/EGI-Federation/fedcloud-catchall-operations/commits/$COMMIT_SHA/pulls"
 
 # Find out PR we need to update