diff --git a/.github/workflows/buildpipeline.yaml b/.github/workflows/ci.yaml similarity index 67% rename from .github/workflows/buildpipeline.yaml rename to .github/workflows/ci.yaml index e8a4745c2..9d7ab6592 100644 --- a/.github/workflows/buildpipeline.yaml +++ b/.github/workflows/ci.yaml @@ -1,24 +1,27 @@ name: CI - on: - push: - pull_request: - types: [opened, reopened] - + push: + pull_request: + types: [opened, reopened] concurrency: group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' cancel-in-progress: true - - jobs: Security: name: Security Pipeline uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@master secrets: inherit + UnitTest: + name: Python Unit Test with Postgres + uses: uc-cdis/.github/.github/workflows/python_unit_test.yaml@master + with: + python-version: '3.9' + test-script: 'tests/ci_commands_script.sh' + run-coveralls: true ci: name: Build Image and Push - # TODO Add this line back once we update to Python 3.9 from 3.6 + # TODO Uncomment after PXP-9212 # needs: Security uses: uc-cdis/.github/.github/workflows/image_build_push.yaml@master secrets: diff --git a/.secrets.baseline b/.secrets.baseline index 8078c23c4..9aa531c19 100644 --- a/.secrets.baseline +++ b/.secrets.baseline @@ -115,13 +115,13 @@ } ], "results": { - ".github/workflows/buildpipeline.yaml": [ + ".github/workflows/ci.yaml": [ { "type": "Secret Keyword", - "filename": ".github/workflows/buildpipeline.yaml", + "filename": ".github/workflows/ci.yaml", "hashed_secret": "3e26d6750975d678acb8fa35a0f69237881576b0", "is_verified": false, - "line_number": 17 + "line_number": 13 } ], "deployment/scripts/postgresql/postgresql_init.sql": [ @@ -210,13 +210,22 @@ "line_number": 137 } ], + "fence/resources/storage/storageclient/cleversafe.py": [ + { + "type": "Secret Keyword", + "filename": "fence/resources/storage/storageclient/cleversafe.py", + "hashed_secret": "7cb6efb98ba5972a9b5090dc2e517fe14d12cb04", + "is_verified": false, + "line_number": 274 + } + ], "fence/utils.py": [ { "type": "Secret Keyword", "filename": "fence/utils.py", "hashed_secret": "8318df9ecda039deac9868adf1944a29a95c7114", "is_verified": false, - "line_number": 128 + "line_number": 129 } ], "migrations/versions/a04a70296688_non_unique_client_name.py": [ @@ -259,14 +268,14 @@ "filename": "tests/conftest.py", "hashed_secret": "1348b145fa1a555461c1b790a2f66614781091e9", "is_verified": false, - "line_number": 1559 + "line_number": 1569 }, { "type": "Base64 High Entropy String", "filename": "tests/conftest.py", "hashed_secret": "227dea087477346785aefd575f91dd13ab86c108", "is_verified": false, - "line_number": 1582 + "line_number": 1593 } ], "tests/credentials/google/test_credentials.py": [ @@ -385,6 +394,24 @@ "line_number": 300 } ], + "tests/storageclient/storage_client_mock.py": [ + { + "type": "Secret Keyword", + "filename": "tests/storageclient/storage_client_mock.py", + "hashed_secret": "37bbea9557f9efd1eeadb25dda9ab6514f08fde9", + "is_verified": false, + "line_number": 158 + } + ], + "tests/storageclient/test_cleversafe_api_client.py": [ + { + "type": "Secret Keyword", + "filename": "tests/storageclient/test_cleversafe_api_client.py", + "hashed_secret": "f683c485d521c2e45830146dd570111770baea29", + "is_verified": false, + "line_number": 130 + } + ], "tests/test-fence-config.yaml": [ { "type": "Basic Auth Credentials", @@ -395,5 +422,5 @@ } ] }, - "generated_at": "2023-11-16T21:15:57Z" + "generated_at": "2024-07-25T17:19:58Z" } diff --git a/.travis.yml b/.travis.yml 
deleted file mode 100644 index 597d23fc4..000000000 --- a/.travis.yml +++ /dev/null @@ -1,53 +0,0 @@ -language: python -dist: jammy -python: - - "3.9" - -sudo: false - -cache: pip - -services: - - postgresql - -addons: - postgresql: "13" - apt: - sources: - - sourceline: deb http://apt.postgresql.org/pub/repos/apt/ jammy-pgdg main - 13 - key_url: https://www.postgresql.org/media/keys/ACCC4CF8.asc - packages: - - postgresql-13 - -before_install: - # Copy custom configs from the repo because PG-13 isn't set up to run like - # it normally does on Travis out of the box. - # Source: https://github.com/NCI-GDC/psqlgraph/blob/94f315db2c039217752cba85d9c63988f2059317/.travis.yml - - sudo cp travis/postgresql.conf /etc/postgresql/13/main/postgresql.conf - - sudo cp travis/pg_hba.conf /etc/postgresql/13/main/pg_hba.conf - - sudo pg_ctlcluster 13 main restart - -install: - - pip install --upgrade pip - - curl -sSL https://install.python-poetry.org | python - - - which poetry - - poetry --version - - poetry install --all-extras -vv --no-interaction - - poetry show -vv - - psql -c 'SELECT version();' -U postgres - - psql -U postgres -c "create database fence_test_tmp" - - pip list - -before_script: - - sudo rm -f /etc/boto.cfg - - mkdir -p tests/resources/keys; cd tests/resources/keys; openssl genrsa -out test_private_key.pem 2048; openssl rsa -in test_private_key.pem -pubout -out test_public_key.pem - - openssl genrsa -out test_private_key_2.pem 2048; openssl rsa -in test_private_key_2.pem -pubout -out test_public_key_2.pem - - cd - - -script: - - poetry run pytest -vv --cov=fence --cov=migrations/versions --cov-report xml tests - -after_script: - - python-codacy-coverage -r coverage.xml - - COVERALLS_REPO_TOKEN=$COVERALLS_TOKEN coveralls diff --git a/README.md b/README.md index ac413b9ac..fc4ef7025 100644 --- a/README.md +++ b/README.md @@ -9,93 +9,25 @@ only trusted entities to enter. Fence is a core service of the Gen3 stack that has multiple capabilities: -1. Act as an [auth broker](#auth-broker) to integrate with one or more [IdPs](#IdP) and provide downstream authentication and authorization for Gen3 services. -2. [Manage tokens](#token-management). -3. Act as an [OIDC provider](#oidc--oauth2) to support external applications to use Gen3 services. -4. [Issue short lived, cloud native credentials to access data in various cloud storage services](#accessing-data) +1. Act as an [auth broker](docs/additional_documentation/terminology.md#auth-broker) to integrate with one +or more [IdPs](docs/additional_documentation/terminology.md#idp) and provide downstream authentication +and authorization for Gen3 services. +2. [Manage tokens](docs/additional_documentation/token_management.md). +3. Act as an [OIDC provider](README.md#oidc--oauth2) to support external +applications to use Gen3 services. +4. [Issue short-lived, cloud native credentials to access data in various cloud storage services](docs/additional_documentation/data_access.md#accessing-data) -## Contents +## Overview -1. [API Documentation](#API-documentation) -1. [Terminologies](#Terminologies) -1. [Identity Providers](#identity-providers) -1. [OIDC & OAuth2](#oidc--oauth2) -1. [Accessing Data](#accessing-data) -1. [Setup](#setup) -1. [Token management](#token-management) -1. [fence-create](#fence-create-automating-common-tasks-with-a-command-line-interface) -1. 
[Default expiration times](#default-expiration-times-in-fence) +### Identity Providers -## API Documentation - -[OpenAPI documentation available here.](http://petstore.swagger.io/?url=https://raw.githubusercontent.com/uc-cdis/fence/master/openapis/swagger.yaml) - -YAML file for the OpenAPI documentation is found in the `openapis` folder (in -the root directory); see the README in that folder for more details. - -## Terminologies - -### AuthN - -Authentication - establishes "who you are" with the application through communication with an [Identity Provider](#IdP). - -### AuthZ - -Authorization - establishes "what you can do" and "which resources you have access to" within the application. - -### IdP - -Identity Provider - the service that lets a user login and provides the identity of the user to downstream services. Examples: Google login, University login, NIH Login. - -### Auth broker - -An interface which enables a user to authenticate using any of multiple IdPs. - -### OAuth2 - -A widely used AuthZ protocol for delegating access to an application to use resources on behalf of a user. - -https://tools.ietf.org/html/rfc6749 - -https://oauth.net/2/ - -#### Client - -OAuth 2.0 Client - An application which makes requests for protected resources (on a resource server) on behalf of a resource owner (end-user) and with the resource owner's authorization. - -#### Auth Server - -OAuth 2.0 Authorization Server - A server which issues access tokens to the client after successfully authenticating the resource owner and obtaining authorization. - -#### Access Token - -A string, issued by the auth server to the client, representing authorization credentials used to access protected resources (on a resource server). - -### OIDC - -OpenID Connect - an extension of OAuth2 which provides an AuthN layer on top of the OAuth 2.0 AuthZ layer. It introduced a new type of token, the id token, that is specifically designed to be consumed by clients to get the identity information of the user. - -http://openid.net/specs/openid-connect-core-1_0.html - -#### OP - -OpenID Provider - an OAuth 2.0 Authentication Server which also implements OpenID Connect. - -#### RP - -Relying Party - an OAuth 2.0 Client which uses (requests) OpenID Connect. - - - -## Identity Providers - Fence can be configured to support different Identity Providers (IdPs) for AuthN. At the moment, supported IDPs include: - Google -- [Shibboleth](docs/fence_shibboleth.md) +- [Shibboleth](docs/additional_documentation/fence_shibboleth.md) - NIH iTrust - InCommon - eduGAIN @@ -106,6 +38,14 @@ At the moment, supported IDPs include: - ORCID - RAS +### API Documentation + +[OpenAPI documentation available here.](http://petstore.swagger.io/?url=https://raw.githubusercontent.com/uc-cdis/fence/master/openapis/swagger.yaml) + +YAML file for the OpenAPI documentation is found in the `openapis` folder (in +the root directory); see the README in that folder for more details. + + ## OIDC & OAuth2 Fence acts as a central broker that supports multiple IdPs. @@ -134,14 +74,14 @@ Note that the `3rd Party App` acts as the `RP` in these examples. #### Flow: Client Registration -![Client Registration](docs/images/seq_diagrams/client_registration.png) +![Client Registration](./docs/images/seq_diagrams/client_registration.png) #### Flow: OpenID Connect In the following flow, Fence and the IdP together constitute an `OP`. Fence, by itself, acts as an OAuth 2.0 Auth Server; the IdP enables the additional implementation of OIDC (by providing AuthN). 
From an OIDC viewpoint, therefore, Fence and the IdP can be abstracted into one `OP`. -![OIDC Flow](docs/images/seq_diagrams/openid_connect_flow.png) +![OIDC Flow](./docs/images/seq_diagrams/openid_connect_flow.png) If the third-party application doesn't need to use any Gen3 resources (and just wants to authenticate the user), they can just get @@ -155,479 +95,48 @@ passed in an `Authorization` header. In the following flow, `3rd Party App` is the `RP`; `Protected Endpoint` is an endpoint of a Gen3 Resource (the `microservice`), and both of these are part of a `resource server`; and `Fence` is the `OP`. Here, importantly, `Fence` may be interfacing with another IdP _or_ with another `Fence` instance in order to implement the OIDC layer. Either way, note that the `Fence` blob in this diagram actually abstracts Fence in concert with some IdP, which may or may not also be (a different instance of) Fence. -![Using Access Token](docs/images/seq_diagrams/token_use_for_access.png) +![Using Access Token](./docs/images/seq_diagrams/token_use_for_access.png) #### Flow: Refresh Token Use -![Using Refresh Token](docs/images/seq_diagrams/refresh_token_use.png) +![Using Refresh Token](./docs/images/seq_diagrams/refresh_token_use.png) #### Flow: Refresh Token Use (Token is Expired) -![Using Expired Refresh Token](docs/images/seq_diagrams/refresh_token_use_expired.png) +![Using Expired Refresh Token](./docs/images/seq_diagrams/refresh_token_use_expired.png) #### Flow: Multi-Tenant Fence The following diagram illustrates the case in which one fence instance uses another fence instance as its identity provider. -A use case for this is when we setup a fence instance that uses NIH login as the IdP. Here, we go through a detailed approval process in NIH. Therefore we would like to do it only once for a single lead Fence instance, and then allow other fence instances to simply redirect to use the lead Fence as an IdP for logging in via NIH. +A use case for this is when we set up a fence instance that uses NIH login as the IdP. Here, we go through a detailed approval process in NIH. Therefore, we would like to do it only once for a single lead Fence instance, and then allow other fence instances to simply redirect to use the lead Fence as an IdP for logging in via NIH. In the following flow, `Fence (Client Instance)` is an OP relative to `OAuth Client`, but an RP relative to `Fence (IDP)`. -![Multi-Tenant Flow](docs/images/seq_diagrams/multi-tenant_flow.png) +![Multi-Tenant Flow](./docs/images/seq_diagrams/multi-tenant_flow.png) #### Notes See the [OIDC specification](http://openid.net/specs/openid-connect-core-1_0.html) for more details. Additionally, see the [OAuth2 specification](https://tools.ietf.org/html/rfc6749). -## Access Control / Authz - -Currently fence works with another Gen3 service named -[arborist](https://github.com/uc-cdis/arborist) to implement attribute-based access -control for commons users. The YAML file of access control information (see -[#create-user-access-file](#create-user-access-file)) contains a section `authz` which are data sent to -arborist in order to set up the access control model. - -## Accessing Data - -Fence has multiple options that provide a mechanism to access data. The access -to data can be moderated through authorization information in a User Access File. - -Users can be provided specific `privilege`'s on `projects` in the User Access -File. A `project` is identified by a unique authorization identifier AKA `auth_id`. 
- -A `project` can be associated with various storage backends that store -object data for that given `project`. You can assign `read-storage` and `write-storage` -privileges to users who should have access to that stored object data. `read` and -`write` allow access to the data stored in a graph database. - -Depending on the backend, Fence can be configured to provide users access to -the data in different ways. - - -### Signed URLS - -Temporary signed URLs are supported in all major commercial clouds. Signed URLs are the most 'cloud agnostic' way to allow users to access data located in different platforms. - -Fence has the ability to request a specific file by its GUID (globally unique identifier) and retrieve a temporary signed URL for object data in AWS or GCP that will provide direct access to that object. ### Google Cloud Storage -Whereas pre-signed URL is a cloud agnostic solution, services and tools on Google Cloud Platform prefer to use Google's concept of a "Service Account". Because of that, Fence provides a few more methods to access data in Google. +Whereas pre-signed URL is a cloud-agnostic solution, services and tools on Google Cloud Platform prefer to use Google's concept of a "Service Account". Because of that, Fence provides a few more methods to access data in Google. -See [Fence and Google](docs/google_architecture.md) for more details on data access methods specific to Google. +See [Fence and Google](docs/additional_documentation/google_architecture.md) for more details on data access methods specific to Google. -## Setup - -### Install Requirements and Fence - -Install [Poetry](https://python-poetry.org/docs/#installation). - -```bash -# Install Fence and dependencies -poetry install -``` - -### Create Configuration File - -Fence requires a configuration file to run. We have a command line -utility to help you create one based on a default configuration. - -The configuration file itself will live outside of this repo (to -prevent accidentally checking in sensitive information like database passwords). - -To create a new configuration file from the default configuration: - -```bash -python cfg_help.py create -``` - -This file will be placed in one of the default search directories for Fence. - -To get the exact path where the new configuration file was created, use: - -```bash -python cfg_help.py get -``` - -The file should have detailed information about each of the configuration -variables. **Remember to fill out the new configuration file!** - -#### Other Configuration Notes - -* Fence will look for configuration files from a list of search directories ( -which are currently defined in `fence/settings.py`.) -* For more configuration options (such as having multiple different config -files for development), see the `cfg_help.py` file. - -### Set Up Databases - -The tests clear out the database every time they are run. If you want -to keep a persistent database for manual testing and general local usage, -create a second test database with a different name: - -> NOTE: Requires a minimum of Postgres v9.4 (because of `JSONB` types used) - -```bash -# Create test database(s). -# This one is for automated tests, which clear the database after running; -# `tests/test_settings.py` should have `fence_test_tmp` in the `DB` variable. -psql -U test postgres -c 'create database fence_test_tmp' -userdatamodel-init --db fence_test_tmp -# This one is for manual testing/general local usage; Your config -# should have `fence_test` in the `DB` variable. 
-psql -U test postgres -c 'create database fence_test' -userdatamodel-init --db fence_test --username test --password test -``` - -### Keypair Configuration - -Fence uses RSA keypairs to sign and allow verification of JWTs that it issues. -When the application is initialized, Fence loads in keypair files from the -`keys` directory. To store keypair files, use the following procedure: - - Create a subdirectory in the `fence/keys` directory, named with a - unique identifier, preferably a timestamp in ISO 8601 format of when - the keys are created. The name of the directory is used for the `kid` - (key ID) for those keys; the default (assuming the directory is named - with an ISO timestamp) looks like this: - - fence_key_2018-05-01T14:00:00Z - - - Generate a private and public keypair following the RSA 256 algorithm - and store those in that directory. The key files must be named - `jwt_public_key.pem` and `jwt_private_key.pem`. - -To generate a keypair using `openssl`: -```bash -# Generate the private key. -openssl genpkey -algorithm RSA -out jwt_private_key.pem -pkeyopt rsa_keygen_bits:2048 - -# Generate the public key. -openssl rsa -pubout -in jwt_private_key.pem -out jwt_public_key.pem - -# Depending on the `openssl` distribution, you may find these work instead: -# -# openssl rsa -out private_key.pem 2048 -# openssl rsa -in private_key.pem -pubout -out public_key.pem -``` -It's not a bad idea to confirm that the files actually say `RSA PRIVATE KEY` -and `PUBLIC KEY` (and in fact Fence will require that the private key files it -uses actually say "PRIVATE KEY" and that the public keys do not). - -Files containing public/private keys should have this format (the format used -by `openssl` for generating RSA keys): -``` ------BEGIN PUBLIC KEY----- -... [key is here] ... ------END PUBLIC KEY----- -``` -If a key is not in this format, then `PyJWT` will raise errors about not being -able to read the key. - -Fence will use the first keypair in the list to sign the tokens it issues -through OAuth. - - -### Create User Access File - -You can setup user access via admin fence script providing a user yaml file -Example user yaml: -``` -cloud_providers: {} -groups: {} -users: - userA@gmail.com: - projects: - - auth_id: project_a - privilege: [read, update, create, delete] - - auth_id: project_b - privilege: [read] - userB@gmail.com: - projects: - - auth_id: project_b - privilege: [read] -``` -Example sync command: - -```bash -fence-create sync --yaml user.yaml -``` - -### Register OAuth Client - -When you want to build an application that uses Gen3 resources on behalf of a user, you should register an OAuth client for this app. -Fence right now exposes client registration via admin CLI, because the Oauth2 client for a Gen3 commons needs approval from the sponsor of the commons. If you are an external developer, you should submit a support ticket. - -As a Gen3 commons administrator, you can run following command for an approved client: -```bash -fence-create client-create --client CLIENT_NAME --urls OAUTH_REDIRECT_URL --username USERNAME -``` -This command should output a tuple of `(client_id, client_secret)` which must be -saved by the OAuth client to use with -`fence`. - -## Quickstart with Helm - -You can now deploy individual services via Helm! -Please refer to the Helm quickstart guide HERE (https://github.com/uc-cdis/fence/blob/master/docs/quickstart_helm.md) - -## Token management - -Fence utilizes [OpenID Connect](#OIDC) to generate tokens -for clients. 
It can also provide tokens directly to a user. - -Clients and users may then use those tokens with other -Gen3 Data Commons services to access protected endpoints that require specific permissions. - -We use JSON Web Tokens (JWTs) as the format for all tokens of the following types: - -- OIDC ID token: this token is used by the OIDC client to get a user's identity from the token content -- OIDC access token: this token can be sent to Gen3 services via bearer header and get protected resources. -- OIDC refresh token: this token can be sent to fence to request a new access / id token. - - - -### JWT Information - -#### Example ID Token - -``` -{ - "sub": "7", - "azp": "test-client", - "pur": "id", - "aud": [ - "openid", - "user", - "test-client" - ], - "context": { - "user": { - "is_admin": false, - "name": "test", - "projects": { - "phs000178": [ - "read", - "update", - "create", - "delete", - "read-storage" - ] - }, - "google": { - "linked_google_account": "somebody@example.com" - } - } - }, - "iss": "https://commons.org", - "jti": "3ae2910b-0294-43dc-af2a-03fd60082aef", - "exp": 1516983302, - "iat": 1516982102, - "auth_time": 1516982102 -} -``` - -#### Example Access Token - -``` -{ - "sub": "7", - "azp": "test-client", - "pur": "access", - "aud": [ - "openid", - "user", - "test-client" - ], - "context": { - "user": { - "is_admin": false, - "name": "test", - "projects": { - "phs000178": [ - "read", - "update", - "create", - "delete", - "read-storage" - ] - }, - "google": { - "proxy_group": "abcdefgh123456", - "linked_google_account": "somebody@example.com" - } - } - }, - "iss": "https://commons.org", - "jti": "2e6ade06-5afb-4ce7-9ab5-e206225ce291", - "exp": 1516983302, - "iat": 1516982102 -} -``` - -#### Example Refresh Token - -``` -{ - "sub": "7", - "azp": "test-client", - "pur": "refresh", - "aud": [ - "openid", - "user", - "test-client" - ], - "iss": "https://commons.org", - "jti": "c72e5573-39fa-4391-a445-191e370b7cc5", - "exp": 1517010902, - "iat": 1516982102 -} -``` - -## fence-create: Automating common tasks with a command line interface - -fence-create is a command line utility that is bundled with fence and allows you to automate some commons tasks within fence. For the latest and greatest run the command `fence-create --help`. - -WARNING: fence-create directly modifies the database in some cases and may circumvent security checks (most of these utilities are used for testing). BE CAREFUL when you're running these commands and make sure you know what they're doing. - - -### Register Internal Oauth Client - -As a Gen3 commons administrator, if you want to create an oauth client that skips user consent step, use the following command: - -```bash -fence-create client-create --client CLIENT_NAME --urls OAUTH_REDIRECT_URL --username USERNAME --auto-approve (--expires-in 30) -``` - -The optional `--expires-in` parameter allows specifying the number of days until this client expires. - -### Register an Implicit Oauth Client - -As a Gen3 commons administrator, if you want to create an implicit oauth client for a webapp: - -```bash -fence-create client-create --client fancywebappname --urls 'https://betawebapp.example/fence -https://webapp.example/fence' --public --username fancyapp --grant-types authorization_code refresh_token implicit -``` - -If there are more than one URL to add, use space to delimit them like this: - -```bash -fence-create client-create --urls 'https://url1/' 'https://url2/' --client ... 
-``` - -To specify allowed scopes, use the `allowed-scopes` argument: -```bash -fence-create client-create ... --allowed-scopes openid user data -``` - -### Register an Oauth Client for a Client Credentials flow - -The OAuth2 Client Credentials flow is used for machine-to-machine communication and scenarios in which typical authentication schemes like username + password do not make sense. The system authenticates and authorizes the app rather than a user. See the [OAuth2 specification](https://www.rfc-editor.org/rfc/rfc6749#section-4.4) for more details. - -As a Gen3 commons administrator, if you want to create an OAuth client for a client credentials flow: - -```bash -fence-create client-create --client CLIENT_NAME --grant-types client_credentials (--expires-in 30) -``` - -This command will return a client ID and client secret, which you can then use to obtain an access token: - -```bash -curl --request POST https://FENCE_URL/oauth2/token?grant_type=client_credentials -d scope="openid user" --user CLIENT_ID:CLIENT_SECRET -``` - -The optional `--expires-in` parameter allows specifying the number of *days* until this client expires. The recommendation is to rotate credentials with the `client_credentials` grant at least once a year (see [Rotate client credentials](#rotate-client-credentials) section). -NOTE: In Gen3, you can grant specific access to a client the same way you would to a user. See the [user.yaml guide](https://github.com/uc-cdis/fence/blob/master/docs/user.yaml_guide.md) for more details. - -NOTE: Client credentials tokens are not linked to a user (the claims contain no `sub` or `context.user.name` like other tokens). Some Gen3 endpoints that assume the token is linked to a user, or whose logic require there being a user, do not support them. For an example of how to adapt an endpoint to support client credentials tokens, see [here](https://github.com/uc-cdis/requestor/commit/a5078fae27fa258ac78045cf2bb89cb2104f53cf). For an example of how to explicitly reject client credentials tokens, see [here](https://github.com/uc-cdis/requestor/commit/0f4974c25343d2185c7cdb48dcdeb58f97800672). - -### Modify OAuth Client - -```bash -fence-create client-modify --client CLIENT_NAME --urls http://localhost/api/v0/oauth2/authorize -``` - -That command should output any modifications to the client. Similarly, multiple URLs are -allowed here too. - -Add `--append` argument to add new callback urls or allowed scopes to existing client (instead of replacing them) using `--append --urls` or `--append --allowed-scopes` -```bash -fence-create client-modify --client CLIENT_NAME --urls http://localhost/api/v0/new/oauth2/authorize --append (--expires-in 30) -``` - -### Rotate client credentials - -Use the `client-rotate` command to receive a new set of credentials (client ID and secret) for a client. The old credentials are NOT deactivated and must be deleted or expired separately (see [Delete Expired OAuth Clients](#delete-expired-oauth-clients) section). This allows for a rotation without downtime. - -```bash -fence-create client-rotate --client CLIENT_NAME (--expires-in 30) -``` - -Note that the `usersync` job must be run after rotating the credentials so that the new client ID is granted the same access as the old one. - -### Delete OAuth Client - -```bash -fence-create client-delete --client CLIENT_NAME -``` -That command should output the result of the deletion attempt. 
- -### Delete Expired OAuth Clients - -```bash -fence-create client-delete-expired -``` - -To post a warning in Slack about any clients that expired or are about to expire: - -```bash -fence-create client-delete-expired --slack-webhook --warning-days -``` - - -### List OAuth Clients - -```bash -fence-create client-list -``` -That command should output the full records for any registered OAuth clients. - -### Set up for External Buckets on Google - -```bash -fence-create link-external-bucket --bucket-name demo-bucket -fence-create link-bucket-to-project --bucket_id demo-bucket --bucket_provider google --project_auth_id test-project -``` - -The link-external-bucket returns an email for a Google group which needs to be added to access to the bucket `demo-bucket`. - -### Notify users who are blocking service account registration - -```bash -fence-create notify-problem-users --emails ex1@gmail.com ex2@gmail.com --auth_ids test --google_project_id test-google -``` - -`notify-problem-users` emails users in the provided list (can be fence user email or linked google email) who do not have access to any of the auth_ids provided. Also accepts a `check_linking` flag to check that each user has linked their google account. - -## Default Expiration Times in Fence +## Setup -Table contains various artifacts in fence that have temporary lifetimes and their default values. +See detailed explanation [here](docs/additional_documentation/setup.md) -> NOTE: "SA" in the below table stands for Service Account +## Additional documentation -| Name | Lifetime | Extendable? | Maximum Lifetime | Details -|--------------------------------------|--------------|-------------|-----------------------|------------------------------------------------------------------------------------------| -| Access Token | 20 minutes | TRUE | Life of Refresh Token | | -| Refresh Token | 30 days | FALSE | N/A | | -| User's SA Account Access | 7 days | TRUE | N/A | Access to data (e.g. length it stays in the proxy group). Can optionally provide an expiration less than 7 days | -| User's Google Account Access | 1 day | TRUE | N/A | After AuthN, how long we associate a Google email with the given user. Can optionally provide an expiration less than 1 day | -| User's Google Account Linkage | Indefinite | N/A | N/A | Can optionally provide an expiration less than 1 hour | -| Google Signed URL | Up to 1 hour | FALSE | N/A | Can optionally provide an expiration less than 1 hour | -| AWS Signed URL | Up to 1 hour | FALSE | N/A | Obtained by an oauth client through /credentials/google | -| Client SA (for User) Key | 10 days | FALSE | N/A | Obtained by the user themselves for temp access. Can optionally provide an expiration less than 10 days | -| User Primary SA Key | 10 days | FALSE | N/A | Used for Google URL signing | -| User Primary SA Key for URL Signing | 30 days | FALSE | N/A | | -| Sliding Session Window | 15 minutes | TRUE | 8 hours | access_token cookies get generated automatically when expired if session is still active | +1. [Terminologies](docs/additional_documentation/terminology.md) +2. [Accessing Data](docs/additional_documentation/data_access.md#accessing-data) +3. [Token management](docs/additional_documentation/token_management.md) +4. [fence-create](docs/additional_documentation/fence_create.md) +5. 
[Default expiration times](docs/additional_documentation/default_expiration_times.md) diff --git a/bin/fence_create.py b/bin/fence_create.py index 4eb905569..6c5a5ad45 100755 --- a/bin/fence_create.py +++ b/bin/fence_create.py @@ -329,6 +329,9 @@ def parse_arguments(): help='scopes to include in the token (e.g. "user" or "data")', ) token_create.add_argument("--exp", help="time in seconds until token expiration") + token_create.add_argument( + "--client_id", help="Client Id, required to generate refresh token" + ) force_link_google = subparsers.add_parser("force-link-google") force_link_google.add_argument( @@ -582,6 +585,7 @@ def main(): username=args.username, scopes=args.scopes, expires_in=args.exp, + client_id=args.client_id, ) token_type = str(args.type).strip().lower() if token_type == "access_token" or token_type == "access": diff --git a/clear_prometheus_multiproc b/clear_prometheus_multiproc index 4bb2b425f..af1ba6d18 100755 --- a/clear_prometheus_multiproc +++ b/clear_prometheus_multiproc @@ -4,6 +4,8 @@ set -ex rm -Rf $1 -mkdir $1 +mkdir -p $1 chmod 755 $1 -chown 100:101 $1 +if id -u nginx &>/dev/null; then + chown $(id -u nginx):$(id -g nginx) $1 +fi diff --git a/docs/additional_documentation/authorization.md b/docs/additional_documentation/authorization.md new file mode 100644 index 000000000..6fc31c7c1 --- /dev/null +++ b/docs/additional_documentation/authorization.md @@ -0,0 +1,8 @@ + +## Access Control / Authz + +Currently fence works with another Gen3 service named +[arborist](https://github.com/uc-cdis/arborist) to implement attribute-based access +control for commons users. The YAML file of access control information (see +[#create-user-access-file](setup.md#create-user-access-file)) contains a section `authz` which are data sent to +arborist in order to set up the access control model. diff --git a/docs/additional_documentation/data_access.md b/docs/additional_documentation/data_access.md new file mode 100644 index 000000000..3e5190fc9 --- /dev/null +++ b/docs/additional_documentation/data_access.md @@ -0,0 +1,22 @@ +## Accessing Data + +Fence has multiple options that provide a mechanism to access data. The access +to data can be moderated through authorization information in a User Access File. + +Users can be provided specific `privilege`'s on `projects` in the User Access +File. A `project` is identified by a unique authorization identifier AKA `auth_id`. + +A `project` can be associated with various storage backends that store +object data for that given `project`. You can assign `read-storage` and `write-storage` +privileges to users who should have access to that stored object data. `read` and +`write` allow access to the data stored in a graph database. + +Depending on the backend, Fence can be configured to provide users access to +the data in different ways. + + +### Signed URLS + +Temporary signed URLs are supported in all major commercial clouds. Signed URLs are the most 'cloud agnostic' way to allow users to access data located in different platforms. + +Fence has the ability to request a specific file by its GUID (globally unique identifier) and retrieve a temporary signed URL for object data in AWS or GCP that will provide direct access to that object. 
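As a sketch of what exercising this might look like from a client's perspective — assuming a standard Gen3 deployment where Fence is mounted under `/user`, and using a placeholder hostname and GUID (none of these values come from this PR):

```bash
# Hypothetical request: exchange a GUID for a temporary signed URL.
# The bearer token must carry read-storage privilege on the project
# that owns the object; `protocol` selects the storage backend.
curl -H "Authorization: Bearer $ACCESS_TOKEN" \
  "https://gen3.example.org/user/data/download/GUID?protocol=s3"

# A successful response is a small JSON document containing the URL, e.g.:
# {"url": "https://bucket.s3.amazonaws.com/obj?X-Amz-Signature=..."}
```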
diff --git a/docs/dbgap_info.md b/docs/additional_documentation/dbgap_info.md similarity index 100% rename from docs/dbgap_info.md rename to docs/additional_documentation/dbgap_info.md diff --git a/docs/additional_documentation/default_expiration_times.md b/docs/additional_documentation/default_expiration_times.md new file mode 100644 index 000000000..9b0432270 --- /dev/null +++ b/docs/additional_documentation/default_expiration_times.md @@ -0,0 +1,19 @@ +## Default Expiration Times in Fence + +Table contains various artifacts in fence that have temporary lifetimes and their default values. + +> NOTE: "SA" in the below table stands for Service Account + +| Name | Lifetime | Extendable? | Maximum Lifetime | Details | +|-------------------------------------|--------------|-------------|-----------------------|-----------------------------------------------------------------------------------------------------------------------------| +| Access Token | 20 minutes | TRUE | Life of Refresh Token | | +| Refresh Token | 30 days | FALSE | N/A | | +| User's SA Account Access | 7 days | TRUE | N/A | Access to data (e.g. length it stays in the proxy group). Can optionally provide an expiration less than 7 days | +| User's Google Account Access | 1 day | TRUE | N/A | After AuthN, how long we associate a Google email with the given user. Can optionally provide an expiration less than 1 day | +| User's Google Account Linkage | Indefinite | N/A | N/A | Can optionally provide an expiration less than 1 hour | +| Google Signed URL | Up to 1 hour | FALSE | N/A | Can optionally provide an expiration less than 1 hour | +| AWS Signed URL | Up to 1 hour | FALSE | N/A | Obtained by an oauth client through /credentials/google | +| Client SA (for User) Key | 10 days | FALSE | N/A | Obtained by the user themselves for temp access. Can optionally provide an expiration less than 10 days | +| User Primary SA Key | 10 days | FALSE | N/A | Used for Google URL signing | +| User Primary SA Key for URL Signing | 30 days | FALSE | N/A | | +| Sliding Session Window | 15 minutes | TRUE | 8 hours | access_token cookies get generated automatically when expired if session is still active | diff --git a/docs/fence-create-deprecated.md b/docs/additional_documentation/fence-create-deprecated.md similarity index 100% rename from docs/fence-create-deprecated.md rename to docs/additional_documentation/fence-create-deprecated.md diff --git a/docs/additional_documentation/fence_create.md b/docs/additional_documentation/fence_create.md new file mode 100644 index 000000000..398544f98 --- /dev/null +++ b/docs/additional_documentation/fence_create.md @@ -0,0 +1,126 @@ +## fence-create: Automating common tasks with a command line interface + +fence-create is a command line utility that is bundled with fence and allows you to automate some commons tasks within fence. For the latest and greatest run the command `fence-create --help`. + +WARNING: fence-create directly modifies the database in some cases and may circumvent security checks (most of these utilities are used for testing). BE CAREFUL when you're running these commands and make sure you know what they're doing. 
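For example, this PR adds a `--client_id` argument to the token-creation subcommand (see the `bin/fence_create.py` hunk above). A minimal sketch of how that might be invoked — assuming the subcommand is registered as `token-create` and the existing flag names are unchanged; the username, scope list format, and client ID below are placeholders:

```bash
# Hypothetical: mint a refresh token bound to an already-registered OAuth client.
# --exp is in seconds; --client_id is the new argument introduced by this PR.
fence-create token-create --type refresh_token --username USERNAME \
  --scopes openid,user --exp 3600 --client_id CLIENT_ID
```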
+ + +### Register Internal Oauth Client + +As a Gen3 commons administrator, if you want to create an oauth client that skips user consent step, use the following command: + +```bash +fence-create client-create --client CLIENT_NAME --urls OAUTH_REDIRECT_URL --username USERNAME --auto-approve (--expires-in 30) +``` + +The optional `--expires-in` parameter allows specifying the number of days until this client expires. + +### Register an Implicit Oauth Client + +As a Gen3 commons administrator, if you want to create an implicit oauth client for a webapp: + +```bash +fence-create client-create --client fancywebappname --urls 'https://betawebapp.example/fence +https://webapp.example/fence' --public --username fancyapp --grant-types authorization_code refresh_token implicit +``` + +If there are more than one URL to add, use space to delimit them like this: + +```bash +fence-create client-create --urls 'https://url1/' 'https://url2/' --client ... +``` + +To specify allowed scopes, use the `allowed-scopes` argument: +```bash +fence-create client-create ... --allowed-scopes openid user data +``` + +### Register an Oauth Client for a Client Credentials flow + +The OAuth2 Client Credentials flow is used for machine-to-machine communication and scenarios in which typical authentication schemes like username + password do not make sense. The system authenticates and authorizes the app rather than a user. See the [OAuth2 specification](https://www.rfc-editor.org/rfc/rfc6749#section-4.4) for more details. + +As a Gen3 commons administrator, if you want to create an OAuth client for a client credentials flow: + +```bash +fence-create client-create --client CLIENT_NAME --grant-types client_credentials (--expires-in 30) +``` + +This command will return a client ID and client secret, which you can then use to obtain an access token: + +```bash +curl --request POST https://FENCE_URL/oauth2/token?grant_type=client_credentials -d scope="openid user" --user CLIENT_ID:CLIENT_SECRET +``` + +The optional `--expires-in` parameter allows specifying the number of *days* until this client expires. The recommendation is to rotate credentials with the `client_credentials` grant at least once a year (see [Rotate client credentials](#rotate-client-credentials) section). + +NOTE: In Gen3, you can grant specific access to a client the same way you would to a user. See the [user.yaml guide](https://github.com/uc-cdis/fence/blob/master/docs/user.yaml_guide.md) for more details. + +NOTE: Client credentials tokens are not linked to a user (the claims contain no `sub` or `context.user.name` like other tokens). Some Gen3 endpoints that assume the token is linked to a user, or whose logic require there being a user, do not support them. For an example of how to adapt an endpoint to support client credentials tokens, see [here](https://github.com/uc-cdis/requestor/commit/a5078fae27fa258ac78045cf2bb89cb2104f53cf). For an example of how to explicitly reject client credentials tokens, see [here](https://github.com/uc-cdis/requestor/commit/0f4974c25343d2185c7cdb48dcdeb58f97800672). + +### Modify OAuth Client + +```bash +fence-create client-modify --client CLIENT_NAME --urls http://localhost/api/v0/oauth2/authorize +``` + +That command should output any modifications to the client. Similarly, multiple URLs are +allowed here too. 
+ +Add `--append` argument to add new callback urls or allowed scopes to existing client (instead of replacing them) using `--append --urls` or `--append --allowed-scopes` +```bash +fence-create client-modify --client CLIENT_NAME --urls http://localhost/api/v0/new/oauth2/authorize --append (--expires-in 30) +``` + +### Rotate client credentials + +Use the `client-rotate` command to receive a new set of credentials (client ID and secret) for a client. The old credentials are NOT deactivated and must be deleted or expired separately (see [Delete Expired OAuth Clients](#delete-expired-oauth-clients) section). This allows for a rotation without downtime. + +```bash +fence-create client-rotate --client CLIENT_NAME (--expires-in 30) +``` + +Note that the `usersync` job must be run after rotating the credentials so that the new client ID is granted the same access as the old one. + +### Delete OAuth Client + +```bash +fence-create client-delete --client CLIENT_NAME +``` +That command should output the result of the deletion attempt. + +### Delete Expired OAuth Clients + +```bash +fence-create client-delete-expired +``` + +To post a warning in Slack about any clients that expired or are about to expire: + +```bash +fence-create client-delete-expired --slack-webhook --warning-days +``` + + +### List OAuth Clients + +```bash +fence-create client-list +``` +That command should output the full records for any registered OAuth clients. + +### Set up for External Buckets on Google + +```bash +fence-create link-external-bucket --bucket-name demo-bucket +fence-create link-bucket-to-project --bucket_id demo-bucket --bucket_provider google --project_auth_id test-project +``` + +The link-external-bucket returns an email for a Google group which needs to be added to access to the bucket `demo-bucket`. + +### Notify users who are blocking service account registration + +```bash +fence-create notify-problem-users --emails ex1@gmail.com ex2@gmail.com --auth_ids test --google_project_id test-google +``` + +`notify-problem-users` emails users in the provided list (can be fence user email or linked google email) who do not have access to any of the auth_ids provided. Also accepts a `check_linking` flag to check that each user has linked their google account. diff --git a/docs/fence_multifactor_authentication_guide.md b/docs/additional_documentation/fence_multifactor_authentication_guide.md similarity index 100% rename from docs/fence_multifactor_authentication_guide.md rename to docs/additional_documentation/fence_multifactor_authentication_guide.md diff --git a/docs/fence_shibboleth.md b/docs/additional_documentation/fence_shibboleth.md similarity index 95% rename from docs/fence_shibboleth.md rename to docs/additional_documentation/fence_shibboleth.md index 2cae15ef5..41db778ac 100644 --- a/docs/fence_shibboleth.md +++ b/docs/additional_documentation/fence_shibboleth.md @@ -21,7 +21,7 @@ The `/login/shib` endpoint accepts the query parameter `shib_idp`. Fence checks After the user logs in and is redirected to `/login/shib/login`, we get the `eppn` (EduPerson Principal Name) from the request headers to use as username. If the `eppn` is not available, we use the `persistent-id` (or `cn`) instead. -![Shibboleth Login Flow](images/seq_diagrams/shibboleth_flow.png) +![Shibboleth Login Flow](../images/seq_diagrams/shibboleth_flow.png) Notes about the NIH login implementation: - NIH login is used as the default when the `idp` is fence and no `shib_idp` is specified (for backwards compatibility). 
@@ -32,7 +32,7 @@ Notes about the NIH login implementation: ### In the multi-tenant Fence instance -The [Shibboleth dockerfile](../DockerfileShib) image is at https://quay.io/repository/cdis/fence-shib and is NOT compatible yet with python 3/the latest Fence (for now, use Fence 2.7.x). +The [Shibboleth dockerfile](../../DockerfileShib) image is at https://quay.io/repository/cdis/fence-shib and is NOT compatible yet with python 3/the latest Fence (for now, use Fence 2.7.x). The deployment only includes `revproxy` and `fenceshib`. The Fence configuration enables the `shibboleth` provider: diff --git a/docs/ga4gh_passports.md b/docs/additional_documentation/ga4gh_passports.md similarity index 91% rename from docs/ga4gh_passports.md rename to docs/additional_documentation/ga4gh_passports.md index 0e6f0da21..235a248cd 100644 --- a/docs/ga4gh_passports.md +++ b/docs/additional_documentation/ga4gh_passports.md @@ -25,7 +25,7 @@ References: This shows external DRS Client(s) communicating with Gen3 Framework Services (as a GA4GH DRS Server) and how G3FS interacts with Passport Brokers to validate and verify JWTs. -![Passport and Visa JWT Handling](images/ga4gh/passport_jwt_handling.png) +![Passport and Visa JWT Handling](../images/ga4gh/passport_jwt_handling.png) ## G3FS: Configurable Roles for Data Access @@ -33,11 +33,11 @@ Gen3 Framework Services are capable of acting in many different roles. As data r In order to describe the role of the passport in these various configurations, the following diagrams may help. -![Gen3 as DRS Server](images/ga4gh/gen3_as_drs.png) +![Gen3 as DRS Server](../images/ga4gh/gen3_as_drs.png) -![Gen3 as Client](images/ga4gh/gen3_as_client.png) +![Gen3 as Client](../images/ga4gh/gen3_as_client.png) -![Gen3 as Both](images/ga4gh/gen3_as_client_and_drs_server.png) +![Gen3 as Both](../images/ga4gh/gen3_as_client_and_drs_server.png) ## Performance Improvements @@ -52,15 +52,15 @@ We added a number of things to mitigate the performance impact on researchers' w To illustrate the need for such a cache, see the images below for before and after. -![Before Caching](images/ga4gh/caching_before.png) +![Before Caching](../images/ga4gh/caching_before.png) -![After Caching](images/ga4gh/caching_after.png) +![After Caching](../images/ga4gh/caching_after.png) ## User Identities Different GA4GH Visas may refer to the same subject differently. In order to maintain the known mappings between different representations of the same identity, we are creating an Issuer+Subject to User mapping table. The primary key on this table is the combination of the `iss` and `sub` from JWTs. -![User Identities](images/ga4gh/users.png) +![User Identities](../images/ga4gh/users.png) ## Backend Updates and Expiration @@ -68,6 +68,6 @@ In order to ensure the removal of access at the right time, the cronjobs we have There is an argument here for event-based architecture, but Gen3 does not currently support such an architecture. We are instead extending the support of our cronjobs to ensure expirations occur at the right time. -![Cronjobs and Expirations](images/ga4gh/expiration.png) +![Cronjobs and Expirations](../images/ga4gh/expiration.png) > _All diagrams are originally from an **internal** CTDS Document. 
The link to that document is [here](https://lucid.app/lucidchart/5c52b868-5cd2-4c6e-b53b-de2981f7da98/edit?invitationId=inv_9a757cb1-fc81-4189-934d-98c3db06d2fc) for internal people who need to edit the above diagrams._ diff --git a/docs/google_architecture.md b/docs/additional_documentation/google_architecture.md similarity index 98% rename from docs/google_architecture.md rename to docs/additional_documentation/google_architecture.md index b00012ff0..65bc8d7d1 100644 --- a/docs/google_architecture.md +++ b/docs/additional_documentation/google_architecture.md @@ -30,7 +30,7 @@ To support the 3 methods of access mentioned above, we have a generic architectu That architecture involves Google's concept of **groups** and use of their **IAM Policies** in the Google Cloud Platform. The following diagram shows the layers between the user themselves and the bucket. -![Google Access Architecture](images/g_architecture.png) +![Google Access Architecture](../images/g_architecture.png) Working backwards from the Google Bucket itself, we have a **Google Bucket Access Group**, which, as you probably guessed, is a Google Group that provides access to the bucket. That group is assigned a **role** on the Google **resource** (the Google Bucket). **Roles** provide a set of permissions (like read privileges). The combinations of those roles on the bucket become the bucket's **Policy**. You can read more about Google's IAM terms and concepts in [their docs](https://cloud.google.com/iam/docs). @@ -46,7 +46,7 @@ Google groups contain **members** (another Google term) and a Google group can b A more representative diagram of the structures that allow users to get access to the buckets may look something like this: -![Representative Google Access Architecture](images/rep_g_architecture.png) +![Representative Google Access Architecture](../images/rep_g_architecture.png) #### User's Proxy Group @@ -169,7 +169,7 @@ In the above script, `google-project-to-bill` is either the `userProject` provid Fence facilitates the creation of Signed URLs to access Google Storage objects. These URLs provide temporary, authenticated, access to anyone with the URL but must be generated by someone who has access. -![Signed URLs](images/signed_urls.png) +![Signed URLs](../images/signed_urls.png) Design Requirements: @@ -195,7 +195,7 @@ This allows clients to manage their temporary credentials without the chance of Each Client Service Account is a member in the User's Proxy Group, meaning it has the same access that the user themselves have. -![Temporary Service Account Credentials](images/g_sa_creds.png) +![Temporary Service Account Credentials](../images/g_sa_creds.png) > WARNING: By default, Google Service Account Keys have an expiration of 10 years. To create a more manageable and secure expiration you must manually "expire" the keys by deleting them with a cronjob (once they are alive longer than a configured expiration). Fence's command line tool `fence-create` has a function for expiring keys that you should run on a schedule. Check out `fence-create google-manage-keys --help` @@ -229,7 +229,7 @@ A user logs into fence with their eRA Commons ID. To get access to data through Google Account Linking is achieved by sending the user through the beginning of the OIDC flow with Google. The user is redirected to a Google Login page and whichever account they successfully log in to becomes linked to their fence identity. 
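A rough sketch of the shape of this hand-off — the hostname and redirect target are placeholders, and the path assumes Fence's google-link endpoint is exposed as `/link/google` under the usual `/user` prefix:

```bash
# Hypothetical: an authenticated user kicks off Google account linking.
# Fence should answer with a redirect into Google's login page; once the
# user logs in there, Google sends them back and Fence records the link.
curl -v -H "Authorization: Bearer $ACCESS_TOKEN" \
  "https://gen3.example.org/user/link/google?redirect=/portal"
# Expect a 302 response whose Location header points at accounts.google.com.
```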
-![Google Account Linking](images/g_accnt_link.png) +![Google Account Linking](../images/g_accnt_link.png) We require the user to log in so that we can authenticate them and only link an account they actually own. @@ -239,7 +239,7 @@ Once linked, the user's Google Account is then placed *temporarily* inside their At the moment, the *link* between the User and their Google Account does not expire. The access to data *does* expire though. Explicit refreshing of access must be done by an authenticated user or valid client with those permissions through Fence's API. -![Google Account Linking After Expiration](images/g_accnt_link_2.png) +![Google Account Linking After Expiration](../images/g_accnt_link_2.png) #### Service Account Registration @@ -312,7 +312,7 @@ The Service Accounts are validated first in the cronjob so that if multiple SA's This diagram shows a single Google Project with 3 users (`UserA`, `UserB`, and `UserC`). All of them have already gone through the linking process with fence to associate their Google Account with their fence identity. -![Service Account Registration](images/sa_reg.png) +![Service Account Registration](../images/sa_reg.png) The project service account, `Service Account A`, has been registered for access to a fence `Project` which has data in `Bucket Y`. The service account is given access by placing it *directly in the Google Bucket Access Group*. @@ -326,6 +326,6 @@ The user must request fence `Projects` that the service account should have acce If someone attempting to register `Service Account A` with fence `Projects` that have data in *both* `Bucket X` and `Bucket Y`, registration will fail. Why? Because not every user in the Google Project have access to that data. -![Service Account Registration](images/sa_invalid_reg.png) +![Service Account Registration](../images/sa_invalid_reg.png) --- diff --git a/docs/local_multi_fence.md b/docs/additional_documentation/local_multi_fence.md similarity index 100% rename from docs/local_multi_fence.md rename to docs/additional_documentation/local_multi_fence.md diff --git a/docs/quickstart_helm.md b/docs/additional_documentation/quickstart_helm.md similarity index 100% rename from docs/quickstart_helm.md rename to docs/additional_documentation/quickstart_helm.md diff --git a/docs/register.md b/docs/additional_documentation/register.md similarity index 100% rename from docs/register.md rename to docs/additional_documentation/register.md diff --git a/docs/additional_documentation/setup.md b/docs/additional_documentation/setup.md new file mode 100644 index 000000000..5dcafc37b --- /dev/null +++ b/docs/additional_documentation/setup.md @@ -0,0 +1,157 @@ + +## Setup + +### Install Requirements and Fence + +Install [Poetry](https://python-poetry.org/docs/#installation). + +```bash +# Install Fence and dependencies +poetry install +``` + +### Create Configuration File + +Fence requires a configuration file to run. We have a command line +utility to help you create one based on a default configuration. + +The configuration file itself will live outside of this repo (to +prevent accidentally checking in sensitive information like database passwords). + +To create a new configuration file from the default configuration: + +```bash +python cfg_help.py create +``` + +This file will be placed in one of the default search directories for Fence. 
+ +To get the exact path where the new configuration file was created, use: + +```bash +python cfg_help.py get +``` + +The file should have detailed information about each of the configuration +variables. **Remember to fill out the new configuration file!** + +Once you have done so, you can run `alembic upgrade head` to generate the tables needed +to run fence. + +#### Other Configuration Notes + +* Fence will look for configuration files from a list of search directories ( +which are currently defined in `fence/settings.py`.) +* For more configuration options (such as having multiple different config +files for development), see the `cfg_help.py` file. + +### Set Up Databases + +The tests clear out the database every time they are run. If you want +to keep a persistent database for manual testing and general local usage, +create a second test database with a different name: + +> NOTE: Requires a minimum of Postgres v9.4 (because of `JSONB` types used) + +```bash +# Create test database(s). +# This one is for automated tests, which clear the database after running; +# `tests/test_settings.py` should have `fence_test_tmp` in the `DB` variable. +psql -U test postgres -c 'create database fence_test_tmp' +userdatamodel-init --db fence_test_tmp +# This one is for manual testing/general local usage; Your config +# should have `fence_test` in the `DB` variable. +psql -U test postgres -c 'create database fence_test' +userdatamodel-init --db fence_test --username test --password test +``` + +### Keypair Configuration + +Fence uses RSA keypairs to sign and allow verification of JWTs that it issues. +When the application is initialized, Fence loads in keypair files from the +`keys` directory. To store keypair files, use the following procedure: + - Create a subdirectory in the `fence/keys` directory, named with a + unique identifier, preferably a timestamp in ISO 8601 format of when + the keys are created. The name of the directory is used for the `kid` + (key ID) for those keys; the default (assuming the directory is named + with an ISO timestamp) looks like this: + + fence_key_2018-05-01T14:00:00Z + + - Generate a private and public keypair following the RSA 256 algorithm + and store those in that directory. The key files must be named + `jwt_public_key.pem` and `jwt_private_key.pem`. + +To generate a keypair using `openssl`: +```bash +# Generate the private key. +openssl genpkey -algorithm RSA -out jwt_private_key.pem -pkeyopt rsa_keygen_bits:2048 + +# Generate the public key. +openssl rsa -pubout -in jwt_private_key.pem -out jwt_public_key.pem + +# Depending on the `openssl` distribution, you may find these work instead: +# +# openssl rsa -out private_key.pem 2048 +# openssl rsa -in private_key.pem -pubout -out public_key.pem +``` +It's not a bad idea to confirm that the files actually say `RSA PRIVATE KEY` +and `PUBLIC KEY` (and in fact Fence will require that the private key files it +uses actually say "PRIVATE KEY" and that the public keys do not). + +Files containing public/private keys should have this format (the format used +by `openssl` for generating RSA keys): +``` +-----BEGIN PUBLIC KEY----- +... [key is here] ... +-----END PUBLIC KEY----- +``` +If a key is not in this format, then `PyJWT` will raise errors about not being +able to read the key. + +Fence will use the first keypair in the list to sign the tokens it issues +through OAuth. 
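A quick sanity check on a freshly generated keypair can save debugging time later. A sketch using the filenames from the commands above (the exact header text depends on which `openssl` invocation produced the key):

```bash
# The private key header must contain "PRIVATE KEY"; the public one must not.
head -n 1 jwt_private_key.pem   # e.g. -----BEGIN PRIVATE KEY-----
head -n 1 jwt_public_key.pem    # e.g. -----BEGIN PUBLIC KEY-----

# Re-derive the public key from the private key and compare the two files;
# no diff output means the halves of the keypair actually match.
openssl pkey -in jwt_private_key.pem -pubout | diff - jwt_public_key.pem
```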
+
+
+### Create User Access File
+
+You can set up user access via the fence admin script by providing a user YAML file.
+Example user YAML:
+```
+cloud_providers: {}
+groups: {}
+users:
+  userA@gmail.com:
+    projects:
+    - auth_id: project_a
+      privilege: [read, update, create, delete]
+    - auth_id: project_b
+      privilege: [read]
+  userB@gmail.com:
+    projects:
+    - auth_id: project_b
+      privilege: [read]
+```
+Example sync command:
+
+```bash
+fence-create sync --yaml user.yaml
+```
+
+### Register OAuth Client
+
+When you want to build an application that uses Gen3 resources on behalf of a user, you should register an OAuth client for this app.
+Fence currently exposes client registration via the admin CLI, because the OAuth2 client for a Gen3 commons needs approval from the sponsor of the commons. If you are an external developer, you should submit a support ticket.
+
+As a Gen3 commons administrator, you can run the following command for an approved client:
+```bash
+fence-create client-create --client CLIENT_NAME --urls OAUTH_REDIRECT_URL --username USERNAME
+```
+This command should output a tuple of `(client_id, client_secret)`, which must be
+saved by the OAuth client for use with `fence`.
+
+## Quickstart with Helm
+
+You can now deploy individual services via Helm!
+Please refer to the [Helm quickstart guide](https://github.com/uc-cdis/fence/blob/master/docs/quickstart_helm.md).
diff --git a/docs/additional_documentation/terminology.md b/docs/additional_documentation/terminology.md
new file mode 100644
index 000000000..8e3ffd812
--- /dev/null
+++ b/docs/additional_documentation/terminology.md
@@ -0,0 +1,51 @@
+## Terminologies
+
+### AuthN
+
+Authentication - establishes "who you are" with the application through communication with an [Identity Provider](#IdP).
+
+### AuthZ
+
+Authorization - establishes "what you can do" and "which resources you have access to" within the application.
+
+### IdP
+
+Identity Provider - the service that lets a user log in and provides the identity of the user to downstream services. Examples: Google login, University login, NIH Login.
+
+### Auth broker
+
+An interface which enables a user to authenticate using any of multiple IdPs.
+
+### OAuth2
+
+A widely used AuthZ protocol for delegating access to an application to use resources on behalf of a user.
+
+https://tools.ietf.org/html/rfc6749
+
+https://oauth.net/2/
+
+#### Client
+
+OAuth 2.0 Client - An application which makes requests for protected resources (on a resource server) on behalf of a resource owner (end-user) and with the resource owner's authorization.
+
+#### Auth Server
+
+OAuth 2.0 Authorization Server - A server which issues access tokens to the client after successfully authenticating the resource owner and obtaining authorization.
+
+#### Access Token
+
+A string, issued by the auth server to the client, representing authorization credentials used to access protected resources (on a resource server).
+
+### OIDC
+
+OpenID Connect - an extension of OAuth2 which provides an AuthN layer on top of the OAuth 2.0 AuthZ layer. It introduced a new type of token, the ID token, that is specifically designed to be consumed by clients to get the identity information of the user.
+
+http://openid.net/specs/openid-connect-core-1_0.html
+
+#### OP
+
+OpenID Provider - an OAuth 2.0 Authentication Server which also implements OpenID Connect.
+
+#### RP
+
+Relying Party - an OAuth 2.0 Client which uses (requests) OpenID Connect.
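+
+To make these roles concrete, here is a minimal sketch of an RP/client driving
+the OAuth2 authorization code flow against fence acting as the auth server/OP.
+The base URL, client credentials, and redirect URI are placeholders for values
+from your own deployment (the credentials come from `fence-create client-create`):
+
+```python
+import secrets
+from urllib.parse import urlencode
+
+import requests
+
+BASE_URL = "https://example.com/user"  # placeholder fence deployment
+CLIENT_ID = "<client_id>"
+CLIENT_SECRET = "<client_secret>"
+REDIRECT_URI = "https://myapp.example.com/callback"
+
+# Step 1 (AuthN): send the resource owner's browser to the authorization endpoint.
+state = secrets.token_urlsafe(16)
+authorize_url = f"{BASE_URL}/oauth2/authorize?" + urlencode({
+    "response_type": "code",
+    "client_id": CLIENT_ID,
+    "redirect_uri": REDIRECT_URI,
+    "scope": "openid user",
+    "state": state,
+})
+
+# Step 2 (AuthZ): after login and consent, fence redirects back with ?code=...;
+# the client exchanges the code for tokens at the token endpoint.
+def exchange_code(code):
+    resp = requests.post(
+        f"{BASE_URL}/oauth2/token",
+        data={
+            "grant_type": "authorization_code",
+            "code": code,
+            "redirect_uri": REDIRECT_URI,
+        },
+        auth=(CLIENT_ID, CLIENT_SECRET),  # basic auth for confidential clients
+    )
+    resp.raise_for_status()
+    return resp.json()  # access token, plus an ID token for OIDC scopes
+```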
diff --git a/docs/additional_documentation/token_management.md b/docs/additional_documentation/token_management.md new file mode 100644 index 000000000..c8c39eba5 --- /dev/null +++ b/docs/additional_documentation/token_management.md @@ -0,0 +1,113 @@ + +## Token management + +Fence utilizes [OpenID Connect](terminology.md#oidc) to generate tokens +for clients. It can also provide tokens directly to a user. + +Clients and users may then use those tokens with other +Gen3 Data Commons services to access protected endpoints that require specific permissions. + +We use JSON Web Tokens (JWTs) as the format for all tokens of the following types: + +- OIDC ID token: this token is used by the OIDC client to get a user's identity from the token content +- OIDC access token: this token can be sent to Gen3 services via bearer header and get protected resources. +- OIDC refresh token: this token can be sent to fence to request a new access / id token. + + + +### JWT Information + +#### Example ID Token + +``` +{ + "sub": "7", + "azp": "test-client", + "pur": "id", + "aud": [ + "openid", + "user", + "test-client" + ], + "context": { + "user": { + "is_admin": false, + "name": "test", + "projects": { + "phs000178": [ + "read", + "update", + "create", + "delete", + "read-storage" + ] + }, + "google": { + "linked_google_account": "somebody@example.com" + } + } + }, + "iss": "https://commons.org", + "jti": "3ae2910b-0294-43dc-af2a-03fd60082aef", + "exp": 1516983302, + "iat": 1516982102, + "auth_time": 1516982102 +} +``` + +#### Example Access Token + +``` +{ + "sub": "7", + "azp": "test-client", + "pur": "access", + "aud": [ + "openid", + "user", + "test-client" + ], + "context": { + "user": { + "is_admin": false, + "name": "test", + "projects": { + "phs000178": [ + "read", + "update", + "create", + "delete", + "read-storage" + ] + }, + "google": { + "proxy_group": "abcdefgh123456", + "linked_google_account": "somebody@example.com" + } + } + }, + "iss": "https://commons.org", + "jti": "2e6ade06-5afb-4ce7-9ab5-e206225ce291", + "exp": 1516983302, + "iat": 1516982102 +} +``` + +#### Example Refresh Token + +``` +{ + "sub": "7", + "azp": "test-client", + "pur": "refresh", + "aud": [ + "openid", + "user", + "test-client" + ], + "iss": "https://commons.org", + "jti": "c72e5573-39fa-4391-a445-191e370b7cc5", + "exp": 1517010902, + "iat": 1516982102 +} +``` \ No newline at end of file diff --git a/docs/user.yaml_guide.md b/docs/additional_documentation/user.yaml_guide.md similarity index 98% rename from docs/user.yaml_guide.md rename to docs/additional_documentation/user.yaml_guide.md index ec9e583eb..893d32045 100644 --- a/docs/user.yaml_guide.md +++ b/docs/additional_documentation/user.yaml_guide.md @@ -29,13 +29,13 @@ In a fully deployed Gen3 Commons using [Cloud Automation](https://github.com/uc- } ``` -A template, ready-to-use `user.yaml` file can be found [here](base_user.yaml). +A template, ready-to-use `user.yaml` file can be found [here](../base_user.yaml). When updating your `user.yaml` file, you should use the [`gen3users` CLI](https://github.com/uc-cdis/gen3users#gen3users) to validate it before use. ## Format -Note that the `user.yaml` example below is minimal, as the goal is only to describe its structure. For a working `user.yaml` file that contains everything needed to get started, refer to the [base user.yaml](base_user.yaml) instead. +Note that the `user.yaml` example below is minimal, as the goal is only to describe its structure. 
For a working `user.yaml` file that contains everything needed to get started, refer to the [base user.yaml](../base_user.yaml) instead. ``` authz: diff --git a/docs/usersync.md b/docs/additional_documentation/usersync.md similarity index 99% rename from docs/usersync.md rename to docs/additional_documentation/usersync.md index ef896c37b..8c0a5d79a 100644 --- a/docs/usersync.md +++ b/docs/additional_documentation/usersync.md @@ -6,7 +6,7 @@ Usersync is a script that parses user access information from multiple sources ( ## Usersync flow -![Usersync Flow](images/usersync.png) +![Usersync Flow](../images/usersync.png) > The access from the user.yaml file and the dbGaP authorization files is combined (see example below), but the user.yaml file overrides the user information (such as email) obtained from the dbGaP authorization files. diff --git a/docs/azure_architecture.md b/docs/azure/azure_architecture.md similarity index 75% rename from docs/azure_architecture.md rename to docs/azure/azure_architecture.md index b47c19a41..acf15ff55 100755 --- a/docs/azure_architecture.md +++ b/docs/azure/azure_architecture.md @@ -11,7 +11,7 @@ You can review how `fence` works with [Azure Blob Storage](#Azure-Blob-Storage) ### Azure Blob Storage -![Azure Blob Storage with Fence](./images/m_fence_azure_blob_storage.png) +![Azure Blob Storage with Fence](../images/m_fence_azure_blob_storage.png) The diagram shows 2 separate workflows in order for `fence` to interact with [Azure Blob Storage](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction): @@ -26,7 +26,7 @@ Also note that there's alternatives that could be considered for [future develop You can provision an Azure Storage Account with [Blob Storage as a one-time setup](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-quickstart-blobs-portal). You can further make the blobs [public read only](https://docs.microsoft.com/en-us/azure/storage/blobs/anonymous-read-access-configure?tabs=portal) for dev / test purposes, but it would be advisable to avoid this setup in a production scenario and consider using non-public ones instead. -With the Azure Blob Storage Account setup, you can further upload files into your Blob Storage using [Azure Storage Explorer](https://azure.microsoft.com/en-us/features/storage-explorer/) for manual testing, or you can automate loading data files into Azure Blob Storage (e.g. using [Azure Blob Storage Python SDK](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob)). +With the Azure Blob Storage Account setup, you can further upload files into your Blob Storage using [Azure Storage Explorer](https://azure.microsoft.com/en-us/products/storage/storage-explorer/) for manual testing, or you can automate loading data files into Azure Blob Storage (e.g. using [Azure Blob Storage Python SDK](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob)). 
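+
+For instance, a minimal upload sketch with that SDK (the connection string,
+container, and blob names below are placeholders for your own storage account):
+
+```python
+from azure.storage.blob import BlobServiceClient
+
+# Same connection string format as the `AZ_BLOB_CREDENTIALS` value described below.
+conn_str = "DefaultEndpointsProtocol=https;AccountName=somestorageaccount;AccountKey=...;"
+
+service = BlobServiceClient.from_connection_string(conn_str)
+blob = service.get_blob_client(container="storage-container", blob="some/blob/file.txt")
+
+# Stream a local file up to the container.
+with open("file.txt", "rb") as data:
+    blob.upload_blob(data, overwrite=True)
+```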
Assuming that you have preexisting files in an Azure Blob Storage Account, you can work through the following steps to index the files: @@ -44,12 +44,12 @@ You can use the Azure Blob Storage client to connect to Azure Blob Storage, and #### Configuration Details -You can update the [Fence config.yaml](../fence/config-default.yaml) to include the following values: +You can update the [Fence config.yaml](../../fence/config-default.yaml) to include the following values: -Name | Value | Description ------- | ------|---------- -`AZ_BLOB_CREDENTIALS` | DefaultEndpointsProtocol=https;AccountName=somestorageaccount;AccountKey=storageaccountkey;BlobEndpoint=`https://somestorageaccount.blob.core.windows.net/`; | This is the [Azure Blob Storage Connection String](https://docs.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&tabs=azure-portal#view-account-access-keys). You can also set this to `'*'` if you are indexing URLs for [public read access Azure Blob Storage containers](https://docs.microsoft.com/en-us/azure/storage/blobs/anonymous-read-access-configure?tabs=portal). Note that if you're using the URL for a public read access Azure Blob Storage container, then operations such as `delete` and `upload` will not work. -`AZ_BLOB_CONTAINER_URL` | `https://storageaccountname.blob.core.windows.net/storage-container` | This is the destination container for uploading with a given SAS token. You can set this value to designate a pre-existing storage container to upload indexed files, for example the new files could sit in `https://storageaccountname.blob.core.windows.net/storage-container/someguid/some/blob/file.txt`. If the storage account doesn't align with the indexed URL (e.g. you're using a public url or the storage account doesn't match), the upload will not work. If `AZ_BLOB_CREDENTIALS` is `'*'` then uploads from an indexed file using a public URL will not work. This value should be associated with the same Azure Blob Storage account used with the [Azure Blob Storage Connection String](https://docs.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&tabs=azure-portal#view-account-access-keys) for `AZ_BLOB_CREDENTIALS`. 
+| Name | Value | Description | +|-------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `AZ_BLOB_CREDENTIALS` | DefaultEndpointsProtocol=https;AccountName=somestorageaccount;AccountKey=storageaccountkey;BlobEndpoint=`https://somestorageaccount.blob.core.windows.net/`; | This is the [Azure Blob Storage Connection String](https://docs.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&tabs=azure-portal#view-account-access-keys). You can also set this to `'*'` if you are indexing URLs for [public read access Azure Blob Storage containers](https://docs.microsoft.com/en-us/azure/storage/blobs/anonymous-read-access-configure?tabs=portal). Note that if you're using the URL for a public read access Azure Blob Storage container, then operations such as `delete` and `upload` will not work. | +| `AZ_BLOB_CONTAINER_URL` | `https://storageaccountname.blob.core.windows.net/storage-container` | This is the destination container for uploading with a given SAS token. You can set this value to designate a pre-existing storage container to upload indexed files, for example the new files could sit in `https://storageaccountname.blob.core.windows.net/storage-container/someguid/some/blob/file.txt`. If the storage account doesn't align with the indexed URL (e.g. you're using a public url or the storage account doesn't match), the upload will not work. If `AZ_BLOB_CREDENTIALS` is `'*'` then uploads from an indexed file using a public URL will not work. This value should be associated with the same Azure Blob Storage account used with the [Azure Blob Storage Connection String](https://docs.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&tabs=azure-portal#view-account-access-keys) for `AZ_BLOB_CREDENTIALS`. | Using pre-signed urls for download is implemented; it's currently using a [SAS Token](https://docs.microsoft.com/en-us/azure/storage/common/storage-sas-overview). @@ -64,7 +64,7 @@ For example, when you index the file (e.g. using the [gen3sdk](https://github.co So if you navigate to `https://mydatacommons/files/guid` (assuming that the metadata is already setup), you can click on the file to download which will make the call to get the appropriate signed URL. 
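+
+Under the hood, that download click is just a signed-URL request to fence's
+data endpoint. A minimal sketch, assuming fence is mounted under `/user` as in
+a typical deployment (the host, GUID, and token are placeholders):
+
+```python
+import requests
+
+COMMONS = "https://mydatacommons"
+GUID = "<indexed file GUID>"
+ACCESS_TOKEN = "<fence access token>"
+
+resp = requests.get(
+    f"{COMMONS}/user/data/download/{GUID}",
+    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
+)
+resp.raise_for_status()
+print(resp.json()["url"])  # time-limited URL (a SAS URL for Azure-backed files)
+```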
-![Presigned URL](./images/m_fence_presigned_url.png)
+![Presigned URL](../images/m_fence_presigned_url.png)
 
 * If you index the file with a URL for a blob in a **public** Azure Blob Storage Container and the `AZ_BLOB_CREDENTIALS` are set to `'*'`, then the **non-signed** converted indexed URL will be used (e.g. `https://.blob.core.windows.net//some/path/to/file.txt`)
 > You need to replace the URL such as `https://.blob.core.windows.net//some/path/to/file.txt` with `az://.blob.core.windows.net//some/path/to/file.txt` upon submitting the record to `indexd`.
@@ -83,7 +83,7 @@ You can use [user delegation SAS tokens](https://docs.microsoft.com/en-us/rest/a
 
 ### Azure Active Directory
 
-![Azure AD with Fence](./images/m_fence_azure_AD.png)
+![Azure AD with Fence](../images/m_fence_azure_AD.png)
 
 The diagram shows 3 separate workflows in order for `fence` to interact with Azure AD:
 
@@ -104,16 +104,16 @@ Also note that there's alternatives that could be considered for [future develop
 
 1. [Create](https://docs.microsoft.com/en-us/azure/data-explorer/provision-azure-ad-app) AAD Application
 2. Add a redirect URL
    * The application needs to have a redirect URL that is the FQDN of the commons appended with `(commons fqdn)/user/login/microsoft/login`.
-![Add Redirect URI](./images/m_fence_azure_AD_app_registration_1.png)
+![Add Redirect URI](../images/m_fence_azure_AD_app_registration_1.png)
 3. Set a secret for the AAD application
-![Set the Client Secret](./images/m_fence_azure_AD_app_registration_2.png)
+![Set the Client Secret](../images/m_fence_azure_AD_app_registration_2.png)
 4. Retrieve the `client id` of the AAD application
-![Retrieve client ID](./images/m_fence_azure_AD_app_registration_3.png)
-5. Update [fence-config.yaml](../fence/config-default.yaml)
+![Retrieve client ID](../images/m_fence_azure_AD_app_registration_3.png)
+5. Update [fence-config.yaml](../../fence/config-default.yaml)
    * Set the `microsoft_client_id` to be the `client_id` in step 4.
    * Set the `microsoft_client_secret` to be the secret value in step 3.
-   * Make sure the `BASE_URL` in [fence-config.yaml](../fence/config-default.yaml) is correct.
-   * Make sure the `redirect_url` in [fence-config.yaml](../fence/config-default.yaml) is `{{BASE_URL}}/login/microsoft/login/` is matches the redirect URL (`(commons fdqn)/user/login/microsoft/login`) in step 2
+   * Make sure the `BASE_URL` in [fence-config.yaml](../../fence/config-default.yaml) is correct.
+   * Make sure the `redirect_url` in [fence-config.yaml](../../fence/config-default.yaml), `{{BASE_URL}}/login/microsoft/login/`, matches the redirect URL (`(commons fqdn)/user/login/microsoft/login`) in step 2
 6. Restart the `fence` service with the updated secrets
 
 #### User Yaml Setup
 
@@ -122,7 +122,7 @@
 
 It's helpful to understand some of the [Arborist terms and definitions](https://github.com/uc-cdis/arborist#terminology-and-definitions), which cover **action**, **permission**, **role**, **resource**, **policy**, and **group**.
 
-Further, it's helpful to understand the Arborist options for [configuring access](https://github.com/uc-cdis/arborist#configuring-access). You can see an example of granting **users** and **groups** access and more details in the [user.yaml guide](./user.yaml_guide.md).
+Further, it's helpful to understand the Arborist options for [configuring access](https://github.com/uc-cdis/arborist#configuring-access). You can see an example of granting **users** and **groups** access and more details in the [user.yaml guide](../additional_documentation/user.yaml_guide.md).
 
 At a high level, this setup involves a couple of steps:
diff --git a/docs/azure_devops_pipeline.md b/docs/azure/azure_devops_pipeline.md
similarity index 87%
rename from docs/azure_devops_pipeline.md
rename to docs/azure/azure_devops_pipeline.md
index b0ff95d82..7e7be9fd8 100755
--- a/docs/azure_devops_pipeline.md
+++ b/docs/azure/azure_devops_pipeline.md
@@ -1,12 +1,12 @@
 # Azure DevOps Build Pipeline
 
-The purpose of this [Azure DevOps Pipeline](../azure-devops-pipeline.yaml) is to build `fence`, run a test suite, and then push the `fence` container into an [Azure Container Registry](https://docs.microsoft.com/en-us/azure/container-registry/container-registry-get-started-portal).
+The purpose of this [Azure DevOps Pipeline](../../azure-devops-pipeline.yaml) is to build `fence`, run a test suite, and then push the `fence` container into an [Azure Container Registry](https://docs.microsoft.com/en-us/azure/container-registry/container-registry-get-started-portal).
 
 ## Getting Started
 
 If you don't already have access, you can use the free sign-up with [Azure Devops](https://docs.microsoft.com/en-us/azure/devops/pipelines/get-started/pipelines-sign-up?view=azure-devops).
 
-You can also import the [pipeline](../azure-devops-pipeline.yaml); see these [doc notes](https://docs.microsoft.com/en-us/azure/devops/pipelines/get-started/clone-import-pipeline?view=azure-devops&tabs=yaml#export-and-import-a-pipeline) as a guide.
+You can also import the [pipeline](../../azure-devops-pipeline.yaml); see these [doc notes](https://docs.microsoft.com/en-us/azure/devops/pipelines/get-started/clone-import-pipeline?view=azure-devops&tabs=yaml#export-and-import-a-pipeline) as a guide.
 
 ### Setup Azure Container Registry
 
@@ -103,7 +103,7 @@ First, make sure you have already [imported your Azure DevOps Pipeline](https://
 
 Click on the pipeline and then click edit, which will let you update the variables in the Azure DevOps pipeline:
 
-![Click on Variables](./azure_devops_pipeline_config_1.png)
+![Click on Variables](azure_devops_pipeline_config_1.png)
 
 Variable Name | Description
 ------ | ------
@@ -117,8 +117,8 @@ GIT_REPO_TAG | This is the tag to use for the `fence` git repository, with a def
 
 After updating the variables, be sure to click **save**:
 
-![Save updated variables](./azure_devops_pipeline_config_2.png)
+![Save updated variables](azure_devops_pipeline_config_2.png)
 
 You can run the pipeline to validate the `fence` build and push to ACR.
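+
+If you prefer not to click through the web UI, the same run can be queued
+programmatically. A hedged sketch using the Azure DevOps Pipelines REST API
+(the `runs` endpoint, `api-version` 7.1 assumed); the organization, project,
+pipeline id, and personal access token are placeholders for your own setup:
+
+```python
+import requests
+
+ORG = "my-org"                # Azure DevOps organization (placeholder)
+PROJECT = "my-project"        # project containing the imported pipeline
+PIPELINE_ID = 42              # numeric id of the fence pipeline (placeholder)
+PAT = "<personal access token>"
+
+resp = requests.post(
+    f"https://dev.azure.com/{ORG}/{PROJECT}/_apis/pipelines/{PIPELINE_ID}/runs",
+    params={"api-version": "7.1"},
+    json={},                  # empty body: run with the saved variable values
+    auth=("", PAT),           # PATs use basic auth with an empty username
+)
+resp.raise_for_status()
+print(resp.json()["state"])   # e.g. "inProgress"
+```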
-![Run the pipeline](./azure_devops_pipeline_config_3.png) \ No newline at end of file +![Run the pipeline](azure_devops_pipeline_config_3.png) \ No newline at end of file diff --git a/docs/azure_devops_pipeline_config_1.png b/docs/azure/azure_devops_pipeline_config_1.png similarity index 100% rename from docs/azure_devops_pipeline_config_1.png rename to docs/azure/azure_devops_pipeline_config_1.png diff --git a/docs/azure_devops_pipeline_config_2.png b/docs/azure/azure_devops_pipeline_config_2.png similarity index 100% rename from docs/azure_devops_pipeline_config_2.png rename to docs/azure/azure_devops_pipeline_config_2.png diff --git a/docs/azure_devops_pipeline_config_3.png b/docs/azure/azure_devops_pipeline_config_3.png similarity index 100% rename from docs/azure_devops_pipeline_config_3.png rename to docs/azure/azure_devops_pipeline_config_3.png diff --git a/fence/__init__.py b/fence/__init__.py index 91e02e656..e7a0027f8 100755 --- a/fence/__init__.py +++ b/fence/__init__.py @@ -1,21 +1,17 @@ from collections import OrderedDict import os -import tempfile from urllib.parse import urljoin -import flask -from flask_cors import CORS -from sqlalchemy.orm import scoped_session -from flask import _app_ctx_stack, current_app -from werkzeug.local import LocalProxy from authutils.oauth2.client import OAuthClient -from cdislogging import get_logger -from gen3authz.client.arborist.client import ArboristClient -from flask_wtf.csrf import validate_csrf -from werkzeug.middleware.dispatcher import DispatcherMiddleware from azure.storage.blob import BlobServiceClient from azure.core.exceptions import ResourceNotFoundError -from urllib.parse import urlparse +from cdislogging import get_logger +import flask +from flask_cors import CORS +from flask_wtf.csrf import validate_csrf +from gen3authz.client.arborist.client import ArboristClient +from sqlalchemy.orm import scoped_session + # Can't read config yet. Just set to debug for now, else no handlers. # Later, in app_config(), will actually set level based on config @@ -31,6 +27,7 @@ ) from fence.auth import logout, build_redirect_url +from fence.metrics import metrics from fence.blueprints.data.indexd import S3IndexedFileLocation from fence.blueprints.login.utils import allowed_login_redirects, domain from fence.errors import UserError @@ -69,11 +66,6 @@ from pcdcutils.errors import KeyPathInvalidError, NoKeyError -# for some reason the temp dir does not get created properly if we move -# this statement to `_setup_prometheus()` -PROMETHEUS_TMP_COUNTER_DIR = tempfile.TemporaryDirectory() - - app = flask.Flask(__name__) CORS(app=app, headers=["content-type", "accept"], expose_headers="*") @@ -104,6 +96,9 @@ def app_init( app_sessions(app) app_register_blueprints(app) server.init_app(app, query_client=query_client) + logger.info( + f"Prometheus metrics are{'' if config['ENABLE_PROMETHEUS_METRICS'] else ' NOT'} enabled." + ) def app_sessions(app): @@ -208,6 +203,15 @@ def public_keys(): {"keys": [(keypair.kid, keypair.public_key) for keypair in app.keypairs]} ) + @app.route("/metrics") + def metrics_endpoint(): + """ + /!\ There is no authz control on this endpoint! + In cloud-automation setups, access to this endpoint is blocked at the revproxy level. 
+ """ + data, content_type = metrics.get_latest_metrics() + return flask.Response(data, content_type=content_type) + def _check_azure_storage(app): """ @@ -367,14 +371,6 @@ def app_config( _setup_audit_service_client(app) _setup_data_endpoint_and_boto(app) _load_keys(app, root_dir) - _set_authlib_cfgs(app) - - app.prometheus_counters = {} - if config["ENABLE_PROMETHEUS_METRICS"]: - logger.info("Enabling Prometheus metrics...") - _setup_prometheus(app) - else: - logger.info("Prometheus metrics are NOT enabled.") app.storage_manager = StorageManager(config["STORAGE_CREDENTIALS"], logger=logger) @@ -402,8 +398,9 @@ def app_config( def _setup_data_endpoint_and_boto(app): if "AWS_CREDENTIALS" in config and len(config["AWS_CREDENTIALS"]) > 0: - value = list(config["AWS_CREDENTIALS"].values())[0] - app.boto = BotoManager(value, logger=logger) + creds = config["AWS_CREDENTIALS"] + buckets = config.get("S3_BUCKETS", {}) + app.boto = BotoManager(creds, buckets, logger=logger) app.register_blueprint(fence.blueprints.data.blueprint, url_prefix="/data") @@ -420,24 +417,6 @@ def _load_keys(app, root_dir): } -def _set_authlib_cfgs(app): - # authlib OIDC settings - # key will need to be added - settings = {"OAUTH2_JWT_KEY": keys.default_private_key(app)} - app.config.update(settings) - config.update(settings) - - # only add the following if not already provided - config.setdefault("OAUTH2_JWT_ENABLED", True) - config.setdefault("OAUTH2_JWT_ALG", "RS256") - config.setdefault("OAUTH2_JWT_ISS", app.config["BASE_URL"]) - config.setdefault("OAUTH2_PROVIDER_ERROR_URI", "/api/oauth2/errors") - app.config.setdefault("OAUTH2_JWT_ENABLED", True) - app.config.setdefault("OAUTH2_JWT_ALG", "RS256") - app.config.setdefault("OAUTH2_JWT_ISS", app.config["BASE_URL"]) - app.config.setdefault("OAUTH2_PROVIDER_ERROR_URI", "/api/oauth2/errors") - - def _setup_oidc_clients(app): configured_idps = config.get("OPENID_CONNECT", {}) @@ -495,7 +474,10 @@ def _setup_oidc_clients(app): logger=logger, ) elif idp == "fence": - app.fence_client = OAuthClient(**settings) + # https://docs.authlib.org/en/latest/client/flask.html#flask-client + app.fence_client = OAuthClient(app) + # https://docs.authlib.org/en/latest/client/frameworks.html + app.fence_client.register(**settings) else: # generic OIDC implementation client = Oauth2ClientBase( settings=settings, @@ -531,27 +513,6 @@ def _setup_audit_service_client(app): ) -def _setup_prometheus(app): - # This environment variable MUST be declared before importing the - # prometheus modules (or unit tests fail) - # More details on this awkwardness: https://github.com/prometheus/client_python/issues/250 - os.environ["prometheus_multiproc_dir"] = PROMETHEUS_TMP_COUNTER_DIR.name - - from prometheus_client import ( - CollectorRegistry, - multiprocess, - make_wsgi_app, - ) - - app.prometheus_registry = CollectorRegistry() - multiprocess.MultiProcessCollector(app.prometheus_registry) - - # Add prometheus wsgi middleware to route /metrics requests - app.wsgi_app = DispatcherMiddleware( - app.wsgi_app, {"/metrics": make_wsgi_app(registry=app.prometheus_registry)} - ) - - @app.errorhandler(Exception) def handle_error(error): """ diff --git a/fence/auth.py b/fence/auth.py index 74d0cde49..bf758804e 100644 --- a/fence/auth.py +++ b/fence/auth.py @@ -40,7 +40,10 @@ def get_jwt(): try: bearer, token = header.split(" ") except ValueError: - raise Unauthorized("authorization header not in expected format") + msg = "authorization header not in expected format" + logger.debug(f"{msg}. 
Received header: {header}") + logger.error(f"{msg}.") + raise Unauthorized(msg) if bearer.lower() != "bearer": raise Unauthorized("expected bearer token in auth header") return token diff --git a/fence/blueprints/data/blueprint.py b/fence/blueprints/data/blueprint.py index 5dc1055c8..d8041b949 100755 --- a/fence/blueprints/data/blueprint.py +++ b/fence/blueprints/data/blueprint.py @@ -134,6 +134,8 @@ def upload_data_file(): authz = params.get("authz") uploader = None + guid = params.get("guid") + if authz: # if requesting an authz field, using new authorization method which doesn't # rely on uploader field, so clear it out @@ -165,7 +167,10 @@ def upload_data_file(): ) blank_index = BlankIndex( - file_name=params["file_name"], authz=params.get("authz"), uploader=uploader + file_name=params["file_name"], + authz=authz, + uploader=uploader, + guid=guid, ) default_expires_in = flask.current_app.config.get("MAX_PRESIGNED_URL_TTL", 3600) @@ -199,16 +204,16 @@ def upload_data_file(): def init_multipart_upload(): """ Initialize a multipart upload request - - NOTE This endpoint does not currently accept a `bucket` parameter like - `POST /upload` and `GET /upload/` do. """ params = flask.request.get_json() if not params: raise UserError("wrong Content-Type; expected application/json") if "file_name" not in params: raise UserError("missing required argument `file_name`") - blank_index = BlankIndex(file_name=params["file_name"]) + + guid = params.get("guid") + + blank_index = BlankIndex(file_name=params["file_name"], guid=guid) default_expires_in = flask.current_app.config.get("MAX_PRESIGNED_URL_TTL", 3600) expires_in = get_valid_expiration( @@ -288,7 +293,11 @@ def complete_multipart_upload(): raise UserError("missing required arguments: {}".format(list(missing))) default_expires_in = flask.current_app.config.get("MAX_PRESIGNED_URL_TTL", 3600) + bucket = params.get("bucket") + if bucket: + verify_data_upload_bucket_configuration(bucket) + expires_in = get_valid_expiration( params.get("expires_in"), max_limit=default_expires_in, diff --git a/fence/blueprints/data/indexd.py b/fence/blueprints/data/indexd.py index f7b9488f6..fe1383321 100755 --- a/fence/blueprints/data/indexd.py +++ b/fence/blueprints/data/indexd.py @@ -49,6 +49,7 @@ from fence.resources.ga4gh.passports import sync_gen3_users_authz_from_ga4gh_passports from fence.resources.audit.utils import enable_audit_logging from fence.utils import get_valid_expiration_from_request +from fence.metrics import metrics from . import multipart_upload from ...models import AssumeRoleCacheAWS, query_for_user, query_for_user_by_id @@ -77,6 +78,7 @@ def get_signed_url_for_file( ga4gh_passports=None, db_session=None, bucket=None, + drs="False", ): requested_protocol = requested_protocol or flask.request.args.get("protocol", None) r_pays_project = flask.request.args.get("userProject", None) @@ -164,12 +166,33 @@ def get_signed_url_for_file( user_sub=flask.g.audit_data.get("sub", ""), client_id=_get_client_id(), requested_protocol=requested_protocol, + action=action, + drs=drs, ) return {"url": signed_url} -def _log_signed_url_data_info(indexed_file, user_sub, client_id, requested_protocol): +def get_bucket_from_urls(urls, protocol): + """ + Return the bucket name from the first of the provided URLs that starts with the given protocol (usually `gs`, `s3`, `az`...) 
+ """ + bucket = "" + for url in urls: + if "://" in url: + # Extract the protocol and the rest of the URL + bucket_protocol, rest_of_url = url.split("://", 1) + + if bucket_protocol == protocol: + # Extract bucket name + bucket = f"{bucket_protocol}://{rest_of_url.split('/')[0]}" + break + return bucket + + +def _log_signed_url_data_info( + indexed_file, user_sub, client_id, requested_protocol, action, drs="False" +): size_in_kibibytes = (indexed_file.index_document.get("size") or 0) / 1024 acl = indexed_file.index_document.get("acl") authz = indexed_file.index_document.get("authz") @@ -180,23 +203,25 @@ def _log_signed_url_data_info(indexed_file, user_sub, client_id, requested_proto protocol = indexed_file.indexed_file_locations[0].protocol # figure out which bucket was used based on the protocol - bucket = "" - for url in indexed_file.index_document.get("urls", []): - bucket_name = None - if "://" in url: - # Extract the protocol and the rest of the URL - bucket_protocol, rest_of_url = url.split("://", 1) - - if bucket_protocol == protocol: - # Extract bucket name - bucket = f"{bucket_protocol}://{rest_of_url.split('/')[0]}" - break + bucket = get_bucket_from_urls(indexed_file.index_document.get("urls", []), protocol) logger.info( - f"Signed URL Generated. size_in_kibibytes={size_in_kibibytes} " + f"Signed URL Generated. action={action} size_in_kibibytes={size_in_kibibytes} " f"acl={acl} authz={authz} bucket={bucket} user_sub={user_sub} client_id={client_id}" ) + metrics.add_signed_url_event( + action, + protocol, + acl, + authz, + bucket, + user_sub, + client_id, + drs, + size_in_kibibytes, + ) + def _get_client_id(): client_id = "Unknown Client" @@ -208,6 +233,7 @@ def _get_client_id(): return client_id + def prepare_presigned_url_audit_log(protocol, indexed_file): """ Store in `flask.g.audit_data` the data needed to record an audit log. 
@@ -232,7 +258,12 @@ class BlankIndex(object): """ def __init__( - self, uploader=None, file_name=None, logger_=None, guid=None, authz=None + self, + uploader=None, + file_name=None, + logger_=None, + guid=None, + authz=None, ): self.logger = logger_ or logger self.indexd = ( @@ -253,8 +284,9 @@ def __init__( self.file_name = file_name self.authz = authz - # if a guid is not provided, this will create a blank record for you - self.guid = guid or self.index_document["did"] + self.guid = guid + # .index_document is a cached property with code below, it creates/retrieves the actual record and this line updates the stored GUID to the returned record + self.guid = self.index_document["did"] @cached_property def index_document(self): @@ -266,6 +298,20 @@ def index_document(self): response from indexd (the contents of the record), containing ``guid`` and ``url`` """ + + if self.guid: + index_url = self.indexd.rstrip("/") + "/index/" + self.guid + indexd_response = requests.get(index_url) + if indexd_response.status_code == 200: + document = indexd_response.json() + self.logger.info(f"Record with {self.guid} id found in Indexd.") + return document + else: + raise NotFound(f"No indexed document found with id {self.guid}") + + return self._create_blank_record() + + def _create_blank_record(self): index_url = self.indexd.rstrip("/") + "/index/blank/" params = {"uploader": self.uploader, "file_name": self.file_name} diff --git a/fence/blueprints/data/multipart_upload.py b/fence/blueprints/data/multipart_upload.py index 96c3d6227..7352f66f1 100644 --- a/fence/blueprints/data/multipart_upload.py +++ b/fence/blueprints/data/multipart_upload.py @@ -140,6 +140,10 @@ def generate_presigned_url_for_uploading_part( Returns: presigned_url(str) """ + s3_buckets = get_value( + config, "S3_BUCKETS", InternalError("S3_BUCKETS not configured") + ) + bucket = s3_buckets.get(bucket_name) s3_buckets = get_value( config, "S3_BUCKETS", InternalError("S3_BUCKETS not configured") diff --git a/fence/blueprints/ga4gh.py b/fence/blueprints/ga4gh.py index 7b4ef0603..890da83f6 100644 --- a/fence/blueprints/ga4gh.py +++ b/fence/blueprints/ga4gh.py @@ -41,5 +41,7 @@ def get_ga4gh_signed_url(object_id, access_id): object_id, requested_protocol=access_id, ga4gh_passports=ga4gh_passports, + drs="True", ) + return flask.jsonify(result) diff --git a/fence/blueprints/login/base.py b/fence/blueprints/login/base.py index 0b6ae3f95..08fcab61d 100644 --- a/fence/blueprints/login/base.py +++ b/fence/blueprints/login/base.py @@ -7,6 +7,7 @@ from fence.blueprints.login.redirect import validate_redirect from fence.config import config from fence.errors import UserError +from fence.metrics import metrics logger = get_logger(__name__) @@ -133,6 +134,14 @@ def get(self): def post_login(self, user=None, token_result=None, **kwargs): prepare_login_log(self.idp_name) + metrics.add_login_event( + user_sub=flask.g.user.id, + idp=self.idp_name, + fence_idp=flask.session.get("fence_idp"), + shib_idp=flask.session.get("shib_idp"), + client_id=flask.session.get("client_id"), + ) + if token_result: username = token_result.get(self.username_field) if self.is_mfa_enabled: diff --git a/fence/blueprints/login/fence_login.py b/fence/blueprints/login/fence_login.py index 7efd49520..13b4de8c8 100644 --- a/fence/blueprints/login/fence_login.py +++ b/fence/blueprints/login/fence_login.py @@ -30,19 +30,22 @@ def __init__(self): def get(self): """Handle ``GET /login/fence``.""" - oauth2_redirect_uri = flask.current_app.fence_client.client_kwargs.get( - 
"redirect_uri" - ) + + # OAuth class can have mutliple clients + client = flask.current_app.fence_client._clients[ + flask.current_app.config["OPENID_CONNECT"]["fence"]["name"] + ] + + oauth2_redirect_uri = client.client_kwargs.get("redirect_uri") + redirect_url = flask.request.args.get("redirect") if redirect_url: validate_redirect(redirect_url) flask.session["redirect"] = redirect_url - ( - authorization_url, - state, - ) = flask.current_app.fence_client.generate_authorize_redirect( - oauth2_redirect_uri, prompt="login" - ) + + rv = client.create_authorization_url(oauth2_redirect_uri, prompt="login") + + authorization_url = rv["url"] # add idp parameter to the authorization URL if "idp" in flask.request.args: @@ -57,7 +60,7 @@ def get(self): flask.session["shib_idp"] = shib_idp authorization_url = add_params_to_uri(authorization_url, params) - flask.session["state"] = state + flask.session["state"] = rv["state"] return flask.redirect(authorization_url) @@ -88,16 +91,19 @@ def get(self): " login page for the original application to continue." ) # Get the token response and log in the user. - redirect_uri = flask.current_app.fence_client._get_session().redirect_uri - tokens = flask.current_app.fence_client.fetch_access_token( - redirect_uri, **flask.request.args.to_dict() + client_name = config["OPENID_CONNECT"]["fence"].get("name", "fence") + client = flask.current_app.fence_client._clients[client_name] + oauth2_redirect_uri = client.client_kwargs.get("redirect_uri") + + tokens = client.fetch_access_token( + oauth2_redirect_uri, **flask.request.args.to_dict() ) try: # For multi-Fence setup with two Fences >=5.0.0 id_token_claims = validate_jwt( tokens["id_token"], - aud=self.client.client_id, + aud=client.client_id, scope={"openid"}, purpose="id", attempt_refresh=True, diff --git a/fence/blueprints/login/google.py b/fence/blueprints/login/google.py index 0fa3e4cb5..2c7570795 100644 --- a/fence/blueprints/login/google.py +++ b/fence/blueprints/login/google.py @@ -25,4 +25,5 @@ def get(self): config.get("BASE_URL", "") + "/link/google/callback?code={}".format(flask.request.args.get("code")) ) + return super(GoogleCallback, self).get() diff --git a/fence/blueprints/login/utils.py b/fence/blueprints/login/utils.py index 4b189977e..3dfca2eae 100644 --- a/fence/blueprints/login/utils.py +++ b/fence/blueprints/login/utils.py @@ -21,7 +21,10 @@ def allowed_login_redirects(): with flask.current_app.db.session as session: clients = session.query(Client).all() for client in clients: - allowed.extend(client.redirect_uris) + if isinstance(client.redirect_uris, list): + allowed.extend(client.redirect_uris) + elif isinstance(client.redirect_uris, str): + allowed.append(client.redirect_uris) return {domain(url) for url in allowed} diff --git a/fence/blueprints/oauth2.py b/fence/blueprints/oauth2.py index d79f106aa..0d1428cf5 100644 --- a/fence/blueprints/oauth2.py +++ b/fence/blueprints/oauth2.py @@ -32,9 +32,14 @@ from fence.utils import clear_cookies from fence.user import get_current_user from fence.config import config - +from authlib.oauth2.rfc6749.errors import ( + InvalidScopeError, +) +from fence.utils import validate_scopes +from cdislogging import get_logger blueprint = flask.Blueprint("oauth2", __name__) +logger = get_logger(__name__) @blueprint.route("/authorize", methods=["GET", "POST"]) @@ -114,7 +119,7 @@ def authorize(*args, **kwargs): return flask.redirect(login_url) try: - grant = server.validate_consent_request(end_user=user) + grant = server.get_consent_grant(end_user=user) except 
         raise Unauthorized("Failed to authorize: {}".format(str(e)))
 
@@ -122,6 +127,13 @@
     with flask.current_app.db.session as session:
         client = session.query(Client).filter_by(client_id=client_id).first()
 
+    # Do the scope check here because of our design decision to store the
+    # allowed_scope on the client. Authlib now puts the allowed scope on the
+    # OIDC server side, which doesn't work with our design without modifying
+    # the lib. Both the client and the grant are available at this point.
+    # Handles both GET and POST requests.
+    request_scopes = flask.request.args.get("scope") or flask.request.form.get("scope")
+    validate_scopes(request_scopes, client)
+
     # TODO: any way to get from grant?
     confirm = flask.request.form.get("confirm") or flask.request.args.get("confirm")
     if client.auto_approve:
diff --git a/fence/config-default.yaml b/fence/config-default.yaml
index 9a7aec2d6..e0c6e680f 100755
--- a/fence/config-default.yaml
+++ b/fence/config-default.yaml
@@ -68,6 +68,7 @@ MOCK_STORAGE: true
 # WARNING: ONLY set to true when fence will be deployed in such a way that it will
 # ONLY receive traffic from internal clients and can safely use HTTP.
 AUTHLIB_INSECURE_TRANSPORT: true
+
 # enable Prometheus Metrics for observability purposes
 #
 # WARNING: Any counters, gauges, histograms, etc. should be carefully
@@ -144,6 +145,9 @@ OPENID_CONNECT:
   # If this fence instance is a client of another fence, fill this cfg out.
   # REMOVE if not needed
   fence:
+    # Custom name to display for consent screens. If not provided, will use `fence`.
+    # If the other fence is using NIH Login, you should set name: `NIH Login`
+    name: ''
     # this api_base_url should be the root url for the OTHER fence
     # something like: https://example.com
     api_base_url: ''
@@ -161,9 +165,6 @@ OPENID_CONNECT:
     authorize_url: '{{api_base_url}}/oauth2/authorize'
     access_token_url: '{{api_base_url}}/oauth2/token'
     refresh_token_url: '{{api_base_url}}/oauth2/token'
-    # Custom name to display for consent screens. If not provided, will use `fence`.
-    # If the other fence is using NIH Login, you should make name: `NIH Login`
-    name: ''
     # if mock is true, will fake a successful login response for login
     # WARNING: DO NOT ENABLE IN PRODUCTION (for testing purposes only)
     mock: false
@@ -392,16 +393,9 @@ ENABLED_IDENTITY_PROVIDERS: {}
 
 # //////////////////////////////////////////////////////////////////////////////////////
-# LIBRARY CONFIGURATION (authlib & flask)
+# LIBRARY CONFIGURATION (flask)
 # - Already contains reasonable defaults
 # //////////////////////////////////////////////////////////////////////////////////////
-# authlib-specific configs for OIDC flow and JWTs
-# NOTE: the OAUTH2_JWT_KEY cfg gets set automatically by fence if keys are setup
-# correctly
-OAUTH2_JWT_ALG: 'RS256'
-OAUTH2_JWT_ENABLED: true
-OAUTH2_JWT_ISS: '{{BASE_URL}}'
-OAUTH2_PROVIDER_ERROR_URI: '/api/oauth2/errors'
 
 # used for flask, "path mounted under by the application / web server"
 # since we deploy as microservices, fence is typically under {{base}}/user
@@ -700,7 +694,7 @@ GS_BUCKETS: {}
 #     bucket3:
 #       region: 'us-east-1'
 
-# When using the Cleversafe storageclient, whether or not to send verify=true 
+# When using the Cleversafe storageclient, whether or not to send verify=true
 # for requests
 VERIFY_CLEVERSAFE_CERT: true
diff --git a/fence/jwt/utils.py b/fence/jwt/utils.py
index eada044a7..be6c60f4b 100644
--- a/fence/jwt/utils.py
+++ b/fence/jwt/utils.py
@@ -1,8 +1,13 @@
 import flask
 
+from cdislogging import get_logger
+
 from fence.errors import Unauthorized
 
+logger = get_logger(__name__)
+
+
 def get_jwt_header():
     """
     Get the user's JWT from the Authorization header, or raise Unauthorized on failure.
@@ -18,5 +23,8 @@ def get_jwt_header():
     try:
         jwt = header.split(" ")[1]
     except IndexError:
-        raise Unauthorized("authorization header missing token")
+        msg = "authorization header missing token"
+        logger.debug(f"{msg}. Received header: {header}")
+        logger.error(f"{msg}.")
+        raise Unauthorized(msg)
     return jwt
diff --git a/fence/metrics.py b/fence/metrics.py
new file mode 100644
index 000000000..acdb200a9
--- /dev/null
+++ b/fence/metrics.py
@@ -0,0 +1,199 @@
+"""
+Metrics are collected by the Prometheus client and exposed at the `/metrics` endpoint.
+
+To add a new metric:
+- Add a new method to the `Metrics` class below (see `add_login_event` and `add_signed_url_event`
+for examples).
+- The new method should call the `_increment_counter` and/or `_set_gauge` methods with the
+appropriate metric name and labels.
+- Call the new method from the code where relevant, for example:
+  from fence.metrics import metrics
+  metrics.add_login_event(...)
+- Add unit tests to the `tests/test_metrics` file.
+"""
+
+
+import os
+import pathlib
+
+from cdislogging import get_logger
+from prometheus_client import (
+    CollectorRegistry,
+    multiprocess,
+    Counter,
+    Gauge,
+    generate_latest,
+    CONTENT_TYPE_LATEST,
+)
+
+from fence.config import config
+
+
+logger = get_logger(__name__)
+
+
+class Metrics:
+    """
+    Class to handle Prometheus metrics
+    Attributes:
+        registry (CollectorRegistry): Prometheus registry
+        metrics (dict): Dictionary to store Prometheus metrics
+    """
+
+    def __init__(self, prometheus_dir="/var/tmp/uwsgi_flask_metrics"):
+        pathlib.Path(prometheus_dir).mkdir(parents=True, exist_ok=True)
+        os.environ["PROMETHEUS_MULTIPROC_DIR"] = prometheus_dir
+
+        self._registry = CollectorRegistry()
+        multiprocess.MultiProcessCollector(self._registry)
+        self._metrics = {}
+
+        # set the descriptions of new metrics here.
Descriptions not specified here + # will default to the metric name. + self._counter_descriptions = { + "gen3_fence_presigned_url": "Fence presigned urls", + "gen3_fence_login": "Fence logins", + } + self._gauge_descriptions = { + "gen3_fence_presigned_url_size": "Fence presigned urls", + } + + def get_latest_metrics(self): + """ + Generate the latest Prometheus metrics + Returns: + str: Latest Prometheus metrics + str: Content type of the latest Prometheus metrics + """ + # When metrics gathering is not enabled, the metrics endpoint should not error, but it should + # not return any data. + if not config["ENABLE_PROMETHEUS_METRICS"]: + return "", None + + return generate_latest(self._registry), CONTENT_TYPE_LATEST + + def _increment_counter(self, name, labels): + """ + Increment a Prometheus counter metric. + Note that this function should not be called directly - implement a function like + `add_login_event` instead. A metric's labels should always be consistent. + Args: + name (str): Name of the metric + labels (dict): Dictionary of labels for the metric + """ + # create the counter if it doesn't already exist + if name not in self._metrics: + description = self._counter_descriptions.get(name, name) + logger.info( + f"Creating counter '{name}' with description '{description}' and labels: {labels}" + ) + self._metrics[name] = Counter(name, description, [*labels.keys()]) + elif type(self._metrics[name]) != Counter: + raise ValueError( + f"Trying to create counter '{name}' but a {type(self._metrics[name])} with this name already exists" + ) + + logger.debug(f"Incrementing counter '{name}' with labels: {labels}") + self._metrics[name].labels(*labels.values()).inc() + + def _set_gauge(self, name, labels, value): + """ + Set a Prometheus gauge metric. + Note that this function should not be called directly - implement a function like + `add_signed_url_event` instead. A metric's labels should always be consistent. 
+ Args: + name (str): Name of the metric + labels (dict): Dictionary of labels for the metric + value (int): Value to set the metric to + """ + # create the gauge if it doesn't already exist + if name not in self._metrics: + description = self._gauge_descriptions.get(name, name) + logger.info( + f"Creating gauge '{name}' with description '{description}' and labels: {labels}" + ) + self._metrics[name] = Gauge(name, description, [*labels.keys()]) + elif type(self._metrics[name]) != Gauge: + raise ValueError( + f"Trying to create gauge '{name}' but a {type(self._metrics[name])} with this name already exists" + ) + + logger.debug(f"Setting gauge '{name}' with labels: {labels}") + self._metrics[name].labels(*labels.values()).set(value) + + def add_login_event(self, user_sub, idp, fence_idp, shib_idp, client_id): + """ + Record a login event + """ + if not config["ENABLE_PROMETHEUS_METRICS"]: + return + self._increment_counter( + "gen3_fence_login", + { + "user_sub": user_sub, + "idp": idp, + "client_id": client_id, + "fence_idp": fence_idp, + "shib_idp": shib_idp, + }, + ) + self._increment_counter( + "gen3_fence_login", + { + "user_sub": user_sub, + "idp": "all", + "client_id": client_id, + # when counting all IDPs, we don't care about the fence and shib IDP values + "fence_idp": None, + "shib_idp": None, + }, + ) + + def add_signed_url_event( + self, + action, + protocol, + acl, + authz, + bucket, + user_sub, + client_id, + drs, + size_in_kibibytes, + ): + """ + Record a signed URL event + """ + if not config["ENABLE_PROMETHEUS_METRICS"]: + return + self._increment_counter( + "gen3_fence_presigned_url", + { + "action": action, + "protocol": protocol, + "acl": acl, + "authz": authz, + "bucket": bucket, + "user_sub": user_sub, + "client_id": client_id, + "drs": drs, + }, + ) + self._set_gauge( + "gen3_fence_presigned_url_size", + { + "action": action, + "protocol": protocol, + "acl": acl, + "authz": authz, + "bucket": bucket, + "user_sub": user_sub, + "client_id": client_id, + "drs": drs, + }, + size_in_kibibytes, + ) + + +# Initialize the Metrics instance +metrics = Metrics() diff --git a/fence/models.py b/fence/models.py index b5fdb9a8e..da4d81b3d 100644 --- a/fence/models.py +++ b/fence/models.py @@ -8,9 +8,14 @@ """ from enum import Enum - from marshmallow_sqlalchemy import SQLAlchemyAutoSchema -from authlib.flask.oauth2.sqla import OAuth2AuthorizationCodeMixin, OAuth2ClientMixin +from authlib.integrations.sqla_oauth2 import ( + OAuth2AuthorizationCodeMixin, + OAuth2ClientMixin, +) + +import time +import json import bcrypt from datetime import datetime, timedelta import flask @@ -153,7 +158,7 @@ def get_client_expires_at(expires_in, grant_types): # `timestamp()` already converts to UTC expires_at = (datetime.now() + timedelta(days=expires_in)).timestamp() - if "client_credentials" in grant_types.split("\n"): + if "client_credentials" in grant_types: if not expires_in or expires_in <= 0 or expires_in > 366: logger.warning( "Credentials with the 'client_credentials' grant which will be used externally are required to expire within 12 months. Use the `--expires-in` parameter to add an expiration." 
@@ -227,9 +232,9 @@ class Client(Base, OAuth2ClientMixin):
 
     __tablename__ = "client"
 
-    client_id = Column(String(40), primary_key=True)
+    client_id = Column(String(48), primary_key=True, index=True)
     # this is hashed secret
-    client_secret = Column(String(60), unique=True, index=True, nullable=True)
+    client_secret = Column(String(120), unique=True, index=True, nullable=True)
 
     # human readable name
     name = Column(String(40), nullable=False)
@@ -250,46 +255,51 @@ class Client(Base, OAuth2ClientMixin):
     # public or confidential
     is_confidential = Column(Boolean, default=True)
 
-    # NOTE: DEPRECATED
-    # Client now uses `redirect_uri` column, from authlib client model
-    _redirect_uris = Column(Text)
-
-    _allowed_scopes = Column(Text, nullable=False, default="")
+    expires_at = Column(Integer, nullable=False, default=0)
 
+    # Deprecated; keeping these around in case they are needed later
     _default_scopes = Column(Text)
     _scopes = ["compute", "storage", "user"]
 
-    expires_at = Column(Integer, nullable=False, default=0)
+    def __init__(self, client_id, expires_in=0, **kwargs):
 
-    # note that authlib adds a response_type column which is not used here
+        # New JSON object for the Authlib OAuth client
+        if "_client_metadata" in kwargs:
+            client_metadata = json.loads(kwargs.pop("_client_metadata"))
+        else:
+            client_metadata = {}
 
-    def __init__(self, client_id, expires_in=0, **kwargs):
-        """
-        NOTE that for authlib, the client must have an attribute ``redirect_uri`` which
-        is a newline-delimited list of valid redirect URIs.
-        """
         if "allowed_scopes" in kwargs:
             allowed_scopes = kwargs.pop("allowed_scopes")
             if isinstance(allowed_scopes, list):
-                kwargs["_allowed_scopes"] = " ".join(allowed_scopes)
+                client_metadata["scope"] = " ".join(allowed_scopes)
             else:
-                kwargs["_allowed_scopes"] = allowed_scopes
+                client_metadata["scope"] = allowed_scopes
+
+        # redirect uri is now part of authlib's client_metadata
         if "redirect_uris" in kwargs:
             redirect_uris = kwargs.pop("redirect_uris")
             if isinstance(redirect_uris, list):
-                kwargs["redirect_uri"] = "\n".join(redirect_uris)
+                # redirect_uris is now part of the metadata JSON object
+                client_metadata["redirect_uris"] = redirect_uris
+            elif redirect_uris:
+                client_metadata["redirect_uris"] = [redirect_uris]
             else:
-                kwargs["redirect_uri"] = redirect_uris
+                client_metadata["redirect_uris"] = []
 
+        # default grant types to allow the auth code flow and token refreshing
         grant_types = kwargs.pop("grant_types", None) or [
            GrantType.code.value,
            GrantType.refresh.value,
        ]
+        # grant types are now part of authlib's client_metadata
         if isinstance(grant_types, list):
-            kwargs["grant_type"] = "\n".join(grant_types)
+            client_metadata["grant_types"] = grant_types
+        elif grant_types:
+            # assume it's already in the correct format and make it a list
+            client_metadata["grant_types"] = [grant_types]
         else:
-            # assume it's already in correct format
-            kwargs["grant_type"] = grant_types
+            client_metadata["grant_types"] = []
 
         supported_grant_types = [
             "authorization_code",
@@ -299,28 +309,50 @@ def __init__(self, client_id, expires_in=0, **kwargs):
         ]
         assert all(
             grant_type in supported_grant_types
-            for grant_type in kwargs["grant_type"].split("\n")
-        ), f"Grant types '{kwargs['grant_type']}' are not in supported types {supported_grant_types}"
+            for grant_type in client_metadata["grant_types"]
+        ), f"Grant types '{client_metadata['grant_types']}' are not in supported types {supported_grant_types}"
 
-        if "authorization_code" in kwargs["grant_type"].split("\n"):
+        if "authorization_code" in client_metadata["grant_types"]:
             assert
kwargs.get("user") or kwargs.get( "user_id" ), "A username is required for the 'authorization_code' grant" - assert kwargs.get( - "redirect_uri" + assert client_metadata.get( + "redirect_uris" ), "Redirect URL(s) are required for the 'authorization_code' grant" - expires_at = get_client_expires_at( - expires_in=expires_in, grant_types=kwargs["grant_type"] - ) - if expires_at: - kwargs["expires_at"] = expires_at + # response_types is now part of authlib's client_metadata + response_types = kwargs.pop("response_types", None) + if isinstance(response_types, list): + client_metadata["response_types"] = "\n".join(response_types) + elif response_types: + # assume it's already in correct format + client_metadata["response_types"] = [response_types] + else: + client_metadata["response_types"] = [] + + if "token_endpoint_auth_method" in kwargs: + client_metadata["token_endpoint_auth_method"] = kwargs.pop( + "token_endpoint_auth_method" + ) + + # Do this if expires_in is specified or expires_at is not supplied + if expires_in != 0 or ("expires_at" not in kwargs): + expires_at = get_client_expires_at( + expires_in=expires_in, grant_types=client_metadata["grant_types"] + ) + if expires_at: + kwargs["expires_at"] = expires_at + + if "client_id_issued_at" not in kwargs or kwargs["client_id_issued_at"] is None: + kwargs["client_id_issued_at"] = int(time.time()) + + kwargs["_client_metadata"] = json.dumps(client_metadata) super(Client, self).__init__(client_id=client_id, **kwargs) @property def allowed_scopes(self): - return self._allowed_scopes.split(" ") + return self.scope.split(" ") @property def client_type(self): @@ -334,16 +366,6 @@ def client_type(self): return "public" return "confidential" - @property - def default_redirect_uri(self): - return self.redirect_uris[0] - - @property - def default_scopes(self): - if self._default_scopes: - return self._default_scopes.split() - return [] - @staticmethod def get_by_client_id(client_id): with flask.current_app.db.session as session: @@ -366,18 +388,18 @@ def check_requested_scopes(self, scopes): return False return set(self.allowed_scopes).issuperset(scopes) - def check_token_endpoint_auth_method(self, method): + # Replaces Authlib method. Our logic does not actually look at token_auth_endpoint value + def check_endpoint_auth_method(self, method, endpoint): """ Only basic auth is supported. If anything else gets added, change this """ - protected_types = [ClientAuthType.basic.value, ClientAuthType.post.value] - return (self.is_confidential and method in protected_types) or ( - not self.is_confidential and method == ClientAuthType.none.value - ) + if endpoint == "token": + protected_types = [ClientAuthType.basic.value, ClientAuthType.post.value] + return (self.is_confidential and method in protected_types) or ( + not self.is_confidential and method == ClientAuthType.none.value + ) - def validate_scopes(self, scopes): - scopes = scopes[0].split(",") - return all(scope in self._scopes for scope in scopes) + return True def check_response_type(self, response_type): allowed_response_types = [] diff --git a/fence/oidc/endpoints.py b/fence/oidc/endpoints.py index b0ccbcacd..254d9ef69 100644 --- a/fence/oidc/endpoints.py +++ b/fence/oidc/endpoints.py @@ -7,7 +7,7 @@ from fence.errors import BlacklistingError import fence.jwt.blacklist - +import jwt logger = get_logger(__name__) @@ -18,20 +18,20 @@ class RevocationEndpoint(authlib.oauth2.rfc7009.RevocationEndpoint): server should handle requests for token revocation. 
""" - def query_token(self, token, token_type_hint, client): + def query_token(self, token, token_type_hint): """ Look up a token. Since all tokens are JWT, just return the token. """ - return token + return JWTToken(token) - def revoke_token(self, token): + def revoke_token(self, token, request): """ Revoke a token. """ try: - fence.jwt.blacklist.blacklist_encoded_token(token) + fence.jwt.blacklist.blacklist_encoded_token(token.encoded_string) except BlacklistingError as err: logger.info( "Token provided for revocation is not valid. " @@ -109,3 +109,25 @@ def create_revocation_response(self): finally: body = {"error": message} if message != "" else {} return (status, body, headers) + + +class JWTToken(object): + def __init__(self, token): + self.encoded_string = token + self.client_id = jwt.decode( + token, algorithms=["RS256"], options={"verify_signature": False} + ).get("azp") + + def check_client(self, client): + """ + Check if token is issued by the same client + Expected function by Authlib + + Args: + client: oidc client + + Returns: + boolean value whether client_id matches + """ + + return self.client_id == client.client_id diff --git a/fence/oidc/grants/__init__.py b/fence/oidc/grants/__init__.py index f3740ba19..c6e372bd7 100644 --- a/fence/oidc/grants/__init__.py +++ b/fence/oidc/grants/__init__.py @@ -1,4 +1,4 @@ from fence.oidc.grants.implicit_grant import ImplicitGrant -from fence.oidc.grants.oidc_code_grant import OpenIDCodeGrant +from fence.oidc.grants.oidc_code_grant import AuthorizationCodeGrant from fence.oidc.grants.refresh_token_grant import RefreshTokenGrant from fence.oidc.grants.client_credentials_grant import ClientCredentialsGrant diff --git a/fence/oidc/grants/implicit_grant.py b/fence/oidc/grants/implicit_grant.py index e1532b926..784f412c9 100644 --- a/fence/oidc/grants/implicit_grant.py +++ b/fence/oidc/grants/implicit_grant.py @@ -14,7 +14,7 @@ def exists_nonce(self, nonce, request): return True return False - def create_authorization_response(self, grant_user): + def create_authorization_response(self, redirect_uri, grant_user): """ Overrides method from authlib---authlib has some peculiarities here such as trying to access ``token["scope"]`` from the token response which is not @@ -22,6 +22,9 @@ def create_authorization_response(self, grant_user): here: https://openid.net/specs/openid-connect-core-1_0.html#ImplicitAuthResponse + + 2024-04-19 + TODO: Re-evaluate this whether if it is still necessary. 
""" state = self.request.state if grant_user: @@ -46,7 +49,7 @@ def create_authorization_response(self, grant_user): # http://openid.net/specs/oauth-v2-multiple-response-types-1_0.html#ResponseModes return create_response_mode_response( - redirect_uri=self.redirect_uri, + redirect_uri=redirect_uri, params=params, response_mode=self.request.data.get( "response_mode", self.DEFAULT_RESPONSE_MODE diff --git a/fence/oidc/grants/oidc_code_grant.py b/fence/oidc/grants/oidc_code_grant.py index 771b6b59d..0514e68a5 100644 --- a/fence/oidc/grants/oidc_code_grant.py +++ b/fence/oidc/grants/oidc_code_grant.py @@ -1,27 +1,31 @@ from authlib.common.security import generate_token -from authlib.oidc.core import grants +from authlib.oauth2.rfc6749 import grants from authlib.oidc.core.errors import ( AccountSelectionRequiredError, ConsentRequiredError, LoginRequiredError, ) -from authlib.oauth2.rfc6749 import InvalidRequestError +from authlib.oauth2.rfc6749 import ( + InvalidRequestError, + UnauthorizedClientError, + InvalidGrantError, +) import flask from fence.utils import get_valid_expiration_from_request from fence.config import config from fence.models import AuthorizationCode, ClientAuthType, User +from cdislogging import get_logger + +logger = get_logger(__name__) -class OpenIDCodeGrant(grants.OpenIDCodeGrant): +class AuthorizationCodeGrant(grants.AuthorizationCodeGrant): TOKEN_ENDPOINT_AUTH_METHODS = [auth_type.value for auth_type in ClientAuthType] def __init__(self, *args, **kwargs): - super(OpenIDCodeGrant, self).__init__(*args, **kwargs) + super(AuthorizationCodeGrant, self).__init__(*args, **kwargs) # Override authlib validate_request_prompt with our own, to fix login prompt behavior - self._hooks["after_validate_consent_request"].discard( - grants.util.validate_request_prompt - ) self.register_hook( "after_validate_consent_request", self.validate_request_prompt ) @@ -60,12 +64,53 @@ def create_authorization_code(client, grant_user, request): return code.code + def save_authorization_code(self, code, request): + """Save authorization_code for later use. Must be implemented. 
+ + Args: + code: authorization code string + request: HTTP request + + Returns: + authorization code string + """ + # requested lifetime (in seconds) for the refresh token + refresh_token_expires_in = get_valid_expiration_from_request( + expiry_param="refresh_token_expires_in", + max_limit=config["REFRESH_TOKEN_EXPIRES_IN"], + default=config["REFRESH_TOKEN_EXPIRES_IN"], + ) + + client = request.client + code = AuthorizationCode( + code=code, + client_id=client.client_id, + redirect_uri=request.redirect_uri, + scope=request.scope, + user_id=request.user.id, + nonce=request.data.get("nonce"), + refresh_token_expires_in=refresh_token_expires_in, + ) + + with flask.current_app.db.session as session: + session.add(code) + session.commit() + return code.code + def generate_token(self, *args, **kwargs): return self.server.generate_token(*args, **kwargs) def create_token_response(self): + """Generate Tokens + + Raises: + InvalidRequestError: if no user present in authorization code + + Returns: + HTTP status code, token, HTTP response header + """ client = self.request.client - authorization_code = self.request.credential + authorization_code = self.request.authorization_code user = self.authenticate_user(authorization_code) if not user: @@ -80,7 +125,7 @@ def create_token_response(self): self.GRANT_TYPE, user=user, scope=scope, - include_refresh_token=client.has_client_secret(), + include_refresh_token=bool(client.client_secret), nonce=nonce, refresh_token_expires_in=refresh_token_expires_in, ) @@ -92,7 +137,7 @@ def create_token_response(self): return 200, token, self.TOKEN_RESPONSE_HEADER @staticmethod - def parse_authorization_code(code, client): + def query_authorization_code(code, client): """ Search for an ``AuthorizationCode`` matching the given code string and client. @@ -142,7 +187,7 @@ def exists_nonce(self, nonce, request): return True return False - def validate_request_prompt(self, end_user): + def validate_request_prompt(self, end_user, redirect_uri): """ Override method in authlib to fix behavior with login prompt. 
""" @@ -175,3 +220,47 @@ def validate_request_prompt(self, end_user): self.prompt = prompt return self + + def validate_token_request(self): + """ + Validate token request by checking allowed grant type, + making sure authorization code is found, and redirect URI is valid + + Raises: + UnauthorizedClientError: if grant type is incorrect + InvalidRequestError: if authorization code is absent + InvalidGrantError: if authorization code is invalid + InvalidGrantError: if redirect_uri is invalid + """ + # authenticate the client if client authentication is included + logger.debug("Authenticating token client..") + client = self.authenticate_token_endpoint_client() + + logger.debug("Validate token request of %r", client) + if not client.check_grant_type(self.GRANT_TYPE): + raise UnauthorizedClientError( + f'The client is not authorized to use "grant_type={self.GRANT_TYPE}"' + ) + + code = self.request.data.get("code") + if code is None: + raise InvalidRequestError('Missing "code" in request.') + + # ensure that the authorization code was issued to the authenticated + # confidential client, or if the client is public, ensure that the + # code was issued to "client_id" in the request + authorization_code = self.query_authorization_code(code, client) + if not authorization_code: + raise InvalidGrantError('Invalid "code" in request.') + + # validate redirect_uri parameter + logger.debug("Validate token redirect_uri of %r", client) + redirect_uri = self.request.redirect_uri + original_redirect_uri = authorization_code.get_redirect_uri() + if original_redirect_uri and redirect_uri != original_redirect_uri: + raise InvalidGrantError('Invalid "redirect_uri" in request.') + + # save for create_token_response + self.request.client = client + self.request.authorization_code = authorization_code + self.execute_hook("after_validate_token_request") diff --git a/fence/oidc/grants/refresh_token_grant.py b/fence/oidc/grants/refresh_token_grant.py index 5c01a9a6a..b607bd9af 100644 --- a/fence/oidc/grants/refresh_token_grant.py +++ b/fence/oidc/grants/refresh_token_grant.py @@ -4,6 +4,7 @@ InvalidRequestError, InvalidScopeError, UnauthorizedClientError, + InvalidGrantError, ) from authlib.oauth2.rfc6749.grants import RefreshTokenGrant as AuthlibRefreshTokenGrant from authlib.oauth2.rfc6749.util import scope_to_list @@ -74,7 +75,7 @@ def validate_token_request(self): raise UnauthorizedClientError("invalid grant type") self.request.client = client self.authenticate_token_endpoint_client() - token = self._validate_request_token() + token = self._validate_request_token(client) self._validate_token_scope(token) self.request.credential = token @@ -141,7 +142,10 @@ def create_token_response(self): ##### end refresh token patch block ##### expires_in = credential["exp"] token = self.generate_token( - client, self.GRANT_TYPE, user=user, expires_in=expires_in, scope=scope + user=user, + scope=scope, + grant_type=self.GRANT_TYPE, + expires_in=expires_in, ) # replace the newly generated refresh token with the one provided @@ -154,14 +158,29 @@ def create_token_response(self): if self.GRANT_TYPE == "refresh_token": token["refresh_token"] = self.request.data.get("refresh_token", "") - # TODO - logger.info("") - self.request.user = user self.server.save_token(token, self.request) self.execute_hook("process_token", token=token) return 200, token, self.TOKEN_RESPONSE_HEADER + def _validate_request_token(self, client): + """ + OVERRIDES method from authlib. + + Why? Becuase our "token" is not a class with `check_client` method. 
+        So we just need to treat it like a dictionary.
+        """
+        refresh_token = self.request.data.get("refresh_token")
+        if refresh_token is None:
+            raise InvalidRequestError(
+                'Missing "refresh_token" in request.',
+            )
+
+        token = self.authenticate_refresh_token(refresh_token)
+        if not token or not token["azp"] == client.get_client_id():
+            raise InvalidGrantError()
+        return token
+
    def _validate_token_scope(self, token):
        """
        OVERRIDES method from authlib.
diff --git a/fence/oidc/jwt_generator.py b/fence/oidc/jwt_generator.py
index dfba556b6..2787add77 100644
--- a/fence/oidc/jwt_generator.py
+++ b/fence/oidc/jwt_generator.py
@@ -15,6 +15,7 @@
 )

 from fence.config import config
-
+from fence.utils import validate_scopes


def generate_token(client, grant_type, **kwargs):
@@ -47,6 +48,10 @@
        claims (to avoid having to encode or decode the refresh token here)
    """

+    # We need to validate scopes here because Authlib only checks the requested
+    # scopes against the server's allowed_scopes
+    validate_scopes(kwargs["scope"], client)
+
    if grant_type == "authorization_code" or grant_type == "refresh_token":
        return generate_token_response(client, grant_type, **kwargs)
    elif grant_type == "implicit":
diff --git a/fence/oidc/oidc_server.py b/fence/oidc/oidc_server.py
index 391e42d92..9b9aefad7 100644
--- a/fence/oidc/oidc_server.py
+++ b/fence/oidc/oidc_server.py
@@ -1,14 +1,25 @@
-from authlib.common.urls import urlparse, url_decode
-from authlib.flask.oauth2 import AuthorizationServer
+import flask
+
+from fence.oidc.errors import InvalidClientError
+from fence.oidc.jwt_generator import generate_token
+
+from authlib.integrations.flask_oauth2 import AuthorizationServer
 from authlib.oauth2.rfc6749.authenticate_client import (
     ClientAuthentication as AuthlibClientAuthentication,
 )
-from authlib.oauth2.rfc6749.errors import InvalidClientError as AuthlibClientError
-import flask
+from authlib.oauth2.rfc6749.errors import (
+    InvalidClientError as AuthlibClientError,
+    OAuth2Error,
+    UnsupportedGrantTypeError,
+)

-from fence.oidc.errors import InvalidClientError
-from fence.oidc.jwt_generator import generate_token
+from fence import logger
+from cdislogging import get_logger
+from flask.wrappers import Request
+from authlib.oauth2.rfc6749 import OAuth2Request
+
+logger = get_logger(__name__)


class ClientAuthentication(AuthlibClientAuthentication):
@@ -17,23 +28,30 @@ class ClientAuthentication(AuthlibClientAuthentication):
    in order to authenticate OAuth clients.
""" - def authenticate(self, request, methods): + def authenticate(self, request, methods, endpoint): """ Override method from authlib """ - client = super(ClientAuthentication, self).authenticate(request, methods) + client = super(ClientAuthentication, self).authenticate( + request, methods, endpoint + ) + + logger.info("oidc_server.py clientAuthentioncation authenticate complete") # don't allow confidential clients to not use auth if client.is_confidential: m = list(methods) if "none" in m: m.remove("none") try: - client = super(ClientAuthentication, self).authenticate(request, m) + client = super(ClientAuthentication, self).authenticate( + request, m, endpoint + ) except AuthlibClientError: raise InvalidClientError( - "OAuth client failed to authenticate; client ID or secret is" + "Confidential OAuth client failed to authenticate; client ID or secret is" " missing or incorrect" ) + return client @@ -53,6 +71,57 @@ def init_app(self, app, query_client=None, save_token=None): self.save_token = save_token self.app = app self.generate_token = generate_token - self.init_jwt_config(app) if getattr(self, "query_client"): self.authenticate_client = ClientAuthentication(query_client) + + # 2023-09-29 + # Below code replaces authlib functions. It does the same thing as authlib 1.2.1 except it returns grant_scope from + # either args or forms. Authlib 1.2.1 forces grant_type to be part of post request body which isn't the current use case. + # https://github.com/lepture/authlib/blob/a6e89f8e6cf6f6bebd63dcdc2665b7d22cf0fde3/authlib/oauth2/rfc6749/requests.py#L59C10-L59C10 + def create_token_response(self, request=None): + """Validate token request and create token response. + + Args: + request: HTTP request instance + Returns: + HTTP response with token + """ + request = self.create_oauth2_request(request) + + try: + grant = self.get_token_grant(request) + except UnsupportedGrantTypeError as error: + return self.handle_error_response(request, error) + + logger.debug("Got grant succesfully") + + try: + grant.validate_token_request() + logger.debug("Token Request validated succesfully") + args = grant.create_token_response() + logger.debug("Token created succesfully") + return self.handle_response(*args) + except OAuth2Error as error: + return self.handle_error_response(request, error) + + def create_oauth2_request(self, request): + return FenceOAuth2Request(flask.request) + + +class FenceOAuth2Request(OAuth2Request): + def __init__(self, request: Request): + super().__init__(request.method, request.url, None, request.headers) + self._request = request + + @property + def args(self): + return self._request.args + + @property + def form(self): + return self._request.values + + # Get grant_type from either url or body + @property + def grant_type(self) -> str: + return self.data.get("grant_type") diff --git a/fence/oidc/server.py b/fence/oidc/server.py index 846fd4224..67d0a6089 100644 --- a/fence/oidc/server.py +++ b/fence/oidc/server.py @@ -9,7 +9,7 @@ from fence.oidc.client import authenticate_public_client, query_client from fence.oidc.endpoints import RevocationEndpoint from fence.oidc.grants import ( - OpenIDCodeGrant, + AuthorizationCodeGrant, ImplicitGrant, RefreshTokenGrant, ClientCredentialsGrant, @@ -18,7 +18,7 @@ server = OIDCServer(query_client=query_client, save_token=lambda *_: None) -server.register_grant(OpenIDCodeGrant) +server.register_grant(AuthorizationCodeGrant) server.register_grant(ImplicitGrant) server.register_grant(RefreshTokenGrant) 
server.register_grant(ClientCredentialsGrant)
diff --git a/fence/resources/aws/boto_manager.py b/fence/resources/aws/boto_manager.py
index 93a5366ed..e0744554a 100644
--- a/fence/resources/aws/boto_manager.py
+++ b/fence/resources/aws/boto_manager.py
@@ -17,13 +17,34 @@ class BotoManager(object):
        900  # minimum time for aws assume role is 900 seconds as per boto docs
    )

-    def __init__(self, config, logger):
-        self.sts_client = client("sts", **config)
-        self.s3_client = client("s3", **config)
+    def __init__(self, config, buckets, logger):
+        # use the first configured credential set as the default for STS/S3
+        default = list(config.values())[0]
+        self.sts_client = client("sts", **default)
+        self.s3_client = client("s3", **default)
+        self.s3_clients = self.create_s3_clients(config, buckets)
        self.logger = logger
        self.ec2 = None
        self.iam = None

+    def create_s3_clients(self, config, buckets):
+        # build one S3 client per bucket, using the credentials (and optional
+        # endpoint URL) configured for that bucket
+        s3_clients = {}
+        for bucket in buckets:
+            cred_name = buckets[bucket]["cred"]
+            creds = {}
+            if cred_name != "*":
+                creds = config[cred_name]
+            if "endpoint_url" in buckets[bucket]:
+                endpoint_url = buckets[bucket]["endpoint_url"]
+                s3_clients[bucket] = client("s3", **creds, endpoint_url=endpoint_url)
+            else:
+                s3_clients[bucket] = client("s3", **creds)
+        return s3_clients
+
+    def get_s3_client(self, bucket):
+        # fall back to the default S3 client for buckets without a dedicated one
+        if self.s3_clients.get(bucket) is None:
+            return self.s3_client
+        return self.s3_clients[bucket]
+
    def delete_data_file(self, bucket, prefix):
        """
        We use buckets with versioning disabled.
@@ -33,7 +54,8 @@ def delete_data_file(self, bucket, prefix):
        https://docs.aws.amazon.com/AmazonS3/latest/dev/DeletingObjectsfromVersioningSuspendedBuckets.html
        """
        try:
-            s3_objects = self.s3_client.list_objects_v2(
+            s3_client = self.get_s3_client(bucket)
+            s3_objects = s3_client.list_objects_v2(
                Bucket=bucket, Prefix=prefix, Delimiter="/"
            )

@@ -52,7 +74,7 @@ def delete_data_file(self, bucket, prefix):
                self.logger.error("multiple files found with prefix {}".format(prefix))
                return ("Multiple files found matching this prefix.
Backing off.", 400) key = s3_objects["Contents"][0]["Key"] - self.s3_client.delete_object(Bucket=bucket, Key=key) + s3_client.delete_object(Bucket=bucket, Key=key) self.logger.info( "deleted file for prefix {} in bucket {}".format(prefix, bucket) ) diff --git a/fence/resources/openid/idp_oauth2.py b/fence/resources/openid/idp_oauth2.py index 3c681b1e5..c2e497085 100644 --- a/fence/resources/openid/idp_oauth2.py +++ b/fence/resources/openid/idp_oauth2.py @@ -1,4 +1,4 @@ -from authlib.client import OAuth2Session +from authlib.integrations.requests_client import OAuth2Session from cached_property import cached_property from flask import current_app from jose import jwt diff --git a/fence/resources/user/user_session.py b/fence/resources/user/user_session.py index 326c84860..fc061a74c 100644 --- a/fence/resources/user/user_session.py +++ b/fence/resources/user/user_session.py @@ -190,8 +190,8 @@ def save_session(self, app, session, response): token = session.get_updated_token(app) if token: response.set_cookie( - app.config["SESSION_COOKIE_NAME"], - token, + key=app.config["SESSION_COOKIE_NAME"], + value=token, expires=self.get_expiration_time(app, session), httponly=True, domain=domain, @@ -210,7 +210,7 @@ def save_session(self, app, session, response): # okay if user is hitting with just an access_token if user_sess_id != "" and not user: response.set_cookie( - config["ACCESS_TOKEN_COOKIE_NAME"], + key=config["ACCESS_TOKEN_COOKIE_NAME"], expires=0, httponly=True, domain=domain, @@ -221,7 +221,7 @@ def save_session(self, app, session, response): # clear access token if not elif user_sess_id != "" and user.id != user_sess_id: response.set_cookie( - config["ACCESS_TOKEN_COOKIE_NAME"], + key=config["ACCESS_TOKEN_COOKIE_NAME"], expires=0, httponly=True, domain=domain, @@ -250,14 +250,14 @@ def save_session(self, app, session, response): # expiration it just won't be stored in the cookie # anymore response.set_cookie( - app.config["SESSION_COOKIE_NAME"], + key=app.config["SESSION_COOKIE_NAME"], expires=0, httponly=True, domain=domain, secure=secure, ) response.set_cookie( - config["ACCESS_TOKEN_COOKIE_NAME"], + key=config["ACCESS_TOKEN_COOKIE_NAME"], expires=0, httponly=True, domain=domain, @@ -337,8 +337,8 @@ def _create_access_token_cookie(app, session, response, user): domain = app.session_interface.get_cookie_domain(app) response.set_cookie( - config["ACCESS_TOKEN_COOKIE_NAME"], - access_token, + key=config["ACCESS_TOKEN_COOKIE_NAME"], + value=access_token, expires=expiration, httponly=True, domain=domain, diff --git a/fence/scripting/fence_create.py b/fence/scripting/fence_create.py index 613afb73a..add1123a7 100644 --- a/fence/scripting/fence_create.py +++ b/fence/scripting/fence_create.py @@ -62,7 +62,7 @@ generate_client_credentials, get_SQLAlchemyDriver, ) - +from sqlalchemy.orm.attributes import flag_modified from gen3authz.client.arborist.client import ArboristClient logger = get_logger(__name__) @@ -100,15 +100,19 @@ def modify_client_action( if not clients: raise Exception("client {} does not exist".format(client_name)) for client in clients: + metadata = client.client_metadata if urls: if append: - client.redirect_uris += urls + metadata["redirect_uris"] += urls logger.info("Adding {} to urls".format(urls)) else: - client.redirect_uris = urls + if isinstance(urls, list): + metadata["redirect_uris"] = urls + else: + metadata["redirect_uris"] = [urls] logger.info("Changing urls to {}".format(urls)) if delete_urls: - client.redirect_uris = [] + metadata["redirect_uris"] = [] 
logger.info("Deleting urls") if set_auto_approve: client.auto_approve = True @@ -124,19 +128,21 @@ def modify_client_action( logger.info("Updating description to {}".format(description)) if allowed_scopes: if append: - new_scopes = client._allowed_scopes.split() + allowed_scopes - client._allowed_scopes = " ".join(new_scopes) + new_scopes = client.scope.split() + allowed_scopes + metadata["scope"] = " ".join(new_scopes) logger.info("Adding {} to allowed_scopes".format(allowed_scopes)) else: - client._allowed_scopes = " ".join(allowed_scopes) + metadata["scope"] = " ".join(allowed_scopes) logger.info("Updating allowed_scopes to {}".format(allowed_scopes)) if expires_in: client.expires_at = get_client_expires_at( - expires_in=expires_in, grant_types=client.grant_type + expires_in=expires_in, grant_types=client.grant_types ) + # Call setter on Json object to persist changes if any + client.set_client_metadata(metadata) s.commit() - if arborist is not None and policies: - arborist.update_client(client.client_id, policies) + if arborist is not None and policies: + arborist.update_client(client.client_id, policies) def create_client_action( @@ -210,10 +216,10 @@ def delete_expired_clients_action(DB, slack_webhook=None, warning_days=None): # to delete pass - def split_uris(uris): - if not uris: + def format_uris(uris): + if not uris or uris == [None]: return uris - return uris.split("\n") + return " ".join(uris) now = datetime.now().timestamp() driver = get_SQLAlchemyDriver(DB) @@ -229,7 +235,7 @@ def split_uris(uris): for client in clients: expired_messages.append( - f"Client '{client.name}' (ID '{client.client_id}') expired at {datetime.fromtimestamp(client.expires_at)} UTC. Redirect URIs: {split_uris(client.redirect_uri)})" + f"Client '{client.name}' (ID '{client.client_id}') expired at {datetime.fromtimestamp(client.expires_at)} UTC. Redirect URIs: {format_uris(client.redirect_uris)})" ) _remove_client_service_accounts(current_session, client) current_session.delete(client) @@ -251,7 +257,7 @@ def split_uris(uris): expiring_messages = ["Some OIDC clients are expiring soon!"] expiring_messages.extend( [ - f"Client '{client.name}' (ID '{client.client_id}') expires at {datetime.fromtimestamp(client.expires_at)} UTC. Redirect URIs: {split_uris(client.redirect_uri)}" + f"Client '{client.name}' (ID '{client.client_id}') expires at {datetime.fromtimestamp(client.expires_at)} UTC. 
Redirect URIs: {format_uris(client.redirect_uris)}" for client in expiring_clients ] ) @@ -312,7 +318,7 @@ def rotate_client_action(DB, client_name, expires_in=None): # the rest is identical to the client being rotated user=client.user, redirect_uris=client.redirect_uris, - _allowed_scopes=client._allowed_scopes, + allowed_scopes=client.scope, description=client.description, name=client.name, auto_approve=client.auto_approve, @@ -1111,7 +1117,7 @@ def _verify_google_service_account_member(session, access_group, member): class JWTCreator(object): required_kwargs = ["kid", "private_key", "username", "scopes"] - all_kwargs = required_kwargs + ["expires_in"] + all_kwargs = required_kwargs + ["expires_in", "client_id"] default_expiration = 3600 @@ -1125,6 +1131,7 @@ def __init__(self, db, base_url, **kwargs): self.private_key = None self.username = None self.scopes = None + self.client_id = None for required_kwarg in self.required_kwargs: if required_kwarg not in kwargs: @@ -1185,6 +1192,10 @@ def create_refresh_token(self): raise EnvironmentError( "no user found with given username: " + self.username ) + if not self.client_id: + raise EnvironmentError( + "no client id is provided. Required for creating refresh token" + ) jwt_result = generate_signed_refresh_token( self.kid, self.private_key, @@ -1192,6 +1203,7 @@ def create_refresh_token(self): self.expires_in, self.scopes, iss=self.base_url, + client_id=self.client_id, ) current_session.add( diff --git a/fence/sync/sync_users.py b/fence/sync/sync_users.py index 9b80495a2..b4262a4ce 100644 --- a/fence/sync/sync_users.py +++ b/fence/sync/sync_users.py @@ -537,7 +537,7 @@ def _parse_csv(self, file_dict, sess, dbgap_config={}, encrypted=True): """ user_projects = dict() - user_info = dict() + user_info = defaultdict(dict) # parse dbGaP sftp server information dbgap_key = dbgap_config.get("decrypt_key", None) @@ -572,6 +572,7 @@ def _parse_csv(self, file_dict, sess, dbgap_config={}, encrypted=True): ] # when converting the YAML from fence-config, python reads it as Python string literal. 
So "\" turns into "\\" which messes with the regex match project_id_patterns += patterns + self.logger.info(f"Using these file paths: {file_dict.items()}") for filepath, privileges in file_dict.items(): self.logger.info("Reading file {}".format(filepath)) if os.stat(filepath).st_size == 0: @@ -687,9 +688,9 @@ def _parse_csv(self, file_dict, sess, dbgap_config={}, encrypted=True): tags["pi"] = row["downloader for names"] user_info[username] = { - "email": row.get("email") or "", + "email": row.get("email") or user_info[username].get('email') or "", "display_name": display_name, - "phone_number": row.get("phone") or "", + "phone_number": row.get("phone") or user_info[username].get('phone_number') or "", "tags": tags, } @@ -1614,7 +1615,7 @@ def _download(self, dbgap_config): return dbgap_files except Exception as e: self.logger.error(e) - exit(1) + raise def _sync(self, sess): """ @@ -1638,6 +1639,8 @@ def _sync(self, sess): local_csv_file_list = glob.glob( os.path.join(self.sync_from_local_csv_dir, "*") ) + # Sort the list so the order of of files is consistent across platforms + local_csv_file_list.sort() user_projects_csv, user_info_csv = self._merge_multiple_local_csv_files( local_csv_file_list, diff --git a/fence/utils.py b/fence/utils.py index 21bfd4ac2..f17fc2d2b 100644 --- a/fence/utils.py +++ b/fence/utils.py @@ -19,7 +19,8 @@ from fence.models import Client, User, query_for_user from fence.errors import NotFound, UserError from fence.config import config - +from authlib.oauth2.rfc6749.util import scope_to_list +from authlib.oauth2.rfc6749.errors import InvalidScopeError rng = SystemRandom() alphanumeric = string.ascii_uppercase + string.ascii_lowercase + string.digits @@ -108,7 +109,7 @@ def create_client( client_secret=hashed_secret, user=user, redirect_uris=urls, - _allowed_scopes=" ".join(allowed_scopes), + allowed_scopes=" ".join(allowed_scopes), description=description, name=name, auto_approve=auto_approve, @@ -216,7 +217,7 @@ def clear_cookies(response): Set all cookies to empty and expired. 
""" for cookie_name in list(flask.request.cookies.keys()): - response.set_cookie(cookie_name, "", expires=0, httponly=True) + response.set_cookie(key=cookie_name, value="", expires=0, httponly=True) def get_error_params(error, description): @@ -517,3 +518,22 @@ def get_SQLAlchemyDriver(db_conn_url): "max_tries": config["DEFAULT_BACKOFF_SETTINGS_MAX_TRIES"], "giveup": exception_do_not_retry, } + + +def validate_scopes(request_scopes, client): + if not client: + raise Exception("Client object is None") + + if request_scopes: + scopes = scope_to_list(request_scopes) + # can we get some debug logs here that log the client, what scopes they have, and what scopes were requested + if not client.check_requested_scopes(set(scopes)): + logger.debug( + "Request Scope are " + + " ".join(scopes) + + " but client supported scopes are " + + client.scope + ) + raise InvalidScopeError("Failed to Authorize due to unsupported scope") + + return True diff --git a/migrations/models/migration_client.py b/migrations/models/migration_client.py new file mode 100644 index 000000000..bc818f09e --- /dev/null +++ b/migrations/models/migration_client.py @@ -0,0 +1,63 @@ +from authlib.integrations.sqla_oauth2 import OAuth2ClientMixin +from sqlalchemy import Boolean, Column, Integer, String, Text, func +from sqlalchemy.orm import Session, backref, relationship +from sqlalchemy.schema import ForeignKey +from userdatamodel import Base +from userdatamodel.models import User +import time + +# This needs to be in a different file +# Otherwise SqlAlchemy would import this multiple times and then complain about metadata conflict +class MigrationClient(Base): + + __tablename__ = "migration_client" + + client_id = Column(String(48), primary_key=True, index=True) + # this is hashed secret + client_secret = Column(String(120), unique=True, index=True, nullable=True) + + # human readable name + name = Column(String(40), nullable=False) + + # human readable description, not required + description = Column(String(400)) + + # required if you need to support client credential + user_id = Column(Integer) + + # this is for internal microservices to skip user grant + auto_approve = Column(Boolean, default=False) + + # public or confidential + is_confidential = Column(Boolean, default=True) + + issued_at = Column(Integer, nullable=False, default=lambda: int(time.time())) + + expires_at = Column(Integer, nullable=False, default=0) + + _redirect_uris = Column(Text) + + _allowed_scopes = Column(Text, nullable=False, default="") + + # Deprecated, keeping these around in case it is needed later + _default_scopes = Column(Text) + _scopes = ["compute", "storage", "user"] + + redirect_uri = Column(Text) + token_endpoint_auth_method = Column(String(48), default="client_secret_basic") + grant_type = Column(Text, nullable=False, default="") + response_type = Column(Text, nullable=False, default="") + scope = Column(Text, nullable=False, default="") + + client_name = Column(String(100)) + client_uri = Column(Text) + logo_uri = Column(Text) + contact = Column(Text) + tos_uri = Column(Text) + policy_uri = Column(Text) + jwks_uri = Column(Text) + jwks_text = Column(Text) + i18n_metadata = Column(Text) + + software_id = Column(String(36)) + software_version = Column(String(48)) diff --git a/migrations/versions/9b3a5a7145d7_authlib_update_1_2_1.py b/migrations/versions/9b3a5a7145d7_authlib_update_1_2_1.py new file mode 100644 index 000000000..05a3c7200 --- /dev/null +++ b/migrations/versions/9b3a5a7145d7_authlib_update_1_2_1.py @@ -0,0 +1,330 @@ +"""authlib 
update 1.2.1 + +Revision ID: 9b3a5a7145d7 +Revises: a04a70296688 +Create Date: 2023-09-01 10:27:16.686456 + +""" +from alembic import op +import logging +import sqlalchemy as sa +from sqlalchemy.orm import Session +from sqlalchemy.sql import text + +import json +from authlib.common.encoding import json_loads, json_dumps + +from migrations.models.migration_client import MigrationClient +from fence.models import Client + +# revision identifiers, used by Alembic. +revision = "9b3a5a7145d7" # pragma: allowlist secret +down_revision = "a04a70296688" # pragma: allowlist secret +branch_labels = None +depends_on = None + +logger = logging.getLogger("fence.alembic") + + +def upgrade(): + # Remove google_service_account_client_id_fkey if it exists + remove_foreign_key_constraint_if_exists(op) + temp_table_name = "migration_client" + # Make a copy of client table + copy_client_to_temp_table_and_clear_data(op, temp_table_name) + + # Add new columns for client table + op.add_column("client", sa.Column("client_metadata", sa.Text(), nullable=True)) + op.add_column( + "client", + sa.Column( + "client_secret_expires_at", sa.Integer(), nullable=False, server_default="0" + ), + ) + + # Modify columns for client table + op.alter_column("client", "issued_at", new_column_name="client_id_issued_at") + op.alter_column("client", "client_id", nullable=False, type_=sa.String(48)) + op.alter_column("client", "client_secret", nullable=True, type_=sa.String(120)) + + # Delete old columns for client table + op.drop_column("client", "redirect_uri") + op.drop_column("client", "token_endpoint_auth_method") + op.drop_column("client", "grant_type") + op.drop_column("client", "response_type") + op.drop_column("client", "scope") + op.drop_column("client", "client_name") + op.drop_column("client", "client_uri") + op.drop_column("client", "logo_uri") + op.drop_column("client", "contact") + op.drop_column("client", "tos_uri") + op.drop_column("client", "policy_uri") + op.drop_column("client", "jwks_uri") + op.drop_column("client", "jwks_text") + op.drop_column("client", "i18n_metadata") + op.drop_column("client", "software_id") + op.drop_column("client", "software_version") + op.drop_column("client", "_allowed_scopes") + op.drop_column("client", "_redirect_uris") + + transform_client_data(op) + + # Drop temp table + op.drop_table(temp_table_name) + + # Add New Columns for authorization_code Table + op.add_column( + "authorization_code", sa.Column("code_challenge", sa.Text(), nullable=True) + ) + op.add_column( + "authorization_code", + sa.Column("code_challenge_method", sa.String(length=48), nullable=True), + ) + + +def downgrade(): + + temp_table_name = "migration_client" + # Make a copy of client table + copy_client_to_temp_table_and_clear_data(op, temp_table_name) + + # Add Old Columns Back + op.add_column("client", sa.Column("redirect_uri", sa.Text(), nullable=True)) + op.add_column( + "client", + sa.Column("token_endpoint_auth_method", sa.String(length=48), nullable=True), + ) + op.add_column( + "client", sa.Column("grant_type", sa.Text(), nullable=False, server_default="") + ) + op.add_column( + "client", + sa.Column("response_type", sa.Text(), nullable=False, server_default=""), + ) + op.add_column( + "client", sa.Column("scope", sa.Text(), nullable=False, server_default="") + ) + op.add_column( + "client", sa.Column("client_name", sa.String(length=100), nullable=True) + ) + op.add_column("client", sa.Column("client_uri", sa.Text(), nullable=True)) + op.add_column("client", sa.Column("logo_uri", sa.Text(), nullable=True)) 
+ op.add_column("client", sa.Column("contact", sa.Text(), nullable=True)) + op.add_column("client", sa.Column("tos_uri", sa.Text(), nullable=True)) + op.add_column("client", sa.Column("policy_uri", sa.Text(), nullable=True)) + op.add_column("client", sa.Column("jwks_uri", sa.Text(), nullable=True)) + op.add_column("client", sa.Column("jwks_text", sa.Text(), nullable=True)) + op.add_column("client", sa.Column("i18n_metadata", sa.Text(), nullable=True)) + op.add_column( + "client", sa.Column("software_id", sa.String(length=36), nullable=True) + ) + op.add_column( + "client", sa.Column("software_version", sa.String(length=48), nullable=True) + ) + op.add_column( + "client", + sa.Column("_allowed_scopes", sa.Text(), nullable=False, server_default=""), + ) + op.add_column("client", sa.Column("_redirect_uris", sa.Text(), nullable=True)) + + # Modify Columns for client Table + op.alter_column("client", "client_id_issued_at", new_column_name="issued_at") + op.alter_column("client", "client_id", nullable=False, type_=sa.String(40)) + op.alter_column("client", "client_secret", nullable=True, type_=sa.String(60)) + + # Drop New Columns for client Table + op.drop_column("client", "client_metadata") + op.drop_column("client", "client_secret_expires_at") + + # Set value of old columns + set_old_column_values() + op.drop_table(temp_table_name) + + # Remove New Columns for authorization_code Table + op.drop_column("authorization_code", "code_challenge") + op.drop_column("authorization_code", "code_challenge_method") + + +def copy_client_to_temp_table_and_clear_data(op, temp_table_name: str): + """Copy client table schema and data into temp table""" + conn = op.get_bind() + session = Session(bind=conn) + # Drop temp table if it already exists + # copy client table with all table metadata then copy all row data + session.execute("DROP TABLE IF EXISTS " + temp_table_name + ";") + session.execute("CREATE TABLE " + temp_table_name + " (LIKE client INCLUDING ALL);") + session.execute("INSERT INTO " + temp_table_name + " SELECT * FROM client;") + session.execute("Truncate client;") + session.commit() + + +def transform_client_data(op): + conn = op.get_bind() + session = Session(bind=conn) + + for client in session.query(MigrationClient).all(): + if client.i18n_metadata: + metadata = json.loads(client.i18n_metadata) + else: + metadata = {} + + if client.redirect_uri: + metadata["redirect_uris"] = client.redirect_uri.splitlines() + if client.token_endpoint_auth_method: + metadata["token_endpoint_auth_method"] = client.token_endpoint_auth_method + if client.grant_type: + metadata["grant_types"] = client.grant_type.splitlines() + if client.response_type: + metadata["response_types"] = client.response_type.splitlines() + if client.client_uri: + metadata["client_uri"] = client.client_uri + if client.logo_uri: + metadata["logo_uri"] = client.logo_uri + if client.contact: + metadata["contact"] = client.contact + if client.tos_uri: + metadata["tos_uri"] = client.tos_uri + if client.policy_uri: + metadata["policy_uri"] = client.policy_uri + if client.jwks_uri: + metadata["jwks_uri"] = client.jwks_uri + if client.jwks_text: + metadata["jwks_text"] = client.jwks_text + if client.software_id: + metadata["software_id"] = client.software_id + if client.software_version: + metadata["software_version"] = client.software_version + + new_client = Client( + client_id=client.client_id, + client_secret=client.client_secret, + name=client.name, + description=client.description, + allowed_scopes=client._allowed_scopes.split(" "), + 
user_id=client.user_id, + auto_approve=client.auto_approve, + is_confidential=client.is_confidential, + expires_at=client.expires_at, + _default_scopes=client._default_scopes, + grant_types=client.grant_type.splitlines(), + response_types=client.response_type.splitlines(), + client_id_issued_at=client.issued_at, + _client_metadata=json_dumps(metadata), + ) + + session.add(new_client) + + session.commit() + + +def set_old_column_values(): + conn = op.get_bind() + session = Session(bind=conn) + clientDatas = [] + + rs = session.execute("SELECT * FROM migration_client") + for client in rs: + data = {} + data["client_id"] = client.client_id + data["client_secret"] = client.client_secret + data["name"] = client.name + data["description"] = client.description + data["user_id"] = client.user_id + data["auto_approve"] = client.auto_approve + data["is_confidential"] = client.is_confidential + data["expires_at"] = client.expires_at + data["issued_at"] = client.client_id_issued_at + data["_default_scopes"] = client._default_scopes + data["_redirect_uris"] = None # Deprecated + data["scope"] = "" # Deprecated + data["client_name"] = None + + if client.client_metadata: + metadata = json_loads(client.client_metadata) + data["i18n_metadata"] = client.client_metadata + else: + metadata = {} + data["i18n_metadata"] = None + + if metadata.get("redirect_uris"): + data["redirect_uri"] = "\n".join( + [item for item in metadata.get("redirect_uris") if item] + ) + else: + data["redirect_uri"] = "" + + data["token_endpoint_auth_method"] = metadata.get("token_endpoint_auth_method") + data["_allowed_scopes"] = metadata.get("scope") + + if metadata.get("grant_types"): + data["grant_type"] = "\n".join( + [item for item in metadata.get("grant_types") if item] + ) + else: + data["grant_type"] = "" + + if metadata.get("response_types"): + data["response_type"] = "\n".join( + [item for item in metadata.get("response_types") if item] + ) + else: + data["response_type"] = "" + + data["client_uri"] = metadata.get("client_uri") + data["logo_uri"] = metadata.get("logo_uri") + data["contact"] = metadata.get("contact") + data["tos_uri"] = metadata.get("tos_uri") + data["policy_uri"] = metadata.get("policy_uri") + data["jwks_uri"] = metadata.get("jwks_uri") + data["jwks_text"] = metadata.get("jwks_text") + data["software_id"] = metadata.get("software_id") + data["software_version"] = metadata.get("software_version") + + clientDatas.append(data) + + statement = text( + """INSERT INTO client(client_id, client_secret, name, description, + user_id, auto_approve, is_confidential, issued_at, expires_at, _redirect_uris, _allowed_scopes, + _default_scopes, redirect_uri, token_endpoint_auth_method, grant_type, response_type, scope, + client_name,client_uri,logo_uri,contact,tos_uri,policy_uri,jwks_uri,jwks_text,i18n_metadata, + software_id,software_version) + VALUES( :client_id, :client_secret, :name, :description, :user_id, :auto_approve, :is_confidential, :issued_at, + :expires_at, :_redirect_uris, :_allowed_scopes, :_default_scopes, :redirect_uri, :token_endpoint_auth_method, + :grant_type, :response_type, :scope, :client_name, :client_uri, :logo_uri, :contact, :tos_uri, :policy_uri, + :jwks_uri, :jwks_text, :i18n_metadata, :software_id, :software_version)""" + ) + + for data in clientDatas: + conn.execute(statement, **data) + + session.commit() + + +def remove_foreign_key_constraint_if_exists(op): + """ + Pre-alembic era created a foreign key clent_id(from the client table) on the google_service_account table. 
+    This foreign key was then removed from the schema, but any commons created before the constraint
+    was removed still held the foreign key.
+    The previous alembic migration truncates the client table, but this fails if the foreign key
+    constraint still persists, therefore failing the migration.
+    This migration checks for the existence of the foreign key constraint and removes it if it exists.
+    There is no downgrade path for this since not having the foreign key constraint is the correct
+    schema throughout all versions.
+    This migration is specifically for commons that were created before the foreign key constraint
+    was removed.
+    """
+    conn = op.get_bind()
+    inspector = sa.inspect(conn)
+    foreign_keys = inspector.get_foreign_keys("google_service_account")
+    fk_exists = False
+    for fk in foreign_keys:
+        if "client_id" in fk["constrained_columns"]:
+            fk_exists = True
+
+    if fk_exists:
+        logger.info("Foreign key client_id exists. Removing constraint...")
+        op.drop_constraint(
+            "google_service_account_client_id_fkey",
+            "google_service_account",
+            type_="foreignkey",
+        )
+    else:
+        logger.debug(
+            "Foreign key client_id does not exist. This is expected for newer versions of the service."
+        )
diff --git a/openapis/swagger.yaml b/openapis/swagger.yaml
index 70d1f8e1a..17ea7eb17 100644
--- a/openapis/swagger.yaml
+++ b/openapis/swagger.yaml
@@ -68,6 +68,18 @@ paths:
          description: successful operation
          schema:
            $ref: '#/definitions/SystemVersionOutputRef'
+  /metrics:
+    get:
+      tags:
+        - system
+      summary: Get Prometheus metrics
+      description: >-
+        Returns Prometheus metrics if the `ENABLE_PROMETHEUS_METRICS` setting is `True`.
+        By default, this endpoint is public. Authorization controls can be set up externally;
+        in cloud-automation setups, access to this endpoint is blocked at the revproxy level.
+      responses:
+        '200':
+          description: successful operation
  /oauth2/authorize:
    get:
      tags:
@@ -565,6 +577,8 @@ paths:
        Previous authorization check requires a more general, global upload permission:
        "file_upload" on "/data_file" resource. When "authz" is *not* provided, this
        endpoint will check for that permission for your user.
+
+        Accepts a "guid" field in the request body. If "guid" is provided, it checks indexd for an existing record. If not found, it raises a 404.
      security:
        - OAuth2:
            - user
@@ -590,6 +604,8 @@ paths:
                url:
                  type: string
                  description: the presigned URL usable for data upload
+        404:
+          description: Record with the given GUID not found.
  '/data/upload/{file_id}':
    get:
      tags:
@@ -666,7 +682,10 @@ paths:
        flow, Fence needs to provide a list of endpoints for supporting multipart upload presigned url
        This is the first step on the API side for the multipart upload presigned url. This endpoint
        causes fence to make a request to indexd to create a new, blank index record, and returns
-        the GUID for this new record and an uploadId for multipart upload presigned url
+        the GUID for this new record and an uploadId for the multipart upload presigned url.
+
+
+        Accepts a "guid" field in the request body. If "guid" is provided, it checks indexd for an existing record. If not found, it raises a 404.
      security:
        - OAuth2:
            - user
@@ -692,6 +711,8 @@ paths:
                uploadId:
                  type: string
                  description: the uploadId for multipart upload presigned URL usable for data upload
+        404:
+          description: Record with the given GUID not found.

  '/multipart/upload':
    post:
@@ -1686,6 +1707,10 @@ components:
          description: >-
            the requested bucket to upload to. If not provided, defaults to
            the configured DATA_UPLOAD_BUCKET.
+ guid: + type: string + required: false + description: GUID to be assigned to the object expires_in: type: integer description: optional integer specifying the presigned URL lifetime @@ -1696,6 +1721,7 @@ components: description: requested authorization resources to be set on the resulting indexed record. You must have proper authorization to set this example: + guid: "123456abcd" file_name: "my_file.bam" bucket: "bucket1" expires_in: 1200 @@ -1708,6 +1734,10 @@ components: type: string required: true description: the file name to use for this upload + guid: + type: string + required: false + description: GUID to be assigned to the object expires_in: type: integer description: optional integer specifying the presigned URL lifetime diff --git a/poetry.lock b/poetry.lock index 5c5f490b7..418229818 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,13 +13,13 @@ files = [ [[package]] name = "alembic" -version = "1.13.1" +version = "1.13.2" description = "A database migration tool for SQLAlchemy." optional = false python-versions = ">=3.8" files = [ - {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"}, - {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"}, + {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, + {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, ] [package.dependencies] @@ -46,13 +46,13 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] [[package]] name = "anyio" -version = "4.3.0" +version = "4.4.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, ] [package.dependencies] @@ -78,64 +78,60 @@ files = [ [[package]] name = "attrs" -version = "23.2.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +benchmark = 
["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] -name = "Authlib" -version = "0.11" -description = "The ultimate Python library in building OAuth and OpenID Connect servers." +name = "authlib" +version = "1.3.2" +description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." optional = false -python-versions = "*" -files = [] -develop = false +python-versions = ">=3.8" +files = [ + {file = "Authlib-1.3.2-py2.py3-none-any.whl", hash = "sha256:ede026a95e9f5cdc2d4364a52103f5405e75aa156357e831ef2bfd0bc5094dfc"}, + {file = "authlib-1.3.2.tar.gz", hash = "sha256:4b16130117f9eb82aa6eec97f6dd4673c3f960ac0283ccdae2897ee4bc030ba2"}, +] [package.dependencies] cryptography = "*" -requests = "*" - -[package.source] -type = "git" -url = "https://github.com/uc-cdis/authlib" -reference = "v0.11_CVE_patch_v1" -resolved_reference = "80345f2877ec2a1a29468aa465c07623347c3ef6" [[package]] name = "authutils" -version = "6.2.2" +version = "6.2.5" description = "Gen3 auth utility functions" optional = false -python-versions = ">=3.9,<4.0" +python-versions = "<4.0,>=3.9" files = [ - {file = "authutils-6.2.2-py3-none-any.whl", hash = "sha256:df9b551b4ab561452f0f4b50edaddccc443905b4d77ee69ea7eea78938e7caed"}, - {file = "authutils-6.2.2.tar.gz", hash = "sha256:ded3e5c0e35160eab83bfb217976920396441e19ed977acacbb769e988323850"}, + {file = "authutils-6.2.5-py3-none-any.whl", hash = "sha256:ef91c9c7c750123c28b7376be9ca00b4e89b2d52fa183dec9bfe681d8eac6227"}, + {file = "authutils-6.2.5.tar.gz", hash = "sha256:0d496721e9f0d8c69b34aff8f6fccdc7768ca4f104504d68e70fd647d4c23b19"}, ] [package.dependencies] -authlib = "0.11.0" +authlib = ">=1.1.0" cached-property = ">=1.4,<2.0" cdiserrors = "<2.0.0" +cryptography = ">=41.0.6" httpx = ">=0.23.0,<1.0.0" pyjwt = {version = ">=2.4.0,<3.0", extras = ["crypto"]} xmltodict = ">=0.9,<1.0" [package.extras] fastapi = ["fastapi (>=0.65.2,<0.66.0)"] -flask = ["Flask (>=0.10.1)"] +flask = ["Flask (<=2.3.3)"] [[package]] name = "aws-xray-sdk" @@ -155,13 +151,13 @@ wrapt = "*" [[package]] name = "azure-core" -version = "1.30.1" +version = "1.30.2" description = "Microsoft Azure Core Library for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "azure-core-1.30.1.tar.gz", hash = "sha256:26273a254131f84269e8ea4464f3560c731f29c0c1f69ac99010845f239c1a8f"}, - {file = "azure_core-1.30.1-py3-none-any.whl", hash = "sha256:7c5ee397e48f281ec4dd773d67a0a47a0962ed6fa833036057f9ea067f688e74"}, + {file = "azure-core-1.30.2.tar.gz", hash = "sha256:a14dc210efcd608821aa472d9fb8e8d035d29b68993819147bc290a8ac224472"}, + {file = "azure_core-1.30.2-py3-none-any.whl", hash = "sha256:cf019c1ca832e96274ae85abd3d9f752397194d9fea3b41487290562ac8abe4a"}, ] [package.dependencies] @@ -174,23 +170,23 @@ aio = 
["aiohttp (>=3.0)"] [[package]] name = "azure-storage-blob" -version = "12.19.1" +version = "12.22.0" description = "Microsoft Azure Blob Storage Client Library for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "azure-storage-blob-12.19.1.tar.gz", hash = "sha256:13e16ba42fc54ac2c7e8f976062173a5c82b9ec0594728e134aac372965a11b0"}, - {file = "azure_storage_blob-12.19.1-py3-none-any.whl", hash = "sha256:c5530dc51c21c9564e4eb706cd499befca8819b10dd89716d3fc90d747556243"}, + {file = "azure-storage-blob-12.22.0.tar.gz", hash = "sha256:b3804bb4fe8ab1c32771fa464053da772a682c2737b19da438a3f4e5e3b3736e"}, + {file = "azure_storage_blob-12.22.0-py3-none-any.whl", hash = "sha256:bb7d2d824ce3f11f14a27ee7d9281289f7e072ac8311c52e3652672455b7d5e8"}, ] [package.dependencies] -azure-core = ">=1.28.0,<2.0.0" +azure-core = ">=1.28.0" cryptography = ">=2.1.4" isodate = ">=0.6.1" -typing-extensions = ">=4.3.0" +typing-extensions = ">=4.6.0" [package.extras] -aio = ["azure-core[aio] (>=1.28.0,<2.0.0)"] +aio = ["azure-core[aio] (>=1.28.0)"] [[package]] name = "backoff" @@ -232,13 +228,13 @@ typecheck = ["mypy"] [[package]] name = "blinker" -version = "1.8.1" +version = "1.8.2" description = "Fast, simple object-to-object and broadcast signaling" optional = false python-versions = ">=3.8" files = [ - {file = "blinker-1.8.1-py3-none-any.whl", hash = "sha256:5f1cdeff423b77c31b89de0565cd03e5275a03028f44b2b15f912632a58cced6"}, - {file = "blinker-1.8.1.tar.gz", hash = "sha256:da44ec748222dcd0105ef975eed946da197d5bdf8bafb6aa92f5bc89da63fa25"}, + {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, ] [[package]] @@ -254,17 +250,17 @@ files = [ [[package]] name = "boto3" -version = "1.34.94" +version = "1.35.7" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.34.94-py3-none-any.whl", hash = "sha256:bbb87d641c73462e53b1777083b55c8f13921618ad08757478a8122985c56c13"}, - {file = "boto3-1.34.94.tar.gz", hash = "sha256:22f65b3c9b7a419f8f39c2dddc421e14fab8cbb3bd8a9d467e874237d39f59b1"}, + {file = "boto3-1.35.7-py3-none-any.whl", hash = "sha256:bfbdf7c8f2e3eb70e4309cdcf5c9c7940e1fed4f645cdfb52581e7e67d3c8cab"}, + {file = "boto3-1.35.7.tar.gz", hash = "sha256:05bd349cf260ba177924f38d721e427e2b3a6dd0fa8a18fa4ffc1b889633b181"}, ] [package.dependencies] -botocore = ">=1.34.94,<1.35.0" +botocore = ">=1.35.7,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -273,13 +269,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.94" +version = "1.35.7" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.94-py3-none-any.whl", hash = "sha256:f00a79002e0cb9d6895ecd0919c506402850177d7b6c4d2634fa2da362d95bcb"}, - {file = "botocore-1.34.94.tar.gz", hash = "sha256:99b11be9a28f9051af4c96fa121e9c3f22a86d499abd773c9e868b2a38961bae"}, + {file = "botocore-1.35.7-py3-none-any.whl", hash = "sha256:324e58518a92f2946bc6653e5e1272bc88d4b6313413f938bdc51cb90d34cbba"}, + {file = "botocore-1.35.7.tar.gz", hash = "sha256:85e4b58f2c6e54dfbf52eaee72ebc9b70188fd1716d47f626874abadcee45512"}, ] [package.dependencies] @@ -291,7 +287,7 @@ urllib3 = [ ] [package.extras] -crt = ["awscrt (==0.20.9)"] +crt = ["awscrt (==0.21.2)"] [[package]] name = "cached-property" @@ -317,13 +313,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.3" +version = "5.5.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, ] [[package]] @@ -355,18 +351,18 @@ files = [ [[package]] name = "cdispyutils" -version = "2.0.1" +version = "2.1.0" description = "This package includes several utility Python tools for the Gen3 stack." optional = false -python-versions = ">=3.6,<4.0" +python-versions = "<4.0,>=3.9" files = [ - {file = "cdispyutils-2.0.1-py3-none-any.whl", hash = "sha256:9a269014c657c87830e00d9b581280bfbe57a8708bbf3e0cf21a141d3810ab06"}, - {file = "cdispyutils-2.0.1.tar.gz", hash = "sha256:b6bfef5b3c77afe1d7705124d021eb579b500f9fcc07a66dc0f8fe8d130e6c23"}, + {file = "cdispyutils-2.1.0-py3-none-any.whl", hash = "sha256:ca1310ebb7e1b971d183823a8294e1dc9d8d55f862aa3c83e0feac7125364308"}, + {file = "cdispyutils-2.1.0.tar.gz", hash = "sha256:1c87830ea1d537f8479364b9473cca037f5ac8906e874471920f69622e3a4431"}, ] [package.dependencies] -cdiserrors = ">=1.0.0,<2.0.0" -cryptography = ">=3.2" +cdiserrors = "*" +cryptography = "*" Flask = "*" PyJWT = "*" requests = "*" @@ -388,74 +384,89 @@ resolved_reference = "74a607736ca4af5ec35f17830ab9b78b5db15837" [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.0" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, + {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, + {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, + {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, + {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, + {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, + {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, + {file = 
"cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, + {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, + {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, + {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, + {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, + {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, + {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, + {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, + {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, ] [package.dependencies] @@ -688,43 +699,38 @@ yaml = ["PyYAML (>=3.10)"] [[package]] name = "cryptography" -version = "42.0.5" +version = "43.0.0" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, - {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, - {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, - {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, - {file = 
"cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, - {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, - {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, - {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, + {file = "cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf"}, + {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55"}, + {file = 
"cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431"}, + {file = "cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc"}, + {file = "cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778"}, + {file = "cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f"}, + {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0"}, + {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b"}, + {file = "cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf"}, + {file = "cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1"}, + {file = "cryptography-43.0.0.tar.gz", hash = "sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e"}, ] [package.dependencies] @@ -737,7 +743,7 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.0)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", 
"pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -773,22 +779,23 @@ wmi = ["wmi (>=1.5.1)"] [[package]] name = "docker" -version = "7.0.0" +version = "7.1.0" description = "A Python library for the Docker Engine API." optional = false python-versions = ">=3.8" files = [ - {file = "docker-7.0.0-py3-none-any.whl", hash = "sha256:12ba681f2777a0ad28ffbcc846a69c31b4dfd9752b47eb425a274ee269c5e14b"}, - {file = "docker-7.0.0.tar.gz", hash = "sha256:323736fb92cd9418fc5e7133bc953e11a9da04f4483f828b527db553f1e7e5a3"}, + {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, + {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, ] [package.dependencies] -packaging = ">=14.0" pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} requests = ">=2.26.0" urllib3 = ">=1.26.0" [package.extras] +dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"] +docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] ssh = ["paramiko (>=2.4.3)"] websockets = ["websocket-client (>=1.3.0)"] @@ -837,13 +844,13 @@ idna = ">=2.0.0" [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -851,13 +858,13 @@ test = ["pytest (>=6)"] [[package]] name = "flask" -version = "2.3.2" +version = "3.0.3" description = "A simple framework for building complex web applications." 
optional = false python-versions = ">=3.8" files = [ - {file = "Flask-2.3.2-py3-none-any.whl", hash = "sha256:77fd4e1249d8c9923de34907236b747ced06e5467ecac1a7bb7115ae0e9670b0"}, - {file = "Flask-2.3.2.tar.gz", hash = "sha256:8c2f9abd47a9e8df7f0c3f091ce9497d011dc3b31effcf4c85a6e2b50f4114ef"}, + {file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"}, + {file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"}, ] [package.dependencies] @@ -866,7 +873,7 @@ click = ">=8.1.3" importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} itsdangerous = ">=2.1.2" Jinja2 = ">=3.1.2" -Werkzeug = ">=2.3.3" +Werkzeug = ">=3.0.0" [package.extras] async = ["asgiref (>=3.2)"] @@ -874,13 +881,13 @@ dotenv = ["python-dotenv"] [[package]] name = "flask-cors" -version = "4.0.0" +version = "4.0.1" description = "A Flask extension adding a decorator for CORS support" optional = false python-versions = "*" files = [ - {file = "Flask-Cors-4.0.0.tar.gz", hash = "sha256:f268522fcb2f73e2ecdde1ef45e2fd5c71cc48fe03cffb4b441c6d1b40684eb0"}, - {file = "Flask_Cors-4.0.0-py2.py3-none-any.whl", hash = "sha256:bc3492bfd6368d27cfe79c7821df5a8a319e1a6d5eab277a3794be19bdc51783"}, + {file = "Flask_Cors-4.0.1-py2.py3-none-any.whl", hash = "sha256:f2a704e4458665580c074b714c4627dd5a306b333deb9074d0b1794dfa2fb677"}, + {file = "flask_cors-4.0.1.tar.gz", hash = "sha256:eeb69b342142fdbf4766ad99357a7f3876a2ceb77689dc10ff912aac06c389e4"}, ] [package.dependencies] @@ -954,17 +961,18 @@ six = ">=1.16.0,<2.0.0" [[package]] name = "gen3cirrus" -version = "3.0.1" +version = "3.1.0" description = "" optional = false -python-versions = ">=3.9,<4.0" +python-versions = "<4.0,>=3.9" files = [ - {file = "gen3cirrus-3.0.1-py3-none-any.whl", hash = "sha256:74628faca3b1cbe65c78e08eb567e1ac0cb8ae52e1bfc603f904af0277e3cb52"}, - {file = "gen3cirrus-3.0.1.tar.gz", hash = "sha256:0ae0ddc0ee7df870603457fe186245f3c8124d989254276e5011a23e1139a6c8"}, + {file = "gen3cirrus-3.1.0-py3-none-any.whl", hash = "sha256:42c89d1579d7d89c87c5c355815197e1dbf8045a2030310a6b2f6ca089a74fde"}, + {file = "gen3cirrus-3.1.0.tar.gz", hash = "sha256:81e5a0a4b5dc2d820ad3351bd5326151424806bc8f64e022100320f01027a310"}, ] [package.dependencies] backoff = "*" +boto3 = "*" cdislogging = "*" google-api-python-client = "*" google-auth = "*" @@ -991,36 +999,37 @@ six = "*" [[package]] name = "gen3users" -version = "1.0.3" +version = "1.1.1" description = "Utils for Gen3 Commons user management" optional = false -python-versions = ">=3.9,<4.0" +python-versions = "<4.0,>=3.9" files = [ - {file = "gen3users-1.0.3-py3-none-any.whl", hash = "sha256:faf07717b7df28ea2c25a308e49c65d8ed69e14945c6f36e99deb697240bb8bb"}, - {file = "gen3users-1.0.3.tar.gz", hash = "sha256:a2269433ab886c23db37050144821405c7d5dfcbbadccc43302611aad9e34525"}, + {file = "gen3users-1.1.1-py3-none-any.whl", hash = "sha256:5a38ba90c8cef5f7c4ed6ae2f1f1d733524d48b1b2c60e66db8537e36194faab"}, + {file = "gen3users-1.1.1.tar.gz", hash = "sha256:6636ff127ce145f9104fc72358dd17de54b19be19ae45b89e13876c0adcf4ba0"}, ] [package.dependencies] cdislogging = ">=1,<2" click = "*" pyyaml = ">=6,<7" +requests = "*" [[package]] name = "google-api-core" -version = "2.19.0" +version = "2.19.1" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.19.0.tar.gz", hash = 
"sha256:cf1b7c2694047886d2af1128a03ae99e391108a08804f87cfd35970e49c9cd10"}, - {file = "google_api_core-2.19.0-py3-none-any.whl", hash = "sha256:8661eec4078c35428fd3f69a2c7ee29e342896b70f01d1a1cbcb334372dd6251"}, + {file = "google-api-core-2.19.1.tar.gz", hash = "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd"}, + {file = "google_api_core-2.19.1-py3-none-any.whl", hash = "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125"}, ] [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" [package.extras] @@ -1030,13 +1039,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.127.0" +version = "2.142.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.127.0.tar.gz", hash = "sha256:bbb51b0fbccdf40e536c26341e372d7800f09afebb53103bbcc94e08f14b523b"}, - {file = "google_api_python_client-2.127.0-py2.py3-none-any.whl", hash = "sha256:d01c70c7840ec37888aa02b1aea5d9baba4c1701e268d1a0251640afd56e5e90"}, + {file = "google_api_python_client-2.142.0-py2.py3-none-any.whl", hash = "sha256:266799082bb8301f423ec204dffbffb470b502abbf29efd1f83e644d36eb5a8f"}, + {file = "google_api_python_client-2.142.0.tar.gz", hash = "sha256:a1101ac9e24356557ca22f07ff48b7f61fa5d4b4e7feeef3bda16e5dcb86350e"}, ] [package.dependencies] @@ -1048,13 +1057,13 @@ uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.29.0" +version = "2.34.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, - {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, + {file = "google_auth-2.34.0-py2.py3-none-any.whl", hash = "sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65"}, + {file = "google_auth-2.34.0.tar.gz", hash = "sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc"}, ] [package.dependencies] @@ -1064,7 +1073,7 @@ rsa = ">=3.1.4,<5" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +enterprise-cert = ["cryptography", "pyopenssl"] pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] requests = ["requests (>=2.20.0,<3.0.0.dev0)"] @@ -1104,13 +1113,13 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] [[package]] name = "google-cloud-storage" -version = "2.16.0" +version = "2.18.2" description = "Google Cloud Storage API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-storage-2.16.0.tar.gz", hash = "sha256:dda485fa503710a828d01246bd16ce9db0823dc51bbca742ce96a6817d58669f"}, - {file = "google_cloud_storage-2.16.0-py2.py3-none-any.whl", hash = 
"sha256:91a06b96fb79cf9cdfb4e759f178ce11ea885c79938f89590344d079305f5852"}, + {file = "google_cloud_storage-2.18.2-py2.py3-none-any.whl", hash = "sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166"}, + {file = "google_cloud_storage-2.18.2.tar.gz", hash = "sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99"}, ] [package.dependencies] @@ -1118,11 +1127,12 @@ google-api-core = ">=2.15.0,<3.0.0dev" google-auth = ">=2.26.1,<3.0dev" google-cloud-core = ">=2.3.0,<3.0dev" google-crc32c = ">=1.0,<2.0dev" -google-resumable-media = ">=2.6.0" +google-resumable-media = ">=2.7.2" requests = ">=2.18.0,<3.0.0dev" [package.extras] -protobuf = ["protobuf (<5.0.0dev)"] +protobuf = ["protobuf (<6.0.0dev)"] +tracing = ["opentelemetry-api (>=1.1.0)"] [[package]] name = "google-crc32c" @@ -1206,13 +1216,13 @@ testing = ["pytest"] [[package]] name = "google-resumable-media" -version = "2.7.0" +version = "2.7.2" description = "Utilities for Google Media Downloads and Resumable Uploads" optional = false -python-versions = ">= 3.7" +python-versions = ">=3.7" files = [ - {file = "google-resumable-media-2.7.0.tar.gz", hash = "sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b"}, - {file = "google_resumable_media-2.7.0-py2.py3-none-any.whl", hash = "sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08"}, + {file = "google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa"}, + {file = "google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0"}, ] [package.dependencies] @@ -1224,17 +1234,17 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.63.0" +version = "1.65.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, - {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, + {file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"}, + {file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"}, ] [package.dependencies] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] @@ -1358,13 +1368,13 @@ pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0 [[package]] name = "httpx" -version = "0.27.0" +version = "0.27.2" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, ] [package.dependencies] @@ -1379,36 +1389,37 @@ brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "idna" -version = "2.10" +version = "3.8" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" files = [ - {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, - {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] [[package]] name = "importlib-metadata" -version = "7.1.0" +version = "8.4.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, + {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, + {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "isodate" @@ -1437,13 +1448,13 @@ files = [ [[package]] name = "jinja2" -version = "3.1.3" +version = "3.1.4" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] [package.dependencies] @@ -1475,13 +1486,13 @@ files = [ [[package]] name = "jsonpickle" -version = "3.0.4" -description = "Serialize any Python object to JSON" +version = "3.2.2" +description = "Python library for serializing arbitrary object graphs into JSON" optional = false python-versions = ">=3.7" files = [ - {file = "jsonpickle-3.0.4-py3-none-any.whl", hash = "sha256:04ae7567a14269579e3af66b76bda284587458d7e8a204951ca8f71a3309952e"}, - {file = "jsonpickle-3.0.4.tar.gz", hash = "sha256:a1b14c8d6221cd8f394f2a97e735ea1d7edc927fbd135b26f2f8700657c8c62b"}, + {file = "jsonpickle-3.2.2-py3-none-any.whl", hash = "sha256:87cd82d237fd72c5a34970e7222dddc0accc13fddf49af84111887ed9a9445aa"}, + {file = "jsonpickle-3.2.2.tar.gz", hash = "sha256:d425fd2b8afe9f5d7d57205153403fbf897782204437882a477e8eed60930f8c"}, ] [package.extras] @@ -1491,13 +1502,13 @@ testing = ["bson", "ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", [[package]] name = "mako" -version = "1.3.3" +version = "1.3.5" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = false python-versions = ">=3.8" files = [ - {file = "Mako-1.3.3-py3-none-any.whl", hash = "sha256:5324b88089a8978bf76d1629774fcc2f1c07b82acdf00f4c5dd8ceadfffc4b40"}, - {file = "Mako-1.3.3.tar.gz", hash = "sha256:e16c01d9ab9c11f7290eef1cfefc093fb5a45ee4a3da09e2fec2e4d1bae54e73"}, + {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, + {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, ] [package.dependencies] @@ -1510,13 +1521,13 @@ testing = ["pytest"] [[package]] name = "markdown" -version = "3.6" +version = "3.7" description = "Python implementation of John Gruber's Markdown." optional = false python-versions = ">=3.8" files = [ - {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, - {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, + {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"}, + {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, ] [package.dependencies] @@ -1597,13 +1608,13 @@ files = [ [[package]] name = "marshmallow" -version = "3.21.1" +version = "3.22.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
optional = false python-versions = ">=3.8" files = [ - {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, - {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, + {file = "marshmallow-3.22.0-py3-none-any.whl", hash = "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9"}, + {file = "marshmallow-3.22.0.tar.gz", hash = "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e"}, ] [package.dependencies] @@ -1611,7 +1622,7 @@ packaging = ">=17.0" [package.extras] dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] +docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] tests = ["pytest", "pytz", "simplejson"] [[package]] @@ -1655,13 +1666,13 @@ test = ["unittest2 (>=1.1.0)"] [[package]] name = "more-itertools" -version = "10.2.0" +version = "10.4.0" description = "More routines for operating on iterables, beyond itertools" optional = false python-versions = ">=3.8" files = [ - {file = "more-itertools-10.2.0.tar.gz", hash = "sha256:8fccb480c43d3e99a00087634c06dd02b0d50fbf088b380de5a41a015ec239e1"}, - {file = "more_itertools-10.2.0-py3-none-any.whl", hash = "sha256:686b06abe565edfab151cb8fd385a05651e1fdf8f0a14191e4439283421f8684"}, + {file = "more-itertools-10.4.0.tar.gz", hash = "sha256:fe0e63c4ab068eac62410ab05cccca2dc71ec44ba8ef29916a0090df061cf923"}, + {file = "more_itertools-10.4.0-py3-none-any.whl", hash = "sha256:0f7d9f83a0a8dcfa8a2694a770590d98a67ea943e3d9f5298309a484758c4e27"}, ] [[package]] @@ -1718,24 +1729,24 @@ six = ">=1.6.1" [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] name = "paramiko" -version = "3.4.0" +version = "3.4.1" description = "SSH2 protocol library" optional = false python-versions = ">=3.6" files = [ - {file = "paramiko-3.4.0-py3-none-any.whl", hash = "sha256:43f0b51115a896f9c00f59618023484cb3a14b98bbceab43394a39c6739b7ee7"}, - {file = "paramiko-3.4.0.tar.gz", hash = "sha256:aac08f26a31dc4dffd92821527d1682d99d52f9ef6851968114a8728f3c274d3"}, + {file = "paramiko-3.4.1-py3-none-any.whl", hash = "sha256:8e49fd2f82f84acf7ffd57c64311aa2b30e575370dc23bdb375b10262f7eac32"}, + {file = "paramiko-3.4.1.tar.gz", hash = "sha256:8b15302870af7f6652f2e038975c1d2973f06046cb5d7d65355668b3ecbece0c"}, ] [package.dependencies] @@ -1750,13 +1761,13 @@ invoke = ["invoke (>=2.0)"] [[package]] name = "pbr" -version = "6.0.0" +version = "6.1.0" description = "Python Build Reasonableness" optional = false python-versions = ">=2.6" files = [ - {file = "pbr-6.0.0-py2.py3-none-any.whl", hash = 
"sha256:4a7317d5e3b17a3dccb6a8cfe67dab65b20551404c52c8ed41279fa4f0cb4cda"}, - {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, + {file = "pbr-6.1.0-py2.py3-none-any.whl", hash = "sha256:a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a"}, + {file = "pbr-6.1.0.tar.gz", hash = "sha256:788183e382e3d1d7707db08978239965e8b9e4e5ed42669bf4758186734d5f24"}, ] [[package]] @@ -1780,28 +1791,27 @@ resolved_reference = "6ad948acc21e34ce420fdde927679bd7e1276951" [[package]] name = "pluggy" -version = "1.5.0" +version = "0.13.1" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.8" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, + {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, + {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, ] [package.extras] dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] [[package]] name = "prometheus-client" -version = "0.9.0" +version = "0.20.0" description = "Python client for the Prometheus monitoring system." optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "prometheus_client-0.9.0-py2.py3-none-any.whl", hash = "sha256:b08c34c328e1bf5961f0b4352668e6c8f145b4a087e09b7296ef62cbe4693d35"}, - {file = "prometheus_client-0.9.0.tar.gz", hash = "sha256:9da7b32f02439d8c04f7777021c304ed51d9ec180604700c1ba72a4d44dceb03"}, + {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, + {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, ] [package.extras] @@ -1809,39 +1819,39 @@ twisted = ["twisted"] [[package]] name = "proto-plus" -version = "1.23.0" +version = "1.24.0" description = "Beautiful, Pythonic protocol buffers." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, - {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, + {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, + {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, ] [package.dependencies] -protobuf = ">=3.19.0,<5.0.0dev" +protobuf = ">=3.19.0,<6.0.0dev" [package.extras] -testing = ["google-api-core[grpc] (>=1.31.5)"] +testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "4.25.3" +version = "5.27.3" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, - {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, - {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, - {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, - {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, - {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, - {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, - {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, - {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, + {file = "protobuf-5.27.3-cp310-abi3-win32.whl", hash = "sha256:dcb307cd4ef8fec0cf52cb9105a03d06fbb5275ce6d84a6ae33bc6cf84e0a07b"}, + {file = "protobuf-5.27.3-cp310-abi3-win_amd64.whl", hash = "sha256:16ddf3f8c6c41e1e803da7abea17b1793a97ef079a912e42351eabb19b2cffe7"}, + {file = "protobuf-5.27.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:68248c60d53f6168f565a8c76dc58ba4fa2ade31c2d1ebdae6d80f969cdc2d4f"}, + {file = "protobuf-5.27.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:b8a994fb3d1c11156e7d1e427186662b64694a62b55936b2b9348f0a7c6625ce"}, + {file = "protobuf-5.27.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:a55c48f2a2092d8e213bd143474df33a6ae751b781dd1d1f4d953c128a415b25"}, + {file = "protobuf-5.27.3-cp38-cp38-win32.whl", hash = "sha256:043853dcb55cc262bf2e116215ad43fa0859caab79bb0b2d31b708f128ece035"}, + {file = "protobuf-5.27.3-cp38-cp38-win_amd64.whl", hash = "sha256:c2a105c24f08b1e53d6c7ffe69cb09d0031512f0b72f812dd4005b8112dbe91e"}, + {file = "protobuf-5.27.3-cp39-cp39-win32.whl", hash = "sha256:c84eee2c71ed83704f1afbf1a85c3171eab0fd1ade3b399b3fad0884cbcca8bf"}, + {file = 
"protobuf-5.27.3-cp39-cp39-win_amd64.whl", hash = "sha256:af7c0b7cfbbb649ad26132e53faa348580f844d9ca46fd3ec7ca48a1ea5db8a1"}, + {file = "protobuf-5.27.3-py3-none-any.whl", hash = "sha256:8572c6533e544ebf6899c360e91d6bcbbee2549251643d32c52cf8a5de295ba5"}, + {file = "protobuf-5.27.3.tar.gz", hash = "sha256:82460903e640f2b7e34ee81a947fdaad89de796d324bcbc38ff5430bcdead82c"}, ] [[package]] @@ -1879,13 +1889,13 @@ files = [ [[package]] name = "pyaml" -version = "24.4.0" +version = "24.7.0" description = "PyYAML-based module to produce a bit more pretty and readable YAML-serialized data" optional = false python-versions = ">=3.8" files = [ - {file = "pyaml-24.4.0-py3-none-any.whl", hash = "sha256:acc2b39c55cb0cbe4f694a6d3886f89ad3d2a5b3efcece526202f8de9a6b27de"}, - {file = "pyaml-24.4.0.tar.gz", hash = "sha256:0e483d9289010e747a325dc43171bcc39d6562dd1dd4719e8cc7e7c96c99fce6"}, + {file = "pyaml-24.7.0-py3-none-any.whl", hash = "sha256:6b06596cb5ac438a3fad1e1bf5775088c4d3afb927e2b03a29305d334835deb2"}, + {file = "pyaml-24.7.0.tar.gz", hash = "sha256:5d0fdf9e681036fb263a783d0298fc3af580a6e2a6cf1a3314ffc48dc3d91ccb"}, ] [package.dependencies] @@ -1976,13 +1986,13 @@ files = [ [[package]] name = "pyjwt" -version = "2.8.0" +version = "2.9.0" description = "JSON Web Token implementation in Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, ] [package.dependencies] @@ -1990,8 +2000,8 @@ cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryp [package.extras] crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] @@ -2022,13 +2032,13 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pyparsing" -version = "3.1.2" +version = "3.1.4" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, - {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, + {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"}, + {file = "pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"}, ] [package.extras] @@ -2036,58 +2046,63 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "3.10.1" +version = "5.4.3" description = "pytest: 
simple powerful testing with Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.5" files = [ - {file = "pytest-3.10.1-py2.py3-none-any.whl", hash = "sha256:3f193df1cfe1d1609d4c583838bea3d532b18d6160fd3f55c9447fdca30848ec"}, - {file = "pytest-3.10.1.tar.gz", hash = "sha256:e246cf173c01169b9617fc07264b7b1316e78d7a650055235d6d897bc80d9660"}, + {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"}, + {file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"}, ] [package.dependencies] -atomicwrites = ">=1.0" +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=17.4.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} more-itertools = ">=4.0.0" -pluggy = ">=0.7" +packaging = "*" +pluggy = ">=0.12,<1.0" py = ">=1.5.0" -setuptools = "*" -six = ">=1.10.0" +wcwidth = "*" + +[package.extras] +checkqa-mypy = ["mypy (==v0.761)"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] [[package]] name = "pytest-cov" -version = "2.9.0" +version = "2.12.1" description = "Pytest plugin for measuring coverage." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pytest-cov-2.9.0.tar.gz", hash = "sha256:b6a814b8ed6247bd81ff47f038511b57fe1ce7f4cc25b9106f1a4b106f1d9322"}, - {file = "pytest_cov-2.9.0-py2.py3-none-any.whl", hash = "sha256:c87dfd8465d865655a8213859f1b4749b43448b5fae465cb981e16d52a811424"}, + {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, + {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, ] [package.dependencies] -coverage = ">=4.4" -pytest = ">=3.6" +coverage = ">=5.2.1" +pytest = ">=4.6" +toml = "*" [package.extras] -testing = ["fields", "hunter", "process-tests (==2.0.2)", "pytest-xdist", "six", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] name = "pytest-flask" -version = "0.15.1" +version = "1.3.0" description = "A set of py.test fixtures to test Flask applications." 
optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "pytest-flask-0.15.1.tar.gz", hash = "sha256:cbd8c5b9f8f1b83e9c159ac4294964807c4934317a5fba181739ac15e1b823e6"}, - {file = "pytest_flask-0.15.1-py2.py3-none-any.whl", hash = "sha256:9001f6128c5c4a0d243ce46c117f3691052828d2faf39ac151b8388657dce447"}, + {file = "pytest-flask-1.3.0.tar.gz", hash = "sha256:58be1c97b21ba3c4d47e0a7691eb41007748506c36bf51004f78df10691fa95e"}, + {file = "pytest_flask-1.3.0-py3-none-any.whl", hash = "sha256:c0e36e6b0fddc3b91c4362661db83fa694d1feb91fa505475be6732b5bc8c253"}, ] [package.dependencies] Flask = "*" -pytest = ">=3.6" -Werkzeug = ">=0.7" +pytest = ">=5.2" +Werkzeug = "*" [package.extras] docs = ["Sphinx", "sphinx-rtd-theme"] @@ -2163,73 +2178,75 @@ files = [ [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = 
"PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = 
"sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = 
"PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -2244,13 +2261,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "responses" -version = "0.25.0" +version = "0.25.3" description = "A utility library for mocking out the `requests` Python library." 
optional = false python-versions = ">=3.8" files = [ - {file = "responses-0.25.0-py3-none-any.whl", hash = "sha256:2f0b9c2b6437db4b528619a77e5d565e4ec2a9532162ac1a131a83529db7be1a"}, - {file = "responses-0.25.0.tar.gz", hash = "sha256:01ae6a02b4f34e39bffceb0fc6786b67a25eae919c6368d05eabc8d9576c2a66"}, + {file = "responses-0.25.3-py3-none-any.whl", hash = "sha256:521efcbc82081ab8daa588e08f7e8a64ce79b91c39f6e62199b19159bea7dbcb"}, + {file = "responses-0.25.3.tar.gz", hash = "sha256:617b9247abd9ae28313d57a75880422d55ec63c29d33d629697590a034358dba"}, ] [package.dependencies] @@ -2292,13 +2309,13 @@ pyasn1 = ">=0.1.3" [[package]] name = "s3transfer" -version = "0.10.1" +version = "0.10.2" description = "An Amazon S3 Transfer Manager" optional = false -python-versions = ">= 3.8" +python-versions = ">=3.8" files = [ - {file = "s3transfer-0.10.1-py3-none-any.whl", hash = "sha256:ceb252b11bcf87080fb7850a224fb6e05c8a776bab8f2b64b7f25b969464839d"}, - {file = "s3transfer-0.10.1.tar.gz", hash = "sha256:5683916b4c724f799e600f41dd9e10a9ff19871bf87623cc8f491cb4f5fa0a19"}, + {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, + {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, ] [package.dependencies] @@ -2307,22 +2324,6 @@ botocore = ">=1.33.2,<2.0a.0" [package.extras] crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] -[[package]] -name = "setuptools" -version = "69.5.1" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, - {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - [[package]] name = "six" version = "1.16.0" @@ -2347,57 +2348,55 @@ files = [ [[package]] name = "sqlalchemy" -version = "1.4.52" +version = "1.4.53" description = "Database Abstraction Library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "SQLAlchemy-1.4.52-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f68016f9a5713684c1507cc37133c28035f29925c75c0df2f9d0f7571e23720a"}, - {file = 
"SQLAlchemy-1.4.52-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24bb0f81fbbb13d737b7f76d1821ec0b117ce8cbb8ee5e8641ad2de41aa916d3"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e93983cc0d2edae253b3f2141b0a3fb07e41c76cd79c2ad743fc27eb79c3f6db"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:84e10772cfc333eb08d0b7ef808cd76e4a9a30a725fb62a0495877a57ee41d81"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:427988398d2902de042093d17f2b9619a5ebc605bf6372f7d70e29bde6736842"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-win32.whl", hash = "sha256:1296f2cdd6db09b98ceb3c93025f0da4835303b8ac46c15c2136e27ee4d18d94"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-win_amd64.whl", hash = "sha256:80e7f697bccc56ac6eac9e2df5c98b47de57e7006d2e46e1a3c17c546254f6ef"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2f251af4c75a675ea42766880ff430ac33291c8d0057acca79710f9e5a77383d"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8f9e4c4718f111d7b530c4e6fb4d28f9f110eb82e7961412955b3875b66de0"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afb1672b57f58c0318ad2cff80b384e816735ffc7e848d8aa51e0b0fc2f4b7bb"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-win32.whl", hash = "sha256:6e41cb5cda641f3754568d2ed8962f772a7f2b59403b95c60c89f3e0bd25f15e"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-win_amd64.whl", hash = "sha256:5bed4f8c3b69779de9d99eb03fd9ab67a850d74ab0243d1be9d4080e77b6af12"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:49e3772eb3380ac88d35495843daf3c03f094b713e66c7d017e322144a5c6b7c"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:618827c1a1c243d2540314c6e100aee7af09a709bd005bae971686fab6723554"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9acf369aaadb71a725b7e83a5ef40ca3de1cf4cdc93fa847df6b12d3cd924b"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-win32.whl", hash = "sha256:763bd97c4ebc74136ecf3526b34808c58945023a59927b416acebcd68d1fc126"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-win_amd64.whl", hash = "sha256:f12aaf94f4d9679ca475975578739e12cc5b461172e04d66f7a3c39dd14ffc64"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:853fcfd1f54224ea7aabcf34b227d2b64a08cbac116ecf376907968b29b8e763"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f98dbb8fcc6d1c03ae8ec735d3c62110949a3b8bc6e215053aa27096857afb45"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e135fff2e84103bc15c07edd8569612ce317d64bdb391f49ce57124a73f45c5"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b5de6af8852500d01398f5047d62ca3431d1e29a331d0b56c3e14cb03f8094c"}, - {file = 
"SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3491c85df263a5c2157c594f54a1a9c72265b75d3777e61ee13c556d9e43ffc9"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-win32.whl", hash = "sha256:427c282dd0deba1f07bcbf499cbcc9fe9a626743f5d4989bfdfd3ed3513003dd"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-win_amd64.whl", hash = "sha256:ca5ce82b11731492204cff8845c5e8ca1a4bd1ade85e3b8fcf86e7601bfc6a39"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:29d4247313abb2015f8979137fe65f4eaceead5247d39603cc4b4a610936cd2b"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a752bff4796bf22803d052d4841ebc3c55c26fb65551f2c96e90ac7c62be763a"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7ea11727feb2861deaa293c7971a4df57ef1c90e42cb53f0da40c3468388000"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d913f8953e098ca931ad7f58797f91deed26b435ec3756478b75c608aa80d139"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a251146b921725547ea1735b060a11e1be705017b568c9f8067ca61e6ef85f20"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-win32.whl", hash = "sha256:1f8e1c6a6b7f8e9407ad9afc0ea41c1f65225ce505b79bc0342159de9c890782"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-win_amd64.whl", hash = "sha256:346ed50cb2c30f5d7a03d888e25744154ceac6f0e6e1ab3bc7b5b77138d37710"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4dae6001457d4497736e3bc422165f107ecdd70b0d651fab7f731276e8b9e12d"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5d2e08d79f5bf250afb4a61426b41026e448da446b55e4770c2afdc1e200fce"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbce5dd7c7735e01d24f5a60177f3e589078f83c8a29e124a6521b76d825b85"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bdb7b4d889631a3b2a81a3347c4c3f031812eb4adeaa3ee4e6b0d028ad1852b5"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c294ae4e6bbd060dd79e2bd5bba8b6274d08ffd65b58d106394cb6abbf35cf45"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-win32.whl", hash = "sha256:bcdfb4b47fe04967669874fb1ce782a006756fdbebe7263f6a000e1db969120e"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-win_amd64.whl", hash = "sha256:7d0dbc56cb6af5088f3658982d3d8c1d6a82691f31f7b0da682c7b98fa914e91"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a551d5f3dc63f096ed41775ceec72fdf91462bb95abdc179010dc95a93957800"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ab773f9ad848118df7a9bbabca53e3f1002387cdbb6ee81693db808b82aaab0"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d2de46f5d5396d5331127cfa71f837cca945f9a2b04f7cb5a01949cf676db7d1"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7027be7930a90d18a386b25ee8af30514c61f3852c7268899f23fdfbd3107181"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99224d621affbb3c1a4f72b631f8393045f4ce647dd3262f12fe3576918f8bf3"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-win32.whl", hash = "sha256:c124912fd4e1bb9d1e7dc193ed482a9f812769cb1e69363ab68e01801e859821"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-win_amd64.whl", hash = "sha256:2c286fab42e49db23c46ab02479f328b8bdb837d3e281cae546cc4085c83b680"}, - {file = "SQLAlchemy-1.4.52.tar.gz", hash = "sha256:80e63bbdc5217dad3485059bdf6f65a7d43f33c8bde619df5c220edf03d87296"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b61ac5457d91b5629a3dea2b258deb4cdd35ac8f6fa2031d2b9b2fff5b3396da"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a96aa8d425047551676b0e178ddb0683421e78eda879ab55775128b2e612cae"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e10ac36f0b994235c13388b39598bf27219ec8bdea5be99bdac612b01cbe525"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:437592b341a3229dd0443c9c803b0bf0a466f8f539014fef6cdb9c06b7edb7f9"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:784272ceb5eb71421fea9568749bcbe8bd019261a0e2e710a7efa76057af2499"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-win32.whl", hash = "sha256:122d7b5722df1a24402c6748bbb04687ef981493bb559d0cc0beffe722e0e6ed"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-win_amd64.whl", hash = "sha256:4604d42b2abccba266d3f5bbe883684b5df93e74054024c70d3fbb5eea45e530"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fb8e15dfa47f5de11ab073e12aadd6b502cfb7ac4bafd18bd18cfd1c7d13dbbc"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc8be4df55e8fde3006d9cb1f6b3df2ba26db613855dc4df2c0fcd5ec15cb3b7"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b11640251f9a9789fd96cd6e5d176b1c230230c70ad40299bcbcc568451b4c"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-win32.whl", hash = "sha256:cd534c716f86bdf95b7b984a34ee278c91d1b1d7d183e7e5ff878600b1696046"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-win_amd64.whl", hash = "sha256:6dd06572872ca13ef5a90306a3e5af787498ddaa17fb00109b1243642646cd69"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2774c24c405136c3ef472e2352bdca7330659d481fbf2283f996c0ef9eb90f22"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68a614765197b3d13a730d631a78c3bb9b3b72ba58ed7ab295d58d517464e315"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d13d4dfbc6e52363886b47cf02cf68c5d2a37c468626694dc210d7e97d4ad330"}, + {file = 
"SQLAlchemy-1.4.53-cp312-cp312-win32.whl", hash = "sha256:197065b91456574d70b6459bfa62bc0b52a4960a29ef923c375ec427274a3e05"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-win_amd64.whl", hash = "sha256:421306c4b936b0271a3ce2dc074928d5ece4a36f9c482daa5770f44ecfc3a883"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:13fc34b35d8ddb3fbe3f8fcfdf6c2546e676187f0fb20f5774da362ddaf8fa2d"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626be971ff89541cfd3e70b54be00b57a7f8557204decb6223ce0428fec058f3"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:991e42fdfec561ebc6a4fae7161a86d129d6069fa14210b96b8dd752afa7059c"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:95123f3a1e0e8020848fd32ba751db889a01a44e4e4fef7e58c87ddd0b2fca59"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c58e011e9e6373b3a091d83f20601fb335a3b4bace80bfcb914ac168aad3b70d"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:670c7769bf5dcae9aff331247b5d82fe635c63731088a46ce68ba2ba519ef36e"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07ba54f09033d387ae9df8d62cbe211ed7304e0bfbece1f8c55e21db9fae5c11"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a38834b4c183c33daf58544281395aad2e985f0b47cca1e88ea5ada88344e63"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:616492f5315128a847f293a7c552f3561ac7e996d2aa5dc46bef4fb0d3781f1d"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0cf8c0af9563892c6632f7343bc393dfce6eeef8e4d10c5fadba9c0390520bd"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-win32.whl", hash = "sha256:c05fe05941424c2f3747a8952381b7725e24cba2ca00141380e54789d5b616b6"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-win_amd64.whl", hash = "sha256:93e90aa3e3b2f8e8cbae4d5509f8e0cf82972378d323c740a8df1c1e9f484172"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:9d7368df54d3ed45a18955f6cec38ebe075290594ac0d5c87a8ddaff7e10de27"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89d8ac4158ef68eea8bb0f6dd0583127d9aa8720606964ba8eee20b254f9c83a"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16bb9fa4d00b4581b14d9f0e2224dc7745b854aa4687738279af0f48f7056c98"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4fe5168d0249c23f537950b6d75935ff2709365a113e29938a979aec36668ecf"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b8608d162d3bd29d807aab32c3fb6e2f8e225a43d1c54c917fed38513785380"}, + {file = 
"SQLAlchemy-1.4.53-cp38-cp38-win32.whl", hash = "sha256:a9d4d132198844bd6828047135ce7b887687c92925049a2468a605fc775c7a1a"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-win_amd64.whl", hash = "sha256:c15d1f1fcf1f9bec0499ae1d9132b950fcc7730f2d26d10484c8808b4e077816"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:edf094a20a386ff2ec73de65ef18014b250259cb860edc61741e240ca22d6981"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a9c3514ff19d9d30d8a8d378b24cd1dfa5528d20891481cb5f196117db6a48"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaaeedbceb4dfd688fff2faf25a9a87a391f548811494f7bff7fa701b639abc3"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d021699b9007deb7aa715629078830c99a5fec2753d9bdd5ff33290d363ef755"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0465b8a68f8f4de754c1966c45b187ac784ad97bc9747736f913130f0e1adea0"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-win32.whl", hash = "sha256:5f67b9e9dcac3241781e96575468d55a42332157dee04bdbf781df573dff5f85"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-win_amd64.whl", hash = "sha256:a8c2f2a0b2c4e3b86eb58c9b6bb98548205eea2fba9dae4edfd29dc6aebbe95a"}, + {file = "SQLAlchemy-1.4.53.tar.gz", hash = "sha256:5e6ab710c4c064755fd92d1a417bef360228a19bdf0eee32b03aa0f5f8e9fe0d"}, ] [package.dependencies] @@ -2408,31 +2407,42 @@ aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)", "mariadb (>=1.0.1,!=1.1.2)"] mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] +mssql-pymssql = ["pymssql", "pymssql"] +mssql-pyodbc = ["pyodbc", "pyodbc"] mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] -mysql-connector = ["mysql-connector-python"] +mysql-connector = ["mysql-connector-python", "mysql-connector-python"] oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-asyncpg = ["asyncpg", "asyncpg", "greenlet (!=0.4.17)", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)", "pg8000 (>=1.16.6,!=1.29.0)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] sqlcipher = ["sqlcipher3_binary"] +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + [[package]] name = "typing-extensions" -version = "4.11.0" +version = "4.12.2" description = "Backported and Experimental Type Hints 
for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] @@ -2448,13 +2458,13 @@ files = [ [[package]] name = "urllib3" -version = "1.26.18" +version = "1.26.19" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, - {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, + {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"}, + {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"}, ] [package.extras] @@ -2464,13 +2474,13 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] @@ -2493,15 +2503,26 @@ files = [ cdislogging = "*" sqlalchemy = ">=1.3.3" +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + [[package]] name = "werkzeug" -version = "2.3.8" +version = "3.0.4" description = "The comprehensive WSGI web application library." 
optional = false python-versions = ">=3.8" files = [ - {file = "werkzeug-2.3.8-py3-none-any.whl", hash = "sha256:bba1f19f8ec89d4d607a3bd62f1904bd2e609472d93cd85e9d4e178f472c3748"}, - {file = "werkzeug-2.3.8.tar.gz", hash = "sha256:554b257c74bbeb7a0d254160a4f8ffe185243f52a52035060b761ca62d977f03"}, + {file = "werkzeug-3.0.4-py3-none-any.whl", hash = "sha256:02c9eb92b7d6c06f31a782811505d2157837cea66aaede3e217c7c27c039476c"}, + {file = "werkzeug-3.0.4.tar.gz", hash = "sha256:34f2371506b250df4d4f84bfe7b0921e4762525762bbd936614909fe25cd7306"}, ] [package.dependencies] @@ -2619,20 +2640,24 @@ files = [ [[package]] name = "zipp" -version = "3.18.1" +version = "3.20.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, - {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, + {file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"}, + {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0.0" -content-hash = "d5cbebc2458df9ebe86afb177b9401cf8d76c87cf1cb4527adb6d34c68f2f0e6" +content-hash = "c65a486b449f3ea2f112f9ce88c76f029ac92af40bc6099056208269062faa86" diff --git a/pyproject.toml b/pyproject.toml old mode 100755 new mode 100644 index 1b77eaa3c..6764beb2c --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "fence" -version = "9.3.4" +version = "10.1.0" description = "Gen3 AuthN/AuthZ OIDC Service" authors = ["CTDS UChicago "] license = "Apache-2.0" @@ -13,16 +13,8 @@ include = [ [tool.poetry.dependencies] python = ">=3.9,<4.0.0" alembic = "^1.7.7" - -# Temporarily override authlib with a modified, forked version -# where we've made a security patch that got applied in a much later -# version. 
-# -# This is temporary while we work on the upgrade to the latest version -authlib = {git = "https://github.com/uc-cdis/authlib", rev = "v0.11_CVE_patch_v1"} -# authlib = "*" # let authutils decide which version we're using - -authutils = "^6.2.2" +authlib = "*" # let authutils decide which version to use +authutils = "^6.2.3" bcrypt = "^3.1.4" boto3 = "*" botocore = "*" @@ -30,10 +22,8 @@ cached_property = "^1.5.1" cdiserrors = "<2.0.0" cdislogging = "^1.0.0" cdispyutils = "^2.0.1" +flask = ">=3.0.0" cryptography = ">=42.0.5" - -# this will be updated when authlib is updated -flask = "==2.3.2" flask-cors = ">=3.0.3" flask-restful = ">=0.3.8" email_validator = "^1.1.1" @@ -41,14 +31,14 @@ gen3authz = "^1.5.1" gen3cirrus = ">=3.0.1" gen3config = ">=1.1.0" gen3users = "^1.0.2" -idna = "^2.10" # https://github.com/python-poetry/poetry/issues/3555 +idna = "^3.7" markdown = "^3.1.1" # this markupsafe pin is due to an error somewhere between Python 3.9.6 and 3.9.16 markupsafe = "^2.0.1" paramiko = ">=2.6.0" -prometheus-client = "^0.9.0" +prometheus-client = "<1" psycopg2 = "^2.8.3" pycryptodome = "3.9.8" PyJWT = "^2.4.0" @@ -61,13 +51,16 @@ sqlalchemy = "^1.3.3" marshmallow-sqlalchemy = "1.0.0" # storageclient = {git = "https://github.com/uc-cdis/storage-client", rev = "1.0.2"} userdatamodel = ">=2.4.3" -werkzeug = ">=2.2.3,<3.0.0" +werkzeug = ">=3.0.0" cachelib = "^0.2.0" azure-storage-blob = "^12.6.0" Flask-WTF = "^1.0.0" pcdcutils = {git = "https://github.com/chicagopcdc/pcdcutils.git", rev = "0.1.2"} # pcdcutils = {git = "https://github.com/chicagopcdc/pcdcutils.git", rev = "pcdc_dev"} boto = "*" +# NOTE: +# for testing with updated libraries as git repos: +# foobar = {git = "https://github.com/uc-cdis/some-repo", rev = "feat/test"} [tool.poetry.dev-dependencies] addict = "^2.2.1" @@ -76,9 +69,9 @@ codacy-coverage = "^1.3.11" coveralls = "^2.1.1" mock = "^2.0.0" moto = "^1.1.24" -pytest = "^3.2.3" +pytest = "^5.2.0" pytest-cov = "^2.5.1" -pytest-flask = ">=0.15.0" +pytest-flask = ">=1.3.0" [tool.poetry.scripts] fence-create = 'bin.fence_create:main' diff --git a/tests/admin/test_admin_users_endpoints.py b/tests/admin/test_admin_users_endpoints.py index 568f85169..1a310671b 100644 --- a/tests/admin/test_admin_users_endpoints.py +++ b/tests/admin/test_admin_users_endpoints.py @@ -112,14 +112,12 @@ def load_non_google_user_data(db_session, test_user_d): client = Client( client_id=userd_dict["client_id"], user_id=userd_dict["user_id"], - issued_at=420, - expires_at=42020, - redirect_uri="dclient.com", - grant_type="dgrant", - response_type="dresponse", - scope="dscope", + client_id_issued_at=420, + client_secret_expires_at=42020, + redirect_uris="dclient.com", + response_types="dresponse", name="dclientname", - _allowed_scopes="dallscopes", + allowed_scopes="dallscopes", ) grp = Group(id=userd_dict["group_id"]) usr_grp = UserToGroup( @@ -778,6 +776,7 @@ def assert_google_proxy_group_data_deleted(db_session): def test_delete_user_username( + app, client, admin_user, encoded_admin_jwt, diff --git a/tests/ci_commands_script.sh b/tests/ci_commands_script.sh new file mode 100755 index 000000000..fef98a668 --- /dev/null +++ b/tests/ci_commands_script.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +mkdir -p /var/tmp/uwsgi_flask_metrics/ || true +export PROMETHEUS_MULTIPROC_DIR="/var/tmp/uwsgi_flask_metrics/" +poetry run pytest -vv --cov=fence --cov-report xml tests diff --git a/tests/conftest.py b/tests/conftest.py index ac2b19313..78b90dbf2 100755 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -86,6
+86,7 @@ def mock_get_bucket_location(self, bucket, config): def claims_refresh(): new_claims = tests.utils.default_claims() new_claims["pur"] = "refresh" + new_claims["azp"] = "test-client" return new_claims @@ -474,7 +475,6 @@ def app(kid, rsa_private_key, rsa_public_key): yield fence.app - alembic_main(["--raiseerr", "downgrade", "base"]) mocker.unmock_functions() @@ -542,7 +542,9 @@ def drop_all(): connection = app.db.engine.connect() connection.begin() for table in reversed(models.Base.metadata.sorted_tables): - connection.execute(table.delete()) + # Delete table only if it exists + if app.db.engine.dialect.has_table(connection, table): + connection.execute(table.delete()) connection.close() request.addfinalizer(drop_all) @@ -550,6 +552,13 @@ def drop_all(): return app.db +@pytest.fixture +def prometheus_metrics_before(client): + resp = client.get("/metrics") + assert resp.status_code == 200, "Could not get prometheus metrics initial state" + yield resp.text + + @fence.app.route("/protected") @fence.auth.login_required({"access"}) def protected_endpoint(methods=["GET"]): @@ -964,6 +973,7 @@ def do_patch(authz): "mocker": mocker, # only gs or s3 for location, ignore specifiers after the _ "indexed_file_location": protocol.split("_")[0], + "record": record, } return output @@ -1358,7 +1368,7 @@ def oauth_client_B(app, request, db_session): @pytest.fixture(scope="function") -def oauth_client_public(app, db_session, oauth_user): +def oauth_client_public(app, db_session, oauth_user, get_all_shib_idps_patcher): """ Create a public OAuth2 client. """ @@ -1428,7 +1438,7 @@ def oauth_test_client_B(client, oauth_client_B): @pytest.fixture(scope="function") -def oauth_test_client_public(client, oauth_client_public): +def oauth_test_client_public(client, oauth_client_public, get_all_shib_idps_patcher): return OAuth2TestClient(client, oauth_client_public, confidential=False) @@ -1732,3 +1742,37 @@ def get_all_shib_idps_patcher(): yield mock get_all_shib_idps_patch.stop() + + +@pytest.fixture(scope="function") +def mock_authn_user_flask_context(app): + """ + Mock g and session to simulate a simple user who has authenticated. + + This is primarily to ensure that tests which mock the start of authN (where sessions get set) can still + test the callbacks (where metrics logging relies on session data).
+ """ + from flask import g + from flask import session + + g_before = copy.deepcopy(g) + session_before = copy.deepcopy(session) + + user_mock = MagicMock() + user_mock.id = 1 + + user_mocker = MagicMock() + user_mocker.return_value = user_mock + g.user = user_mocker + + session = MagicMock() + session.return_value = { + "fence_idp": "google", + "shib_idp": "shib_idp_foobar", + "client_id": "client_id_foobar", + } + + yield + + g = g_before + session = session_before diff --git a/tests/data/test_boto_manager.py b/tests/data/test_boto_manager.py new file mode 100644 index 000000000..2db6a8c12 --- /dev/null +++ b/tests/data/test_boto_manager.py @@ -0,0 +1,106 @@ +import pytest +from unittest.mock import MagicMock, patch +from fence.resources.aws.boto_manager import BotoManager + + +class TestData: + """Generate bucket test data that aims to mirror the default example Fence config file.""" + def __init__(self): + self.config = {} + self.buckets = {} + + def single_bucket(self): + self.config = { + 'CRED1': {'access_key': 'key1', 'secret_key': 'secret1'}, + } + self.buckets = { + 'bucket1': {'cred': 'CRED1', 'region': 'us-east-1', 'endpoint_url': 'https://example.com'}, + } + return self + + def multiple_buckets(self): + single_bucket = self.single_bucket() + self.config = single_bucket.config | { + 'CRED2': {'access_key': 'key2', 'secret_key': 'secret2'}, + } + self.buckets = single_bucket.buckets | { + 'bucket2': {'cred': 'CRED2', 'region': 'us-east-1'}, + 'bucket3': {'cred': '*'}, + 'bucket4': {'cred': 'CRED1', 'region': 'us-east-1', 'role-arn': 'arn:aws:iam::role1'} + } + return self + + +@patch('fence.resources.aws.boto_manager.client') +def test_create_s3_client_single(mock_client): + test_data = TestData().single_bucket() + config = test_data.config + buckets = test_data.buckets + logger = MagicMock() + boto_manager = BotoManager(config, buckets, logger) + + s3_clients = boto_manager.create_s3_clients(config, buckets) + + # Assert that the correct call was made to the client function + mock_client.assert_any_call('s3', access_key='key1', secret_key='secret1', endpoint_url='https://example.com') + + # Assert that the returned dictionary contains the correct client + assert len(s3_clients) == 1 + assert 'bucket1' in s3_clients + + +@patch('fence.resources.aws.boto_manager.client') +def test_create_s3_clients_multiple(mock_client): + test_data = TestData().multiple_buckets() + config = test_data.config + buckets = test_data.buckets + logger = MagicMock() + boto_manager = BotoManager(config, buckets, logger) + + # Call the method under test + s3_clients = boto_manager.create_s3_clients(config, buckets) + + # Assert that the correct calls were made to the client function + mock_client.assert_any_call('s3', access_key='key1', secret_key='secret1', endpoint_url='https://example.com') + mock_client.assert_any_call('s3', access_key='key2', secret_key='secret2') + mock_client.assert_any_call('s3') + mock_client.assert_any_call('s3', access_key='key1', secret_key='secret1') + + # Assert that the returned dictionary contains the correct clients + assert len(s3_clients) == 4 + assert 'bucket1' in s3_clients + assert 'bucket2' in s3_clients + assert 'bucket3' in s3_clients + assert 'bucket4' in s3_clients + + +@pytest.mark.parametrize("bucket", ['bucket1', 'bucket2', 'bucket3', 'bucket4']) +@patch('fence.resources.aws.boto_manager.client') +def test_delete_data_file(mock_client, bucket): + test_data = TestData().multiple_buckets() + config = test_data.config + buckets = test_data.buckets + logger = 
MagicMock() + boto_manager = BotoManager(config, buckets, logger) + + # Mock the response of list_objects_v2 to include the desired key + prefix = 'data/file.txt' + mock_list_objects_v2_response = { + 'Contents': [{'Key': prefix}] + } + # Set up the mock S3 client and its list_objects_v2 and delete_object methods + mock_s3_client = mock_client.return_value + mock_s3_client.list_objects_v2.return_value = mock_list_objects_v2_response + + result = boto_manager.delete_data_file(bucket, prefix) + + # Create S3 clients for each of the buckets + _ = boto_manager.create_s3_clients(config, buckets) + s3_client = boto_manager.get_s3_client(bucket) + s3_client.list_objects_v2.assert_called_once_with( + Bucket=bucket, Prefix=prefix, Delimiter="/" + ) + s3_client.delete_object.assert_called_once_with(Bucket=bucket, Key='data/file.txt') + + # Assert the expected result + assert result == ("", 204) diff --git a/tests/data/test_data.py b/tests/data/test_data.py index 6d6186bec..568fb97bd 100755 --- a/tests/data/test_data.py +++ b/tests/data/test_data.py @@ -819,6 +819,26 @@ def test_public_authz_object_upload_file( Test `GET /data/upload/1` in which the `1` Indexd record has authz populated with the public value. """ + did = str(uuid.uuid4()) + index_document = { + "did": did, + "baseid": "", + "rev": "", + "size": 10, + "file_name": "file1", + "urls": ["s3://bucket1/key-{}".format(did[:8])], + "acl": ["phs000789"], + "hashes": {}, + "metadata": {}, + "form": "", + "created_date": "", + "updated_date": "", + } + mock_index_document = mock.patch( + "fence.blueprints.data.indexd.BlankIndex.index_document", index_document + ) + mock_index_document.start() + indexd_client_accepting_record(INDEXD_RECORD_WITH_PUBLIC_AUTHZ_POPULATED) mock_arborist_requests({"arborist/auth/request": {"POST": ({"auth": True}, 200)}}) headers = { @@ -837,6 +857,8 @@ def test_public_authz_object_upload_file( assert response.status_code == 200 assert "url" in response.json + mock_index_document.stop() + def test_public_authz_and_acl_object_upload_file_with_failed_authz_check( client, @@ -885,6 +907,26 @@ def test_public_authz_and_acl_object_upload_file( acl populated with public values. In this case, authz takes precedence over acl. """ + did = str(uuid.uuid4()) + index_document = { + "did": did, + "baseid": "", + "rev": "", + "size": 10, + "file_name": "file1", + "urls": ["s3://bucket1/key-{}".format(did[:8])], + "acl": ["phs000789"], + "hashes": {}, + "metadata": {}, + "form": "", + "created_date": "", + "updated_date": "", + } + mock_index_document = mock.patch( + "fence.blueprints.data.indexd.BlankIndex.index_document", index_document + ) + mock_index_document.start() + indexd_client_accepting_record(INDEXD_RECORD_WITH_PUBLIC_AUTHZ_AND_ACL_POPULATED) mock_arborist_requests({"arborist/auth/request": {"POST": ({"auth": True}, 200)}}) headers = { @@ -903,6 +945,8 @@ def test_public_authz_and_acl_object_upload_file( assert response.status_code == 200 assert "url" in response.json + mock_index_document.stop() + def test_non_public_authz_and_public_acl_object_upload_file( client, @@ -916,6 +960,26 @@ def test_non_public_authz_and_public_acl_object_upload_file( Test that a user can successfully generate an upload url for an Indexd record with a non-public authz field and a public acl field. 
""" + did = str(uuid.uuid4()) + index_document = { + "did": did, + "baseid": "", + "rev": "", + "size": 10, + "file_name": "file1", + "urls": ["s3://bucket1/key-{}".format(did[:8])], + "acl": ["phs000789"], + "hashes": {}, + "metadata": {}, + "form": "", + "created_date": "", + "updated_date": "", + } + mock_index_document = mock.patch( + "fence.blueprints.data.indexd.BlankIndex.index_document", index_document + ) + mock_index_document.start() + indexd_record_with_non_public_authz_and_public_acl_populated = { "did": "1", "baseid": "", @@ -951,6 +1015,8 @@ def test_non_public_authz_and_public_acl_object_upload_file( assert response.status_code == 200 assert "url" in response.json + mock_index_document.stop() + def test_anonymous_download_with_public_authz( client, @@ -1664,6 +1730,112 @@ def json(self): assert "uploadId" in response.json +def test_initialize_multipart_upload_with_guid_in_request( + app, client, auth_client, encoded_creds_jwt, user_client +): + """ + Test /data/multipart/init with guid parameter in request data + """ + + class MockResponse(object): + def __init__(self, data, status_code=200): + self.data = data + self.status_code = status_code + + def json(self): + return self.data + + data_requests_mocker = mock.patch( + "fence.blueprints.data.indexd.requests", new_callable=mock.Mock + ) + arborist_requests_mocker = mock.patch( + "gen3authz.client.arborist.client.httpx.Client.request", new_callable=mock.Mock + ) + + fence.blueprints.data.indexd.BlankIndex.init_multipart_upload = MagicMock() + with data_requests_mocker as data_requests, arborist_requests_mocker as arborist_requests: + did = str(uuid.uuid4()) + data_requests.get.return_value = MockResponse( + { + "did": did, + "baseid": "", + "rev": "", + "size": 10, + "file_name": "file1", + "urls": ["s3://bucket1/key"], + "hashes": {}, + "metadata": {}, + "authz": ["/open"], + "acl": ["*"], + "form": "", + "created_date": "", + "updated_date": "", + } + ) + data_requests.get.return_value.status_code = 200 + + arborist_requests.return_value = MockResponse({"auth": True}) + arborist_requests.return_value.status_code = 200 + fence.blueprints.data.indexd.BlankIndex.init_multipart_upload.return_value = ( + "test_uploadId" + ) + headers = { + "Authorization": "Bearer " + encoded_creds_jwt.jwt, + "Content-Type": "application/json", + } + file_name = "asdf" + data = json.dumps({"file_name": file_name, "guid": did}) + response = client.post("/data/multipart/init", headers=headers, data=data) + + assert response.status_code == 201, response + assert "guid" in response.json + assert did == response.json.get("guid") + assert "uploadId" in response.json + + +def test_initialize_multipart_upload_with_non_existent_guid_in_request( + app, client, auth_client, encoded_creds_jwt, user_client +): + """ + Test /data/multipart/init with guid parameter in request data but no guid exist in indexd + """ + + class MockResponse(object): + def __init__(self, data, status_code=200): + self.data = data + self.status_code = status_code + + def json(self): + return self.data + + data_requests_mocker = mock.patch( + "fence.blueprints.data.indexd.requests", new_callable=mock.Mock + ) + arborist_requests_mocker = mock.patch( + "gen3authz.client.arborist.client.httpx.Client.request", new_callable=mock.Mock + ) + + fence.blueprints.data.indexd.BlankIndex.init_multipart_upload = MagicMock() + with data_requests_mocker as data_requests, arborist_requests_mocker as arborist_requests: + did = str(uuid.uuid4()) + data_requests.get.return_value = MockResponse("no 
record found") + data_requests.get.return_value.status_code = 404 + arborist_requests.return_value = MockResponse({"auth": True}) + arborist_requests.return_value.status_code = 200 + fence.blueprints.data.indexd.BlankIndex.init_multipart_upload.return_value = ( + "test_uploadId" + ) + headers = { + "Authorization": "Bearer " + encoded_creds_jwt.jwt, + "Content-Type": "application/json", + } + file_name = "asdf" + data = json.dumps({"file_name": file_name, "guid": did}) + response = client.post("/data/multipart/init", headers=headers, data=data) + + assert response.status_code == 404, response + + def test_multipart_upload_presigned_url( app, client, auth_client, encoded_creds_jwt, user_client ): diff --git a/tests/link/test_link.py b/tests/link/test_link.py index 57e8bafbc..ae0e61fae 100644 --- a/tests/link/test_link.py +++ b/tests/link/test_link.py @@ -177,7 +177,6 @@ def test_google_link_auth_return( # manually set cookie for initial session client.set_cookie( - "localhost", config["SESSION_COOKIE_NAME"], test_session_jwt, httponly=True, @@ -258,7 +257,6 @@ def test_patch_google_link( # manually set cookie for initial session client.set_cookie( - "localhost", config["SESSION_COOKIE_NAME"], test_session_jwt, httponly=True, @@ -362,7 +360,6 @@ def test_patch_google_link_account_not_in_token( # manually set cookie for initial session client.set_cookie( - "localhost", config["SESSION_COOKIE_NAME"], test_session_jwt, httponly=True, @@ -418,7 +415,6 @@ def test_patch_google_link_account_doesnt_exist( # manually set cookie for initial session client.set_cookie( - "localhost", config["SESSION_COOKIE_NAME"], test_session_jwt, httponly=True, @@ -487,9 +483,9 @@ def test_google_link_g_account_exists( # manually set cookie for initial session client.set_cookie( - "localhost", - config["SESSION_COOKIE_NAME"], - test_session_jwt, + key=config["SESSION_COOKIE_NAME"], + value=test_session_jwt, + domain="localhost", httponly=True, samesite="Lax", ) @@ -566,7 +562,6 @@ def test_google_link_g_account_access_extension( # manually set cookie for initial session client.set_cookie( - "localhost", config["SESSION_COOKIE_NAME"], test_session_jwt, httponly=True, @@ -653,7 +648,6 @@ def test_google_link_g_account_exists_linked_to_different_user( # manually set cookie for initial session client.set_cookie( - "localhost", config["SESSION_COOKIE_NAME"], test_session_jwt, httponly=True, @@ -721,7 +715,6 @@ def test_google_link_no_proxy_group( # manually set cookie for initial session client.set_cookie( - "localhost", config["SESSION_COOKIE_NAME"], test_session_jwt, httponly=True, @@ -807,7 +800,6 @@ def test_google_link_when_google_mocked( # manually set cookie for initial session client.set_cookie( - "localhost", config["SESSION_COOKIE_NAME"], test_session_jwt, httponly=True, diff --git a/tests/link/test_link_id_token.py b/tests/link/test_link_id_token.py index a1c54c96a..5f3dff90a 100644 --- a/tests/link/test_link_id_token.py +++ b/tests/link/test_link_id_token.py @@ -12,6 +12,7 @@ def test_google_id_token_not_linked(oauth_test_client): Test google email and link expiration are in id_token for a linked account """ data = {"confirm": "yes"} + oauth_test_client.authorize(data=data) tokens = oauth_test_client.token() id_token = jwt.decode( diff --git a/tests/login/test_base.py b/tests/login/test_base.py index a9bfff7ec..a32452b2c 100644 --- a/tests/login/test_base.py +++ b/tests/login/test_base.py @@ -4,7 +4,7 @@ @patch("fence.blueprints.login.base.prepare_login_log") -def test_post_login_set_mfa(app, monkeypatch): 
+def test_post_login_set_mfa(app, monkeypatch, mock_authn_user_flask_context): """ Verifies the arborist is called with the mfa_policy if a given token contains the claims found in the configured multifactor_auth_claim_info @@ -37,7 +37,7 @@ def test_post_login_set_mfa(app, monkeypatch): @patch("fence.blueprints.login.base.prepare_login_log") -def test_post_login_no_mfa_enabled(app, monkeypatch): +def test_post_login_no_mfa_enabled(app, monkeypatch, mock_authn_user_flask_context): """ Verifies arborist is not called when there is no multifactor_auth_claim_info defined for the given IDP. """ diff --git a/tests/login/test_fence_login.py b/tests/login/test_fence_login.py index 96495fe47..d3d60b314 100644 --- a/tests/login/test_fence_login.py +++ b/tests/login/test_fence_login.py @@ -44,6 +44,7 @@ def config_idp_in_client( ], "OPENID_CONNECT": { "fence": { + "name": "other_fence_client", "client_id": "other_fence_client_id", "client_secret": "other_fence_client_secret", "api_base_url": "http://other-fence", @@ -52,7 +53,10 @@ def config_idp_in_client( }, } ) - app.fence_client = OAuthClient(**config["OPENID_CONNECT"]["fence"]) + client = OAuthClient(app) + client.register(**config["OPENID_CONNECT"]["fence"]) + app.fence_client = client + app.config["OPENID_CONNECT"]["fence"] = config["OPENID_CONNECT"]["fence"] yield Dict( client_id=config["OPENID_CONNECT"]["fence"]["client_id"], diff --git a/tests/login/test_google_login.py b/tests/login/test_google_login.py index 17d4bb8bd..f7550b47d 100644 --- a/tests/login/test_google_login.py +++ b/tests/login/test_google_login.py @@ -27,7 +27,6 @@ def test_google_login_http_headers_are_less_than_4k_for_user_with_many_projects( }, ) client.set_cookie( - "localhost", config["SESSION_COOKIE_NAME"], test_session_jwt, httponly=True, diff --git a/tests/login/test_login_redirect.py b/tests/login/test_login_redirect.py index 8e7742db5..6915fe1b2 100644 --- a/tests/login/test_login_redirect.py +++ b/tests/login/test_login_redirect.py @@ -91,10 +91,10 @@ def test_valid_redirect_base(app, client, idp, get_value_from_discovery_doc_patc """ if idp == "fence": mocked_generate_authorize_redirect = MagicMock( - return_value=("authorization_url", "state") + return_value={"url": "authorization_url", "state": "state"} ) mock = patch( - f"flask.current_app.fence_client.generate_authorize_redirect", + f"authlib.integrations.flask_client.apps.FlaskOAuth2App.create_authorization_url", mocked_generate_authorize_redirect, ).start() diff --git a/tests/migrations/README.md b/tests/migrations/README.md new file mode 100644 index 000000000..e2c3eb999 --- /dev/null +++ b/tests/migrations/README.md @@ -0,0 +1,7 @@ +## Migration Tests + +These tests are designed to test the pre/post behavior of database migrations and to make sure the changes work as intended. + +Currently we only have upgrade tests because the latest version of authlib has undergone major changes and will *not* work with previous versions of the database schema.
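+ +To run just these migration tests (assuming the Poetry and pytest setup the rest of this test suite uses): `poetry run pytest -vv tests/migrations`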
+ +For client class details, see fence/models.py diff --git a/tests/migrations/test_9b3a5a7145d7.py b/tests/migrations/test_9b3a5a7145d7.py new file mode 100644 index 000000000..f37fadf5d --- /dev/null +++ b/tests/migrations/test_9b3a5a7145d7.py @@ -0,0 +1,139 @@ +""" +"Non-unique client name" migration +""" + +from alembic.config import main as alembic_main +import pytest +from sqlalchemy.exc import IntegrityError +from sqlalchemy import inspect + +from fence.models import Client +from fence.utils import random_str +import bcrypt + + +@pytest.fixture(scope="function", autouse=True) +def post_test_clean_up(app): + yield + + # clean up the client table + with app.db.session as db_session: + db_session.query(Client).delete() + + # go back to the latest state of the DB + alembic_main(["--raiseerr", "upgrade", "head"]) + + +def test_upgrade(app): + """ + Test adding a client after performing the Alembic upgrade to this revision. + Add a foreign key constraint on client_id to google_service_account and make sure it gets removed after the migration. + """ + + alembic_main( + ["--raiseerr", "downgrade", "a04a70296688"] + ) # pragma: allowlist secret + + with app.db.session as db_session: + inspector = inspect(app.db.engine) + foreign_keys = inspector.get_foreign_keys("google_service_account") + constraint_exists = any( + fk["constrained_columns"] == ["client_id"] for fk in foreign_keys + ) + if constraint_exists: + db_session.execute( + """ + ALTER TABLE google_service_account + DROP CONSTRAINT google_service_account_client_id_fkey; + """ + ) + db_session.commit() + + db_session.execute( + """ + ALTER TABLE google_service_account + ADD CONSTRAINT google_service_account_client_id_fkey + FOREIGN KEY (client_id) + REFERENCES client(client_id); + """ + ) + db_session.commit() + + alembic_main(["--raiseerr", "upgrade", "9b3a5a7145d7"]) # pragma: allowlist secret + + with app.db.session as db_session: + inspector = inspect(app.db.engine) + foreign_keys = inspector.get_foreign_keys("google_service_account") + constraint_exists = any( + fk["constrained_columns"] == ["client_id"] for fk in foreign_keys + ) + assert constraint_exists == False + + client_name = "client_name" + url = "https://oauth-client.net" + client_id = "test-client" + client_secret = random_str(50) + hashed_secret = bcrypt.hashpw( + client_secret.encode("utf-8"), bcrypt.gensalt() + ).decode("utf-8") + grant_types = ["refresh_token"] + allowed_scopes = ["openid", "user", "fence"] + with app.db.session as db_session: + db_session.add( + Client( + client_id=client_id, + client_secret=hashed_secret, + allowed_scopes=allowed_scopes, + redirect_uris=[url], + description="", + is_confidential=True, + name=client_name, + grant_types=grant_types, + ) + ) + db_session.commit() + query_result = db_session.query(Client).all() + + # make sure the client was created, the new _client_metadata field is populated, and the Authlib getters work + assert len(query_result) == 1, query_result + assert query_result[0].name == client_name + assert query_result[0].client_secret == hashed_secret + assert query_result[0].scope == " ".join(allowed_scopes) + assert query_result[0].grant_types == grant_types + assert query_result[0].redirect_uris == [url] + + +def test_upgrade_without_fk_constraint(app): + """ + If the foreign key constraint does not exist, make sure no changes are made to the google_service_account table + """ + # Make sure we start from a previous Alembic revision + alembic_main( + ["--raiseerr", "downgrade", "a04a70296688"] # pragma: allowlist secret + ) + + 
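# Inspect the live schema: if a leftover client_id foreign key is still present, drop it + # so that this test exercises the migration's "no constraint" path. + 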
with app.db.session as db_session: + inspector = inspect(app.db.engine) + foreign_keys = inspector.get_foreign_keys("google_service_account") + constraint_exists = any( + fk["constrained_columns"] == ["client_id"] for fk in foreign_keys + ) + + if constraint_exists: + db_session.execute( + """ + ALTER TABLE google_service_account + DROP CONSTRAINT google_service_account_client_id_fkey; + """ + ) + db_session.commit() + + alembic_main(["--raiseerr", "upgrade", "9b3a5a7145d7"]) # pragma: allowlist secret + + with app.db.session as db_session: + inspector = inspect(app.db.engine) + foreign_keys = inspector.get_foreign_keys("google_service_account") + constraint_exists = any( + fk["constrained_columns"] == ["client_id"] for fk in foreign_keys + ) + assert constraint_exists == False diff --git a/tests/migrations/test_a04a70296688.py b/tests/migrations/test_a04a70296688.py index ab3560420..bf1e8b6dc 100644 --- a/tests/migrations/test_a04a70296688.py +++ b/tests/migrations/test_a04a70296688.py @@ -23,36 +23,12 @@ def post_test_clean_up(app): def test_upgrade(app): - # state before migration - alembic_main(["--raiseerr", "downgrade", "ea7e1b843f82"]) + # This is the last version our current codebase will work with + alembic_main(["--raiseerr", "upgrade", "9b3a5a7145d7"]) # pragma: allowlist secret client_name = "non_unique_client_name" - # before the migration, it should not be possible to create 2 clients - # with the same name - with app.db.session as db_session: - db_session.add( - Client( - name=client_name, - client_id="client_id1", - grant_types="client_credentials", - ) - ) - db_session.add( - Client( - name=client_name, - client_id="client_id2", - grant_types="client_credentials", - ) - ) - with pytest.raises(IntegrityError): - db_session.commit() - db_session.rollback() - - # run the upgrade migration - alembic_main(["--raiseerr", "upgrade", "a04a70296688"]) - - # now it should be possible + # It should be possible to add 2 clients of the same name with app.db.session as db_session: db_session.add( Client( @@ -75,74 +51,3 @@ def test_upgrade(app): assert len(query_result) == 2, query_result assert query_result[0].name == client_name assert query_result[1].name == client_name - - -@pytest.mark.parametrize("expirations", [[1, 100], [0, 0], [0, 100]]) -def test_downgrade(app, expirations): - """ - Test the downgrade with the following expiration values: - - 1 and 100: we keep the row with the highest expiration (100) - - 0 and 0: both rows have no expiration: we keep any of the 2 - - 0 and 100: we keep the row that has an expiration (100) - """ - # state after migration - alembic_main(["--raiseerr", "downgrade", "a04a70296688"]) - - client_name = "non_unique_client_name" - - # it should be possible to create 2 clients with the same name - with app.db.session as db_session: - db_session.add( - Client( - name=client_name, - client_id="client_id1", - grant_types="client_credentials", - expires_in=expirations[0], - ) - ) - db_session.add( - Client( - name=client_name, - client_id="client_id2", - grant_types="client_credentials", - expires_in=expirations[1], - ) - ) - db_session.commit() - query_result = db_session.query(Client).all() - - assert len(query_result) == 2, query_result - assert query_result[0].name == client_name - expires_at1 = query_result[0].expires_at - assert query_result[1].name == client_name - expires_at2 = query_result[1].expires_at - - # run the downgrade migration - alembic_main(["--raiseerr", "downgrade", "ea7e1b843f82"]) - - # the duplicate row with the lowest expiration 
should have been deleted - with app.db.session as db_session: - query_result = db_session.query(Client).all() - assert len(query_result) == 1, query_result - assert query_result[0].name == client_name - assert query_result[0].expires_at == max(expires_at1, expires_at2) - - # now it should not be possible anymore to create 2 clients with the same name - with app.db.session as db_session: - db_session.add( - Client( - name=client_name, - client_id="client_id1", - grant_types="client_credentials", - ) - ) - db_session.add( - Client( - name=client_name, - client_id="client_id2", - grant_types="client_credentials", - ) - ) - with pytest.raises(IntegrityError): - db_session.commit() - db_session.rollback() diff --git a/tests/migrations/test_ea7e1b843f82.py b/tests/migrations/test_ea7e1b843f82.py deleted file mode 100644 index 57c9e129d..000000000 --- a/tests/migrations/test_ea7e1b843f82.py +++ /dev/null @@ -1,122 +0,0 @@ -""" -"Optional Client.redirect_uri" migration -""" - -from alembic.config import main as alembic_main -import pytest -from sqlalchemy.exc import IntegrityError - -from fence.models import Client - - -@pytest.fixture(scope="function", autouse=True) -def post_test_clean_up(app): - yield - - # clean up the client table - with app.db.session as db_session: - db_session.query(Client).delete() - - # go back to the latest state of the DB - alembic_main(["--raiseerr", "upgrade", "head"]) - - -def test_upgrade(app): - # state before migration - alembic_main(["--raiseerr", "downgrade", "e4c7b0ab68d3"]) - - # before the migration, it should not be possible to create a client - # without a redirect_uri - with app.db.session as db_session: - with pytest.raises(IntegrityError): - db_session.add( - Client( - client_id="client_without_redirect_uri", - name="client_without_redirect_uri_name", - grant_types="client_credentials", - ) - ) - db_session.commit() - db_session.rollback() - - # run the upgrade migration - alembic_main(["--raiseerr", "upgrade", "ea7e1b843f82"]) - - # now it should be possible - with app.db.session as db_session: - db_session.add( - Client( - client_id="client_without_redirect_uri", - name="client_without_redirect_uri_name", - grant_types="client_credentials", - ) - ) - db_session.commit() - query_result = db_session.query(Client).all() - - # make sure the client was created - assert len(query_result) == 1, query_result - assert query_result[0].client_id == "client_without_redirect_uri" - assert query_result[0].redirect_uri == None - - -def test_downgrade(app): - # state after migration - alembic_main(["--raiseerr", "downgrade", "ea7e1b843f82"]) - - with app.db.session as db_session: - # it should possible to create a client without a redirect_uri - db_session.add( - Client( - client_id="client_without_redirect_uri", - name="client_without_redirect_uri_name", - grant_types="client_credentials", - ) - ) - # also create a client with a redirect_uri - db_session.add( - Client( - client_id="client_with_redirect_uri", - name="client_with_redirect_uri_name", - grant_types="client_credentials", - redirect_uri="http://localhost/redirect", - ) - ) - query_result = db_session.query(Client).all() - - # make sure the clients were created - assert len(query_result) == 2, query_result - - client_without_redirect_uri = [ - c for c in query_result if c.client_id == "client_without_redirect_uri" - ] - assert len(client_without_redirect_uri) == 1 - assert client_without_redirect_uri[0].redirect_uri == None - - client_with_redirect_uri = [ - c for c in query_result if c.client_id == 
"client_with_redirect_uri" - ] - assert len(client_with_redirect_uri) == 1 - assert client_with_redirect_uri[0].redirect_uri == "http://localhost/redirect" - - # run the downgrade migration - alembic_main(["--raiseerr", "downgrade", "e4c7b0ab68d3"]) - - with app.db.session as db_session: - query_result = db_session.query(Client).all() - assert len(query_result) == 2, query_result - - # make sure the client without redirect was migrated to have an empty - # string as redirect_uri instead of null - client_without_redirect_uri = [ - c for c in query_result if c.client_id == "client_without_redirect_uri" - ] - assert len(client_without_redirect_uri) == 1 - assert client_without_redirect_uri[0].redirect_uri == "" - - # make sure the client with redirect is unchanged - client_with_redirect_uri = [ - c for c in query_result if c.client_id == "client_with_redirect_uri" - ] - assert len(client_with_redirect_uri) == 1 - assert client_with_redirect_uri[0].redirect_uri == "http://localhost/redirect" diff --git a/tests/oidc/core/token/test_validation.py b/tests/oidc/core/token/test_validation.py index b24f96990..d600406a5 100644 --- a/tests/oidc/core/token/test_validation.py +++ b/tests/oidc/core/token/test_validation.py @@ -35,7 +35,7 @@ def test_reuse_code_invalid(oauth_test_client): """ Test that an authorization code returned from the authorization endpoint can be used only once, and after that its attempted usage will return an - ``invalid_request`` error. + ``invalid_grant`` error. """ code = oauth_test_client.authorize(data={"confirm": "yes"}).code # Test that the first time using the code is fine. @@ -45,20 +45,20 @@ def test_reuse_code_invalid(oauth_test_client): response = oauth_test_client.token_response.response assert response.status_code == 400 assert "error" in response.json - assert response.json["error"] == "invalid_request" + assert response.json["error"] == "invalid_grant" def test_different_client_invalid(oauth_test_client, oauth_test_client_B): """ Test that one client cannot use an authorization code which was issued to a - different client, and the request fails with ``invalid_request``. + different client, and the request fails with ``invalid_grant``. """ code = oauth_test_client.authorize(data={"confirm": "yes"}).code # Have client B send the code to the token endpoint. response = oauth_test_client_B.token(code=code, do_asserts=False).response assert response.status_code == 400 assert "error" in response.json - assert response.json["error"] == "invalid_request" + assert response.json["error"] == "invalid_grant" def test_invalid_code(oauth_test_client): @@ -69,27 +69,27 @@ def test_invalid_code(oauth_test_client): response = oauth_test_client.token(code=code, do_asserts=False).response assert response.status_code == 400 assert "error" in response.json - assert response.json["error"] == "invalid_request" + assert response.json["error"] == "invalid_grant" def test_invalid_redirect_uri(oauth_test_client): """ Test that if the token request has a different redirect_uri than the one the client is suppsed to be using that an error is raised, with the - ``invalid_request`` code. + ``invalid_grant`` code. 
""" oauth_test_client.authorize(data={"confirm": "yes"}) data = {"redirect_uri": oauth_test_client.url + "/some-garbage"} response = oauth_test_client.token(data=data, do_asserts=False).response assert response.status_code == 400 assert "error" in response.json - assert response.json["error"] == "invalid_request" + assert response.json["error"] == "invalid_grant" def test_no_redirect_uri(client, oauth_test_client): """ Test that if the token request has no ``redirect_uri`` that an error is - raised, with the ``invalid_request`` code. + raised, with the ``invalid_grant`` code. """ code = oauth_test_client.authorize(data={"confirm": "yes"}).code headers = oauth_test_client._auth_header @@ -105,4 +105,4 @@ def test_no_redirect_uri(client, oauth_test_client): ) assert token_response.status_code == 400 assert "error" in token_response.json - assert token_response.json["error"] == "invalid_request" + assert token_response.json["error"] == "invalid_grant" diff --git a/tests/rfc6749/test_oauth2.py b/tests/rfc6749/test_oauth2.py index 11487d3e6..89879b550 100644 --- a/tests/rfc6749/test_oauth2.py +++ b/tests/rfc6749/test_oauth2.py @@ -28,7 +28,8 @@ def test_oauth2_authorize_incorrect_scope(oauth_test_client, method): auth_response = oauth_test_client.authorize( method=method, data=data, do_asserts=False ) - assert auth_response.response.status_code == 401 + # Check the status code is not a redirect code 3xx + assert str(auth_response.response.status_code)[0] != "3" @pytest.mark.parametrize("method", ["GET", "POST"]) diff --git a/tests/scripting/test_fence-create.py b/tests/scripting/test_fence-create.py index a6be9319a..deae90d34 100644 --- a/tests/scripting/test_fence-create.py +++ b/tests/scripting/test_fence-create.py @@ -113,7 +113,7 @@ def test_create_client_inits_default_allowed_scopes(db_session): def to_test(): saved_client = db_session.query(Client).filter_by(name=client_name).first() - assert saved_client._allowed_scopes == " ".join(config["CLIENT_ALLOWED_SCOPES"]) + assert saved_client.scope == " ".join(config["CLIENT_ALLOWED_SCOPES"]) create_client_action_wrapper( to_test, @@ -131,7 +131,7 @@ def test_create_client_inits_passed_allowed_scopes(db_session): def to_test(): saved_client = db_session.query(Client).filter_by(name=client_name).first() - assert saved_client._allowed_scopes == "openid user data" + assert saved_client.scope == "openid user data" create_client_action_wrapper( to_test, @@ -150,7 +150,7 @@ def test_create_client_adds_openid_when_not_in_allowed_scopes(db_session): def to_test(): saved_client = db_session.query(Client).filter_by(name=client_name).first() - assert saved_client._allowed_scopes == "user data openid" + assert saved_client.scope == "user data openid" create_client_action_wrapper( to_test, @@ -490,7 +490,7 @@ def test_client_rotate(db_session): for attr in [ "user", "redirect_uris", - "_allowed_scopes", + "scope", "description", "auto_approve", "grant_types", @@ -537,21 +537,23 @@ def test_client_rotate_and_actions(db_session, capsys): capsys.readouterr() # clear the buffer list_client_action(db_session) captured_logs = str(capsys.readouterr()) - assert captured_logs.count("'name': 'client_abc'") == 3 + assert captured_logs.count("'name\\': \\'client_abc\\'") == 3 for i in range(3): - assert captured_logs.count(f"'client_id': '{clients[i].client_id}'") == 1 + assert ( + captured_logs.count(f"\\'client_id\\': \\'{clients[i].client_id}\\'") == 1 + ) # check that `modify_client_action` updates all the rows description = "new description" url2 = "new url" 
modify_client_action( - db_session, client_name, description=description, urls=[url2], append=True + config["DB"], client_name, description=description, urls=[url2], append=True ) clients = db_session.query(Client).filter_by(name=client_name).all() assert len(clients) == 3 for i in range(3): assert clients[i].description == description - assert clients[i].redirect_uri == f"{url1}\n{url2}" + assert clients[i].redirect_uris == [url1, url2] # check that `delete_client_action` deletes all the rows delete_client_action(config["DB"], client_name) @@ -675,7 +677,7 @@ def test_create_refresh_token_with_found_user( BASE_URL = config["BASE_URL"] scopes = "openid,user" expires_in = 3600 - + client_id = "test-client" user = User(username=username) db_session.add(user) @@ -689,6 +691,7 @@ def test_create_refresh_token_with_found_user( scopes=scopes, expires_in=expires_in, private_key=rsa_private_key, + client_id=client_id, ).create_refresh_token() refresh_token_response = oauth_test_client.refresh( @@ -1717,7 +1720,7 @@ def test_modify_client_action_modify_allowed_scopes(db_session): client_id=client_id, client_secret="secret", # pragma: allowlist secret name=client_name, - _allowed_scopes="openid user data", + allowed_scopes="openid user data", user=User(username="client_user"), redirect_uris=["localhost"], ) @@ -1736,7 +1739,7 @@ def test_modify_client_action_modify_allowed_scopes(db_session): assert client.auto_approve == True assert client.name == "test321" assert client.description == "test client" - assert client._allowed_scopes == "openid user test" + assert client.scope == "openid user test" assert client.redirect_uris == ["test"] @@ -1747,7 +1750,7 @@ def test_modify_client_action_modify_allowed_scopes_append_true(db_session): client_id=client_id, client_secret="secret", # pragma: allowlist secret name=client_name, - _allowed_scopes="openid user data", + allowed_scopes="openid user data", user=User(username="client_user"), redirect_uris=["localhost"], ) @@ -1766,9 +1769,7 @@ def test_modify_client_action_modify_allowed_scopes_append_true(db_session): assert client.auto_approve == True assert client.name == "test321" assert client.description == "test client" - assert ( - client._allowed_scopes == "openid user data new_scope new_scope_2 new_scope_3" - ) + assert client.scope == "openid user data new_scope new_scope_2 new_scope_3" def test_modify_client_action_modify_append_url(db_session): @@ -1778,7 +1779,7 @@ def test_modify_client_action_modify_append_url(db_session): client_id=client_id, client_secret="secret", # pragma: allowlist secret name=client_name, - _allowed_scopes="openid user data", + allowed_scopes="openid user data", user=User(username="client_user"), redirect_uris="abcd", ) diff --git a/tests/session/test_session.py b/tests/session/test_session.py index 387dd7e98..cf8c812b0 100644 --- a/tests/session/test_session.py +++ b/tests/session/test_session.py @@ -24,8 +24,7 @@ def test_session_cookie_creation(app): with app.test_client() as client: with client.session_transaction(): pass - - client_cookies = [cookie.key for cookie in client.cookie_jar] + client_cookies = client.get_cookie(config["SESSION_COOKIE_NAME"]) assert not client_cookies @@ -36,15 +35,9 @@ def test_session_cookie_creation_session_modified(app): with client.session_transaction() as session: session["username"] = "Captain Janeway" - client_cookies = [cookie.key for cookie in client.cookie_jar] - assert config["SESSION_COOKIE_NAME"] in client_cookies - session_cookie = [ - cookie - for cookie in client.cookie_jar - 
if cookie.key == config["SESSION_COOKIE_NAME"] - ] - assert len(session_cookie) == 1 - assert session_cookie[0].value # Make sure it's not empty + session_cookie = client.get_cookie(config["SESSION_COOKIE_NAME"]) + assert session_cookie + assert session_cookie.value # Make sure it's not empty def test_valid_session(app): @@ -58,8 +51,8 @@ def test_valid_session(app): # the username with app.test_client() as client: # manually set cookie for initial session + # domain is set to localhost be default client.set_cookie( - "localhost", config["SESSION_COOKIE_NAME"], test_session_jwt, httponly=True, @@ -82,7 +75,6 @@ def test_valid_session_modified(app): with app.test_client() as client: # manually set cookie for initial session client.set_cookie( - "localhost", config["SESSION_COOKIE_NAME"], test_session_jwt, httponly=True, @@ -112,7 +104,6 @@ def test_expired_session_lifetime(app): with app.test_client() as client: # manually set cookie for initial session client.set_cookie( - "localhost", config["SESSION_COOKIE_NAME"], test_session_jwt, httponly=True, @@ -144,7 +135,6 @@ def test_expired_session_timeout(app): with app.test_client() as client: # manually set cookie for initial session client.set_cookie( - "localhost", config["SESSION_COOKIE_NAME"], test_session_jwt, httponly=True, @@ -168,9 +158,8 @@ def test_session_cleared(app): with app.test_client() as client: # manually set cookie for initial session client.set_cookie( - "localhost", - config["SESSION_COOKIE_NAME"], - test_session_jwt, + key=config["SESSION_COOKIE_NAME"], + value=test_session_jwt, httponly=True, samesite="Lax", ) @@ -178,8 +167,8 @@ def test_session_cleared(app): session["username"] = username session.clear() assert session.get("username") != username - client_cookies = [cookie.key for cookie in client.cookie_jar] - assert config["SESSION_COOKIE_NAME"] not in client_cookies + client_cookie = client.get_cookie(config["SESSION_COOKIE_NAME"]) + assert not client_cookie def test_invalid_session_cookie(app): @@ -190,7 +179,6 @@ def test_invalid_session_cookie(app): with app.test_client() as client: # manually set cookie for initial session client.set_cookie( - "localhost", config["SESSION_COOKIE_NAME"], test_session_jwt, httponly=True, @@ -234,14 +222,12 @@ def test_valid_session_valid_access_token( with app.test_client() as client: # manually set cookie for initial session client.set_cookie( - "localhost", config["SESSION_COOKIE_NAME"], test_session_jwt, httponly=True, samesite="Lax", ) client.set_cookie( - "localhost", config["ACCESS_TOKEN_COOKIE_NAME"], test_access_jwt, httponly=True, @@ -287,14 +273,12 @@ def test_valid_session_valid_access_token_diff_user( with app.test_client() as client: # manually set cookie for initial session client.set_cookie( - "localhost", config["SESSION_COOKIE_NAME"], test_session_jwt, httponly=True, samesite="Lax", ) client.set_cookie( - "localhost", config["ACCESS_TOKEN_COOKIE_NAME"], test_access_jwt, httponly=True, diff --git a/tests/test-fence-config.yaml b/tests/test-fence-config.yaml index 086c4f6ac..38ccbd147 100755 --- a/tests/test-fence-config.yaml +++ b/tests/test-fence-config.yaml @@ -28,7 +28,7 @@ BASE_URL: 'http://localhost/user' # postgres db to connect to # connection url format: # postgresql://[user[:password]@][netloc][:port][/dbname] -DB: 'postgresql://postgres:postgres@localhost:5432/fence_test_tmp' +DB: 'postgresql://postgres:postgres@localhost:5432/postgres' # A URL-safe base64-encoded 32-byte key for encrypting keys in db # in python you can use the following script to 
generate one: @@ -120,6 +120,7 @@ OPENID_CONNECT: client_secret: '' redirect_url: '' fence: + name: 'fence IDP' client_id: '' client_secret: '' redirect_url: '{{BASE_URL}}/login/fence/login' @@ -232,16 +233,9 @@ LOGIN_OPTIONS: idp: generic2 # ////////////////////////////////////////////////////////////////////////////////////// -# LIBRARY CONFIGURATION (authlib & flask) +# LIBRARY CONFIGURATION (flask) # - Already contains reasonable defaults # ////////////////////////////////////////////////////////////////////////////////////// -# authlib-specific configs for OIDC flow and JWTs -# NOTE: the OAUTH2_JWT_KEY cfg gets set automatically by fence if keys are setup -# correctly -OAUTH2_JWT_ALG: 'RS256' -OAUTH2_JWT_ENABLED: true -OAUTH2_JWT_ISS: '{{BASE_URL}}' -OAUTH2_PROVIDER_ERROR_URI: '/api/oauth2/errors' # used for flask, "path mounted under by the application / web server" # since we deploy as microservices, fence is typically under {{base}}/user diff --git a/tests/test_app_config.py b/tests/test_app_config.py index 3c3af9fd0..ec7b7b8b7 100755 --- a/tests/test_app_config.py +++ b/tests/test_app_config.py @@ -81,7 +81,6 @@ def test_app_config(): {"patch_name": "fence.app_sessions"}, {"patch_name": "fence.app_register_blueprints"}, {"patch_name": "fence.oidc.oidc_server.OIDCServer.init_app"}, - {"patch_name": "fence._setup_prometheus"}, { "patch_name": "fence.resources.storage.StorageManager.__init__", "return_value": None, diff --git a/tests/test_logout.py b/tests/test_logout.py index 49df98c6a..eb0f8f538 100644 --- a/tests/test_logout.py +++ b/tests/test_logout.py @@ -78,9 +78,10 @@ def test_logout_fence(app, client, user_with_fence_provider, monkeypatch): with mock.patch("fence.allowed_login_redirects", return_value={"some_site.com"}): # manually set cookie for initial session client.set_cookie( - "localhost", - config["SESSION_COOKIE_NAME"], - test_session_jwt, + key=config["SESSION_COOKIE_NAME"], + value=test_session_jwt, + # domain is used in client.get_cookie; it defaults to localhost anyway + domain="localhost", httponly=True, samesite="Lax", ) diff --git a/tests/test_audit_service.py b/tests/test_metrics.py similarity index 71% rename from tests/test_audit_service.py rename to tests/test_metrics.py index b1d6568d9..be7d6b2ab 100644 --- a/tests/test_audit_service.py +++ b/tests/test_metrics.py @@ -1,9 +1,15 @@ """ +Tests for the metrics features (Audit Service and Prometheus) + Tests for the Audit Service integration: - test the creation of presigned URL audit logs - test the creation of login audit logs - test the SQS flow +In Audit Service tests where it makes sense, we also test that Prometheus +metrics are created as expected. The last section tests Prometheus metrics +independently. + Note 1: there is no test for the /oauth2 endpoint: the /oauth2 endpoint should redirect the user to the /login endpoint (tested in `test_redirect_oauth2_authorize`), and the login endpoint should @@ -16,7 +22,6 @@ tests looking at users are not affected. 
""" - import boto3 import flask import json @@ -27,11 +32,17 @@ from unittest.mock import ANY, MagicMock, patch import fence +from fence.metrics import metrics from fence.config import config +from fence.blueprints.data.indexd import get_bucket_from_urls +from fence.models import User from fence.resources.audit.utils import _clean_authorization_request_url from tests import utils from tests.conftest import LOGIN_IDPS +# `reset_prometheus_metrics` must be imported even if not used so the autorun fixture gets triggered +from tests.utils.metrics import assert_prometheus_metrics, reset_prometheus_metrics + def test_clean_authorization_request_url(): """ @@ -111,6 +122,7 @@ def __init__(self, data, status_code=200): @pytest.mark.parametrize("protocol", ["gs", None]) def test_presigned_url_log( endpoint, + prometheus_metrics_before, protocol, client, user_client, @@ -126,7 +138,7 @@ def test_presigned_url_log( """ Get a presigned URL from Fence and make sure a call to the Audit Service was made to create an audit log. Test with and without a requested - protocol. + protocol. Also check that a prometheus metric is created. """ mock_arborist_requests({"arborist/auth/request": {"POST": ({"auth": True}, 200)}}) audit_service_mocker = mock.patch( @@ -142,7 +154,7 @@ def test_presigned_url_log( else: path = f"/ga4gh/drs/v1/objects/{guid}/access/{protocol or 's3'}" resource_paths = ["/my/resource/path1", "/path2"] - indexd_client_with_arborist(resource_paths) + record = indexd_client_with_arborist(resource_paths)["record"] headers = { "Authorization": "Bearer " + jwt.encode( @@ -183,6 +195,39 @@ def test_presigned_url_log( }, ) + # check prometheus metrics + resp = client.get("/metrics") + assert resp.status_code == 200 + bucket = get_bucket_from_urls(record["urls"], expected_protocol) + size_in_kibibytes = record["size"] / 1024 + expected_metrics = [ + { + "name": "gen3_fence_presigned_url_total", + "labels": { + "action": "download", + "authz": resource_paths, + "bucket": bucket, + "drs": endpoint == "ga4gh-drs", + "protocol": expected_protocol, + "user_sub": user_client.user_id, + }, + "value": 1.0, + }, + { + "name": "gen3_fence_presigned_url_size", + "labels": { + "action": "download", + "authz": resource_paths, + "bucket": bucket, + "drs": endpoint == "ga4gh-drs", + "protocol": expected_protocol, + "user_sub": user_client.user_id, + }, + "value": size_in_kibibytes, + }, + ] + assert_prometheus_metrics(prometheus_metrics_before, resp.text, expected_metrics) + @pytest.mark.parametrize( "indexd_client_with_arborist", ["s3_and_gs_acl_no_authz"], indirect=True @@ -411,10 +456,11 @@ def test_login_log_login_endpoint( rsa_private_key, db_session, # do not remove :-) See note at top of file monkeypatch, + prometheus_metrics_before, ): """ Test that logging in via any of the existing IDPs triggers the creation - of a login audit log. + of a login audit log and of a prometheus metric. 
""" mock_arborist_requests() audit_service_mocker = mock.patch( @@ -450,7 +496,7 @@ def test_login_log_login_endpoint( elif idp == "fence": mocked_fetch_access_token = MagicMock(return_value={"id_token": jwt_string}) patch( - f"flask.current_app.fence_client.fetch_access_token", + f"authlib.integrations.flask_client.apps.FlaskOAuth2App.fetch_access_token", mocked_fetch_access_token, ).start() mocked_validate_jwt = MagicMock( @@ -490,16 +536,17 @@ def test_login_log_login_endpoint( data={}, status_code=201, ) - path = f"/login/{idp}/{callback_endpoint}" + path = f"/login/{idp}/{callback_endpoint}" # SEE fence/blueprints/login/fence_login.py L91 response = client.get(path, headers=headers) assert response.status_code == 200, response + user_sub = db_session.query(User).filter(User.username == username).first().id audit_service_requests.post.assert_called_once_with( "http://audit-service/log/login", json={ "request_url": path, "status_code": 200, "username": username, - "sub": ANY, + "sub": user_sub, "idp": idp_name, "fence_idp": None, "shib_idp": None, @@ -510,10 +557,27 @@ def test_login_log_login_endpoint( if get_auth_info_patch: get_auth_info_patch.stop() + # check prometheus metrics + resp = client.get("/metrics") + assert resp.status_code == 200 + expected_metrics = [ + { + "name": "gen3_fence_login_total", + "labels": {"idp": "all", "user_sub": user_sub}, + "value": 1.0, + }, + { + "name": "gen3_fence_login_total", + "labels": {"idp": idp_name, "user_sub": user_sub}, + "value": 1.0, + }, + ] + assert_prometheus_metrics(prometheus_metrics_before, resp.text, expected_metrics) + -########################## -# Push audit logs to SQS # -########################## +########################################## +# Audit Service - Push audit logs to SQS # +########################################## def mock_audit_service_sqs(app): @@ -638,3 +702,171 @@ def test_login_log_push_to_sqs( mocked_sqs.send_message.assert_called_once() get_auth_info_patch.stop() + + +###################### +# Prometheus metrics # +###################### + + +def test_disabled_prometheus_metrics(client, monkeypatch): + """ + When metrics gathering is not enabled, the metrics endpoint should not error, but it should + not return any data. + """ + monkeypatch.setitem(config, "ENABLE_PROMETHEUS_METRICS", False) + metrics.add_login_event( + user_sub="123", + idp="test_idp", + fence_idp="shib", + shib_idp="university", + client_id="test_azp", + ) + resp = client.get("/metrics") + assert resp.status_code == 200 + assert resp.text == "" + + +def test_record_prometheus_events(prometheus_metrics_before, client): + """ + Validate the returned value of the metrics endpoint before any event is logged, after an event + is logged, and after more events (one identical to the 1st one, and two different) are logged. + """ + # NOTE: To update later. The metrics utils don't support this yet. The gauges are not handled correctly. + # resp = client.get("/metrics") + # assert resp.status_code == 200 + # # no metrics have been recorded yet + # assert_prometheus_metrics(prometheus_metrics_before, resp.text, []) + + # record a login event and check that we get both a metric for the specific IDP, and an + # IDP-agnostic metric for the total number of login events. The latter should have no IDP + # information (no `fence_idp` or `shib_idp`). 
+ metrics.add_login_event( + user_sub="123", + idp="test_idp", + fence_idp="shib", + shib_idp="university", + client_id="test_azp", + ) + resp = client.get("/metrics") + assert resp.status_code == 200 + expected_metrics = [ + { + "name": "gen3_fence_login_total", + "labels": { + "user_sub": "123", + "idp": "test_idp", + "fence_idp": "shib", + "shib_idp": "university", + "client_id": "test_azp", + }, + "value": 1.0, + }, + { + "name": "gen3_fence_login_total", + "labels": { + "user_sub": "123", + "idp": "all", + "fence_idp": "None", + "shib_idp": "None", + "client_id": "test_azp", + }, + "value": 1.0, + }, + ] + assert_prometheus_metrics(prometheus_metrics_before, resp.text, expected_metrics) + + # same login: should increase the existing counter by 1 + metrics.add_login_event( + user_sub="123", + idp="test_idp", + fence_idp="shib", + shib_idp="university", + client_id="test_azp", + ) + # login with different IDP labels: should create a new metric + metrics.add_login_event( + user_sub="123", + idp="another_idp", + fence_idp=None, + shib_idp=None, + client_id="test_azp", + ) + # new signed URL event: should create a new metric + metrics.add_signed_url_event( + action="upload", + protocol="s3", + acl=None, + authz=["/test/path"], + bucket="s3://test-bucket", + user_sub="123", + client_id="test_azp", + drs=True, + size_in_kibibytes=1.2, + ) + resp = client.get("/metrics") + assert resp.status_code == 200 + expected_metrics = [ + { + "name": "gen3_fence_login_total", + "labels": { + "user_sub": "123", + "idp": "all", + "fence_idp": "None", + "shib_idp": "None", + "client_id": "test_azp", + }, + "value": 3.0, # recorded login events since the beginning of the test + }, + { + "name": "gen3_fence_login_total", + "labels": { + "user_sub": "123", + "idp": "test_idp", + "fence_idp": "shib", + "shib_idp": "university", + "client_id": "test_azp", + }, + "value": 2.0, # recorded login events for this idp, fence_idp and shib_idp combo + }, + { + "name": "gen3_fence_login_total", + "labels": { + "user_sub": "123", + "idp": "another_idp", + "fence_idp": "None", + "shib_idp": "None", + "client_id": "test_azp", + }, + "value": 1.0, # recorded login events for the different idp + }, + { + "name": "gen3_fence_presigned_url_total", + "labels": { + "user_sub": "123", + "action": "upload", + "protocol": "s3", + "authz": ["/test/path"], + "bucket": "s3://test-bucket", + "user_sub": "123", + "client_id": "test_azp", + "drs": True, + }, + "value": 1.0, # recorded presigned URL events + }, + { + "name": "gen3_fence_presigned_url_size", + "labels": { + "user_sub": "123", + "action": "upload", + "protocol": "s3", + "authz": ["/test/path"], + "bucket": "s3://test-bucket", + "user_sub": "123", + "client_id": "test_azp", + "drs": True, + }, + "value": 1.2, # presigned URL gauge with the file size as value + }, + ] + assert_prometheus_metrics(prometheus_metrics_before, resp.text, expected_metrics) diff --git a/tests/utils/metrics.py b/tests/utils/metrics.py new file mode 100644 index 000000000..0443589a0 --- /dev/null +++ b/tests/utils/metrics.py @@ -0,0 +1,223 @@ +""" +At the time of writing, Prometheus metrics out of the box can't be reset between each +unit test. To be able to write independent unit tests, we have to manually save the "previous +state" (see `prometheus_metrics_before` fixture) and compare it to the new state. 
This involves +manually parsing the "previous state" (a python object) and the "current state" (raw text) into +the same format so they can be compared: +{ "name": "", "labels": {}, "value": 0 } + +The utility functions below can be used to check that the expected metrics have been recorded, +while discarding any previous metrics. + +https://stackoverflow.com/questions/73198616/how-do-i-reset-a-prometheus-python-client-python-runtime-between-pytest-test-fun +""" + + +import os +import shutil + +import pytest + + +@pytest.fixture(autouse=True, scope="session") +def reset_prometheus_metrics(): + """ + Delete the prometheus files after all the tests have run. + Without this, when running the tests locally, we would keep reading the metrics from + previous test runs. + So why not run this in-between the unit tests instead of the `assert_prometheus_metrics` + logic? Because it doesn't work, the prometheus client also keeps the state, and the mismatch + causes errors. This only works when the client is reset too (new process) + """ + yield + + folder = os.environ["PROMETHEUS_MULTIPROC_DIR"] + for filename in os.listdir(folder): + file_path = os.path.join(folder, filename) + try: + if os.path.isfile(file_path) or os.path.islink(file_path): + os.unlink(file_path) + elif os.path.isdir(file_path): + shutil.rmtree(file_path) + except Exception as e: + print(f"Failed to delete Prometheus metrics file '{file_path}': {e}") + + +def _diff_new_metrics_from_old_metrics(new_metrics, old_metrics): + """ + Return a list of "current metrics" by comparing the "new metrics" (current state) to the "old metrics" (previous state). + + Input metric format example: { + 'gen3_fence_login_total{client_id="test_azp",fence_idp="shib",idp="test_idp",shib_idp="university",user_sub="123"}': 2.0, + 'gen3_fence_login_total{client_id="test_azp",fence_idp="None",idp="all",shib_idp="None",user_sub="123"}': 3.0, + } + + Functionality example: + old_metrics = { 'counter1': 2, 'counter2': 2, 'gauge1': 1 } + new_metrics = { 'counter1': 1, 'counter3': 1 } + Returned value = [ + ('counter1', 1) (difference between 2 and 1), + ('counter3', 1) + ] (counter2 and gauge1 omitted since they are not part of the current state) + + Args: + new_metrics (dict): format { : } + old_metrics (dict): format { : } + } + + Return: + list> + """ + + def metric_is_gauge(metric_name): + return not metric_name.endswith("_total") and not metric_name.endswith( + "_created" + ) + + diff = [] + for long_metric_name, old_value in old_metrics.items(): + # long_metric_name = metric name + labels (see example in docstring) + metric_name = long_metric_name.split("{")[0] + if long_metric_name not in new_metrics or metric_is_gauge(metric_name): + # ignore all old metrics that are not also present in the new metrics + continue + # the metric value generated by the current test is the difference between the previous + # value and the current value + val = new_metrics[long_metric_name] - old_value + if val != 0: + diff.append((long_metric_name, val)) + for long_metric_name, new_value in new_metrics.items(): + metric_name = long_metric_name.split("{")[0] + if metric_is_gauge(metric_name): # all gauge metrics must be listed + diff.append((long_metric_name, new_value)) + elif long_metric_name not in old_metrics: + diff.append((long_metric_name, new_value)) + return diff + + +def _parse_raw_metrics_to_dict(text_metric): + """ + Parse raw text metrics into a dictionary of metric (metric name + labels) to value, + ignoring lines that are not metrics. 
+ + Args: + text_metric (str) + Example: + # TYPE gen3_fence_login_total counter + gen3_fence_login_total{idp="test_idp",shib_idp="university",user_sub="123"} 2.0 + # HELP gen3_fence_presigned_url_total Fence presigned urls + # TYPE gen3_fence_presigned_url_total counter + gen3_fence_presigned_url_total{client_id="test_azp",drs="True",user_sub="123"} 1.0 + + Return: + dict + Example: + { + "gen3_fence_login_total{idp="test_idp",shib_idp="university",user_sub="123"}": 2.0, + "gen3_fence_presigned_url_total{client_id="test_azp",drs="True",user_sub="123"}": 1.0, + } + """ + if not text_metric: + return {} + return { + " ".join(m.split(" ")[:-1]): float(m.split(" ")[-1]) + for m in text_metric.strip().split("\n") + if not m.startswith("#") + } + + +def _parse_raw_name_to_labels(text_metric_name): + """ + Parse a raw metric name into a name and a dict of labels. + + Example: + text_metric_name = `metric_name{param1="None",param2="upload",param3="['/test/path']"}` + Returned value = { + "name": "metric_name", + "labels": { "param1": "None", "param2": "upload", "param3": "['/test/path']" } + } + + Args: + text_metric_name (str) + + Returns: + dict + """ + name = text_metric_name.split("{")[0] + labels = text_metric_name.split("{")[1].split("}")[0].split('",') + labels = {l.split("=")[0]: l.split("=")[1].strip('"') for l in labels} + return {"name": name, "labels": labels} + + +def assert_prometheus_metrics( + previous_text_metrics, current_text_metrics, expected_metrics +): + """ + Compare the previous state and the current state of the prometheus metrics, and check that the difference between the two matches the new metrics the test expects to have recorded. + + Expected metrics: only provide the labels we need to check for; the rest will be ignored. + + Args: + previous_text_metrics (str): previous state of prometheus metrics + current_text_metrics (str): current state + Example `previous_text_metrics` or `current_text_metrics`: + # TYPE gen3_fence_login_total counter + gen3_fence_login_total{idp="test_idp",shib_idp="university",user_sub="123"} 2.0 + # HELP gen3_fence_presigned_url_total Fence presigned urls + # TYPE gen3_fence_presigned_url_total counter + gen3_fence_presigned_url_total{acl="None",action="upload",authz="['/test/path']",bucket="s3://test-bucket",client_id="test_azp",drs="True",protocol="s3",user_sub="123"} 1.0 + expected_metrics (list): the expected difference between previous state and current state. + Only provide the labels we need to check; omitted labels will be ignored even if they + are present in the current state. 
+ Example: [ + { + 'name': 'gen3_fence_login_total', + 'labels': { + 'idp': 'test_idp', 'shib_idp': 'university', 'user_sub': '123' + }, + 'value': 2.0 + } + ] + """ + old_metrics = _parse_raw_metrics_to_dict(previous_text_metrics) + print("Old metrics:") + for k, v in old_metrics.items(): + print(f"- {k} = {v}") + + new_metrics = _parse_raw_metrics_to_dict(current_text_metrics) + print("Received metrics:") + for k, v in new_metrics.items(): + print(f"- {k} = {v}") + + diff_metrics = _diff_new_metrics_from_old_metrics(new_metrics, old_metrics) + current_metrics = [] + print("Diff:") + for (metric_name, val) in diff_metrics: + parsed_m = _parse_raw_name_to_labels(metric_name) + parsed_m["value"] = val + current_metrics.append(parsed_m) + print(f"- {parsed_m}") + + print("Expecting metrics:") + # check that for each metric+label combination, the value is identical to the expected value + for expected_m in expected_metrics: + found = False + print(f"- {expected_m}") + for current_m in current_metrics: # look for the right metric + if current_m["name"] != expected_m["name"]: + continue + # if the metric name is identical, check the labels + right_labels = True + for label_k, label_v in expected_m["labels"].items(): + if current_m["labels"].get(label_k) != str(label_v): + right_labels = False + break + # if both the name and the labels are identical, this is the right metric: + # check that the value is the same as expected + if right_labels: + assert ( + current_m["value"] == expected_m["value"] + ), f"Missing metric: {expected_m}" + found = True + break # we found the right metric and it has the right value: moving on + assert found, f"Missing metric: {expected_m}" diff --git a/travis/pg_hba.conf b/travis/pg_hba.conf deleted file mode 100644 index e080219fd..000000000 --- a/travis/pg_hba.conf +++ /dev/null @@ -1,10 +0,0 @@ -# This config file will be used for the Travis test run. -# -# The new PostgreSQL 13 changes some settings from what they originally were -# in Travis, so we'll set them back. In particular we want to enable -# passwordless authentication for connections to PostgreSQL. -# Source: https://github.com/NCI-GDC/psqlgraph/blob/94f315db2c039217752cba85d9c63988f2059317/travis/pg_hba.conf -local all postgres trust -local all all trust -host all all 127.0.0.1/32 trust -host all all ::1/128 trust diff --git a/travis/postgresql.conf b/travis/postgresql.conf deleted file mode 100644 index d3959e564..000000000 --- a/travis/postgresql.conf +++ /dev/null @@ -1,32 +0,0 @@ -# This config file will be used for PostgreSQL 13 because Travis doesn't -# have configurations set up for it yet. The most important part will be the -# ramfs storage location change. It also defaults to port 5433 so we need to -# change that back, too. 
-# Copied from https://github.com/NCI-GDC/psqlgraph/blob/94f315db2c039217752cba85d9c63988f2059317/travis/postgresql.conf -data_directory = '/var/ramfs/postgresql/13/main' -hba_file = '/etc/postgresql/13/main/pg_hba.conf' -ident_file = '/etc/postgresql/13/main/pg_ident.conf' -external_pid_file = '/var/run/postgresql/13-main.pid' -port = 5432 -max_connections = 255 -unix_socket_directories = '/var/run/postgresql' -ssl = on -ssl_cert_file = '/etc/ssl/certs/ssl-cert-snakeoil.pem' -ssl_key_file = '/etc/ssl/private/ssl-cert-snakeoil.key' -shared_buffers = 128MB -dynamic_shared_memory_type = posix -max_wal_size = 256MB -min_wal_size = 80MB -log_line_prefix = '%t ' -log_timezone = 'UTC' -cluster_name = '13/main' -stats_temp_directory = '/var/run/postgresql/13-main.pg_stat_tmp' -datestyle = 'iso, mdy' -timezone = 'UTC' -lc_messages = 'en_US.UTF-8' -lc_monetary = 'en_US.UTF-8' -lc_numeric = 'en_US.UTF-8' -lc_time = 'en_US.UTF-8' -default_text_search_config = 'pg_catalog.english' -include_dir = 'conf.d' -fsync = false