diff --git a/.github/workflows/release_prod.yml b/.github/workflows/release_prod.yml new file mode 100644 index 0000000..00b1e7d --- /dev/null +++ b/.github/workflows/release_prod.yml @@ -0,0 +1,304 @@ +name: CI at Main Branch +run-name: ${{ github.actor }} is deploying at main branch 🚀 +on: + push: + branches: + - main +env: + RELEASE_ID: rc-${{ github.ref_name }}-${{ github.run_id }} + NODE_VERSION: 16.x +jobs: + configure: + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.set-matrix.outputs.matrix }} + steps: + - name: Checkout to repository + uses: actions/checkout@v3 + with: + repository: 'grnet/rciam-deploy-inv' + ref: 'master' + ssh-key: ${{ secrets.DEPLOY_READ_SECRET }} + path: 'inventory' + - name: Set matrix data + id: set-matrix + working-directory: inventory/rciam-metrics/files/all/tenants + # https://www.jitsejan.com/use-github-actions-with-json-file-as-matrix + run: | + ls + cat ./config.json + echo "matrix=$(jq -c . < ./config.json)" >> $GITHUB_OUTPUT + checkout: + runs-on: ubuntu-latest + needs: configure + strategy: + matrix: ${{ fromJson(needs.configure.outputs.matrix) }} + steps: + - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event." + - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub!" + - run: echo "🔎 The name of your branch is ${{ github.ref_name }} and your repository is ${{ github.repository }}." + - name: Check out repository code + uses: actions/checkout@v3 + with: + path: 'metrics-app-${{matrix.tenant}}' + ref: 'main' + - run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner." + - run: echo "💡 The ${{ github.sha }} commit processing started." + - run: echo "🖥️ The workflow is now ready to test your code on the runner." + - name: List files in the repository + run: | + ls ${{ github.workspace }} + - name: Download playbook + uses: actions/checkout@v3 + with: + # Repository name with owner. 
For example, actions/checkout + # Default: ${{ github.repository }} + repository: 'rciam/rciam-deploy' + ref: 'master' + path: 'roles' + - name: Download inventory + uses: actions/checkout@v3 + with: + repository: 'grnet/rciam-deploy-inv' + ref: 'master' + ssh-key: ${{ secrets.DEPLOY_READ_SECRET }} + path: 'inventory' + - name: Run playbook (create react_config file) + uses: dawidd6/action-ansible-playbook@v2 + with: + # Required, playbook filepath + playbook: metricsservers.yml + # Optional, directory where playbooks live + directory: ./roles + key: ${{ secrets.DEPLOY_READ_SECRET }} + # Optional, encrypted vault password + vault_password: ${{secrets.VAULT_PASSWORD}} + options: | + --inventory ${{ github.workspace }}/inventory/rciam-metrics/hosts.ini + --tags rciam-metrics:config-local + -u debian + - name: List files in the repository + # The tenant specific config file, i.e. config.tenant.environment.json, becomes plain config.json + # because the frontend lives under its own directory/path + run: | + ls -la ${{ github.workspace }}/inventory/rciam-metrics/files + - name: Move tenant config file to config.json + # The tenant specific config file, i.e. config.tenant.environment.json, becomes plain config.json + run: | + mv ${{ github.workspace }}/inventory/rciam-metrics/files/config.${{ matrix.tenant }}.json ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript/src/config.json + - name: List files in metrics-app-${{matrix.tenant}} javascript + # The tenant specific config file, i.e. 
config.tenant.environment.json, becomes plain config.json + run: | + ls -la ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript/src + - name: Share artifact inside workflow (frontend) + uses: actions/upload-artifact@v3 + with: + name: react-application + path: | + ${{ github.workspace }}/metrics-app-${{matrix.tenant}} + - name: Use Node.js ${{ env.NODE_VERSION }} + uses: actions/setup-node@v3 + with: + node-version: ${{ env.NODE_VERSION }} + - name: Install dependencies + run: | + cd ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript; npm install + - name: Build React application + run: | + cd ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript; CI=false npm run build + # Share artifact inside workflow + - name: List files in the repository + run: | + ls ${{ github.workspace }}/metrics-app-${{matrix.tenant}} + - name: Create release branch + run: cd ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/; git checkout -b ${{ env.RELEASE_ID }} + - name: Initialize mandatory git config + working-directory: ./metrics-app-${{matrix.tenant}} + run: | + git config user.name "GitHub Actions" + git config user.email noreply@github.com + - name: Push changes + uses: ad-m/github-push-action@master + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + branch: ${{ env.RELEASE_ID }} + directory: ./metrics-app-${{matrix.tenant}} + - name: Share artifact inside workflow - ${{matrix.tenant}} + uses: actions/upload-artifact@v3 + with: + name: react-github-actions-build-${{matrix.tenant}} + path: | + ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript/build + ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript/CHANGELOG.md + ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/CHANGELOG.md + - name: Share artifact inside workflow - backend + uses: actions/upload-artifact@v3 + with: + name: react-github-actions-build-backend + path: | + ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/app + ${{ 
github.workspace }}/metrics-app-${{matrix.tenant}}/requirements.txt + ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/CHANGELOG.md + - run: echo "🍏 This job's status is ${{ job.status }}." + frontend_release: + runs-on: ubuntu-latest + strategy: + matrix: ${{ fromJson(needs.configure.outputs.matrix) }} + # We specify that deploys needs to + # finish before we create a release + # Job outputs are available to all downstream jobs that depend on this job. + needs: [checkout, configure] + steps: + # Download previously shared build + - name: Get artifact + uses: actions/download-artifact@v3 + with: + path: ./metrics-app-${{matrix.tenant}} + name: react-github-actions-build-${{matrix.tenant}} + - name: List files + run: | + pwd + ls -la ./ + ls -la ./metrics-app-${{matrix.tenant}}/ + ls -la ./metrics-app-${{matrix.tenant}}/javascript + ls -la ./metrics-app-${{matrix.tenant}}/javascript/build + - name: Compress action step + uses: a7ul/tar-action@v1.1.0 + id: compress + # The frontend release is only the javascript/build + # We need to include all the files and directories since + # the action is not smart enough to under asterisk (*) + with: + command: c + cwd: ./metrics-app-${{matrix.tenant}}/javascript/build + files: | + asset-manifest.json + index.html + static + outPath: frontend-${{matrix.tenant}}-release-build.tar.gz + # Upload as an artifact of the current workflow + - name: Upload frontend build zip artifact + uses: actions/upload-artifact@v1 + with: + name: frontend-${{matrix.tenant}}-release-build.tar.gz + path: frontend-${{matrix.tenant}}-release-build.tar.gz + - name: Create Release + uses: ncipollo/release-action@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + artifacts: "frontend-${{matrix.tenant}}-release-build.tar.gz" + bodyFile: "./metrics-app-${{matrix.tenant}}/javascript/CHANGELOG.md" + tag: "frontend-${{matrix.tenant}}-${{ github.ref_name }}-${{ env.RELEASE_ID }}" + makeLatest: true + generateReleaseNotes: true + backend_release: + runs-on: 
ubuntu-latest + # We specify that deploys needs to + # finish before we create a release + # Job outputs are available to all downstream jobs that depend on this job. + needs: [checkout, configure] + steps: + # Download previously shared build + - name: Get artifact + uses: actions/download-artifact@v3 + with: + name: react-github-actions-build-backend + - name: Compress action step + uses: a7ul/tar-action@v1.1.0 + id: compress + with: + command: c + cwd: ./ + files: | + app/ + requirements.txt + outPath: backend-release-build.tar.gz + - name: Upload backend build zip artifact + uses: actions/upload-artifact@v1 + with: + name: backend-release-build.tar.gz + path: backend-release-build.tar.gz + - name: Create Release + uses: ncipollo/release-action@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + artifacts: "backend-release-build.tar.gz" + bodyFile: "CHANGELOG.md" + tag: "metrics-api-${{ github.ref_name }}-${{ env.RELEASE_ID }}" + makeLatest: true + generateReleaseNotes: true + deploy_backend: + runs-on: ubuntu-latest + # We specify that deploys needs to + # finish before we create a release + needs: backend_release + steps: + - name: Download playbook + uses: actions/checkout@v3 + with: + # Repository name with owner. 
For example, actions/checkout + # Default: ${{ github.repository }} + repository: 'rciam/rciam-deploy' + ref: 'master' + path: 'roles' + - name: Download inventory + uses: actions/checkout@v3 + with: + repository: 'grnet/rciam-deploy-inv' + ref: 'master' + ssh-key: ${{ secrets.DEPLOY_READ_SECRET }} + path: 'inventory' + - name: Run playbook (deploy rciam-metrics) with release metrics-api-${{ github.ref_name }}-${{ env.RELEASE_ID }} + uses: dawidd6/action-ansible-playbook@v2 + with: + # Required, playbook filepath + playbook: metricsservers.yml + # Optional, directory where playbooks live + directory: ./roles + key: ${{ secrets.DEPLOY_READ_SECRET }} + # Optional, encrypted vault password + vault_password: ${{secrets.VAULT_PASSWORD}} + options: | + --inventory ${{ github.workspace }}/inventory/rciam-metrics/hosts.ini + --tags rciam-metrics:deploy-backend + -u debian + --extra-vars "metrics_release=metrics-api-${{ github.ref_name }}-${{ env.RELEASE_ID }}" + deploy_frontend: + runs-on: ubuntu-latest + strategy: + matrix: ${{ fromJson(needs.configure.outputs.matrix) }} + # We specify that deploys needs to + # finish before we create a release + # Job outputs are available to all downstream jobs that depend on this job. + needs: [frontend_release, configure] + steps: + - name: Download playbook + uses: actions/checkout@v3 + with: + # Repository name with owner. 
For example, actions/checkout + # Default: ${{ github.repository }} + repository: 'rciam/rciam-deploy' + ref: 'master' + path: 'roles' + - name: Download inventory + uses: actions/checkout@v3 + with: + repository: 'grnet/rciam-deploy-inv' + ref: 'master' + ssh-key: ${{ secrets.DEPLOY_READ_SECRET }} + path: 'inventory' + - name: Run playbook (deploy rciam-metrics) with release frontend-${{matrix.tenant}}-${{ github.ref_name }}-${{ env.RELEASE_ID }} + uses: dawidd6/action-ansible-playbook@v2 + with: + # Required, playbook filepath + playbook: metricsservers.yml + # Optional, directory where playbooks live + directory: ./roles + key: ${{ secrets.DEPLOY_READ_SECRET }} + # Optional, encrypted vault password + vault_password: ${{secrets.VAULT_PASSWORD}} + options: | + --inventory ${{ github.workspace }}/inventory/rciam-metrics/hosts.ini + --tags rciam-metrics:deploy-frontend + -u debian + --extra-vars "metrics_release=frontend-${{matrix.tenant}}-${{ github.ref_name }}-${{ env.RELEASE_ID }} tenant_environment=${{matrix.tenant}}" \ No newline at end of file diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml new file mode 100644 index 0000000..539d0f1 --- /dev/null +++ b/.github/workflows/releases.yml @@ -0,0 +1,304 @@ +name: CI at Devel Branch +run-name: ${{ github.actor }} is deploying at devel branch 🚀 +on: + push: + branches: + - develop +env: + RELEASE_ID: rc-${{ github.ref_name }}-${{ github.run_id }} + NODE_VERSION: 16.x +jobs: + configure: + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.set-matrix.outputs.matrix }} + steps: + - name: Checkout to repository + uses: actions/checkout@v3 + with: + repository: 'grnet/rciam-deploy-inv' + ref: 'master' + ssh-key: ${{ secrets.DEPLOY_READ_SECRET }} + path: 'inventory' + - name: Set matrix data + id: set-matrix + working-directory: inventory/rciam-metrics-dev/files/all/tenants + # https://www.jitsejan.com/use-github-actions-with-json-file-as-matrix + run: | + ls + cat ./config.json + echo 
"matrix=$(jq -c . < ./config.json)" >> $GITHUB_OUTPUT + checkout: + runs-on: ubuntu-latest + needs: configure + strategy: + matrix: ${{ fromJson(needs.configure.outputs.matrix) }} + steps: + - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event." + - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub!" + - run: echo "🔎 The name of your branch is ${{ github.ref_name }} and your repository is ${{ github.repository }}." + - name: Check out repository code + uses: actions/checkout@v3 + with: + path: 'metrics-app-${{matrix.tenant}}' + ref: 'develop' + - run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner." + - run: echo "💡 The ${{ github.sha }} commit processing started." + - run: echo "🖥️ The workflow is now ready to test your code on the runner." + - name: List files in the repository + run: | + ls ${{ github.workspace }} + - name: Download playbook + uses: actions/checkout@v3 + with: + # Repository name with owner. For example, actions/checkout + # Default: ${{ github.repository }} + repository: 'rciam/rciam-deploy' + ref: 'devel' + path: 'roles' + - name: Download inventory + uses: actions/checkout@v3 + with: + repository: 'grnet/rciam-deploy-inv' + ref: 'master' + ssh-key: ${{ secrets.DEPLOY_READ_SECRET }} + path: 'inventory' + - name: Run playbook (create react_config file) + uses: dawidd6/action-ansible-playbook@v2 + with: + # Required, playbook filepath + playbook: metricsservers.yml + # Optional, directory where playbooks live + directory: ./roles + key: ${{ secrets.DEPLOY_READ_SECRET }} + # Optional, encrypted vault password + vault_password: ${{secrets.VAULT_PASSWORD_DEVEL}} + options: | + --inventory ${{ github.workspace }}/inventory/rciam-metrics-dev/hosts.ini + --tags rciam-metrics:config-local + -u debian + - name: List files in the repository + # The tenant specific config file, i.e. 
config.tenant.environment.json, becomes plain config.json + # because the frontend lives under its own directory/path + run: | + ls -la ${{ github.workspace }}/inventory/rciam-metrics-dev/files + - name: Move tenant config file to config.json + # The tenant specific config file, i.e. config.tenant.environment.json, becomes plain config.json + run: | + mv ${{ github.workspace }}/inventory/rciam-metrics-dev/files/config.${{ matrix.tenant }}.json ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript/src/config.json + - name: List files in metrics-app-${{matrix.tenant}} javascript + # The tenant specific config file, i.e. config.tenant.environment.json, becomes plain config.json + run: | + ls -la ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript/src + - name: Share artifact inside workflow (frontend) + uses: actions/upload-artifact@v3 + with: + name: react-application + path: | + ${{ github.workspace }}/metrics-app-${{matrix.tenant}} + - name: Use Node.js ${{ env.NODE_VERSION }} + uses: actions/setup-node@v3 + with: + node-version: ${{ env.NODE_VERSION }} + - name: Install dependencies + run: | + cd ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript; npm install + - name: Build React application + run: | + cd ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript; CI=false npm run build + # Share artifact inside workflow + - name: List files in the repository + run: | + ls ${{ github.workspace }}/metrics-app-${{matrix.tenant}} + - name: Create release branch + run: cd ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/; git checkout -b ${{ env.RELEASE_ID }} + - name: Initialize mandatory git config + working-directory: ./metrics-app-${{matrix.tenant}} + run: | + git config user.name "GitHub Actions" + git config user.email noreply@github.com + - name: Push changes + uses: ad-m/github-push-action@master + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + branch: ${{ env.RELEASE_ID }} + directory: 
./metrics-app-${{matrix.tenant}} + - name: Share artifact inside workflow - ${{matrix.tenant}} + uses: actions/upload-artifact@v3 + with: + name: react-github-actions-build-${{matrix.tenant}} + path: | + ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript/build + ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript/CHANGELOG.md + ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/CHANGELOG.md + - name: Share artifact inside workflow - backend + uses: actions/upload-artifact@v3 + with: + name: react-github-actions-build-backend + path: | + ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/app + ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/requirements.txt + ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/CHANGELOG.md + - run: echo "🍏 This job's status is ${{ job.status }}." + frontend_release: + runs-on: ubuntu-latest + strategy: + matrix: ${{ fromJson(needs.configure.outputs.matrix) }} + # We specify that deploys needs to + # finish before we create a release + # Job outputs are available to all downstream jobs that depend on this job. 
+ needs: [checkout, configure] + steps: + # Download previously shared build + - name: Get artifact + uses: actions/download-artifact@v3 + with: + path: ./metrics-app-${{matrix.tenant}} + name: react-github-actions-build-${{matrix.tenant}} + - name: List files + run: | + pwd + ls -la ./ + ls -la ./metrics-app-${{matrix.tenant}}/ + ls -la ./metrics-app-${{matrix.tenant}}/javascript + ls -la ./metrics-app-${{matrix.tenant}}/javascript/build + - name: Compress action step + uses: a7ul/tar-action@v1.1.0 + id: compress + # The frontend release is only the javascript/build + # We need to include all the files and directories since + # the action is not smart enough to under asterisk (*) + with: + command: c + cwd: ./metrics-app-${{matrix.tenant}}/javascript/build + files: | + asset-manifest.json + index.html + static + outPath: frontend-${{matrix.tenant}}-release-build.tar.gz + # Upload as an artifact of the current workflow + - name: Upload frontend build zip artifact + uses: actions/upload-artifact@v1 + with: + name: frontend-${{matrix.tenant}}-release-build.tar.gz + path: frontend-${{matrix.tenant}}-release-build.tar.gz + - name: Create Release + uses: ncipollo/release-action@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + artifacts: "frontend-${{matrix.tenant}}-release-build.tar.gz" + bodyFile: "./metrics-app-${{matrix.tenant}}/javascript/CHANGELOG.md" + tag: "frontend-${{matrix.tenant}}-${{ github.ref_name }}-${{ env.RELEASE_ID }}" + makeLatest: true + generateReleaseNotes: true + backend_release: + runs-on: ubuntu-latest + # We specify that deploys needs to + # finish before we create a release + # Job outputs are available to all downstream jobs that depend on this job. 
+ needs: [checkout, configure] + steps: + # Download previously shared build + - name: Get artifact + uses: actions/download-artifact@v3 + with: + name: react-github-actions-build-backend + - name: Compress action step + uses: a7ul/tar-action@v1.1.0 + id: compress + with: + command: c + cwd: ./ + files: | + app/ + requirements.txt + outPath: backend-release-build.tar.gz + - name: Upload backend build zip artifact + uses: actions/upload-artifact@v1 + with: + name: backend-release-build.tar.gz + path: backend-release-build.tar.gz + - name: Create Release + uses: ncipollo/release-action@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + artifacts: "backend-release-build.tar.gz" + bodyFile: "CHANGELOG.md" + tag: "metrics-api-${{ github.ref_name }}-${{ env.RELEASE_ID }}" + makeLatest: true + generateReleaseNotes: true + deploy_backend: + runs-on: ubuntu-latest + # We specify that deploys needs to + # finish before we create a release + needs: backend_release + steps: + - name: Download playbook + uses: actions/checkout@v3 + with: + # Repository name with owner. 
For example, actions/checkout + # Default: ${{ github.repository }} + repository: 'rciam/rciam-deploy' + ref: 'devel' + path: 'roles' + - name: Download inventory + uses: actions/checkout@v3 + with: + repository: 'grnet/rciam-deploy-inv' + ref: 'master' + ssh-key: ${{ secrets.DEPLOY_READ_SECRET }} + path: 'inventory' + - name: Run playbook (deploy rciam-metrics) with release metrics-api-${{ github.ref_name }}-${{ env.RELEASE_ID }} + uses: dawidd6/action-ansible-playbook@v2 + with: + # Required, playbook filepath + playbook: metricsservers.yml + # Optional, directory where playbooks live + directory: ./roles + key: ${{ secrets.DEPLOY_READ_SECRET }} + # Optional, encrypted vault password + vault_password: ${{secrets.VAULT_PASSWORD_DEVEL}} + options: | + --inventory ${{ github.workspace }}/inventory/rciam-metrics-dev/hosts.ini + --tags rciam-metrics:deploy-backend + -u debian + --extra-vars "metrics_release=metrics-api-${{ github.ref_name }}-${{ env.RELEASE_ID }}" + deploy_frontend: + runs-on: ubuntu-latest + strategy: + matrix: ${{ fromJson(needs.configure.outputs.matrix) }} + # We specify that deploys needs to + # finish before we create a release + # Job outputs are available to all downstream jobs that depend on this job. + needs: [frontend_release, configure] + steps: + - name: Download playbook + uses: actions/checkout@v3 + with: + # Repository name with owner. 
For example, actions/checkout + # Default: ${{ github.repository }} + repository: 'rciam/rciam-deploy' + ref: 'devel' + path: 'roles' + - name: Download inventory + uses: actions/checkout@v3 + with: + repository: 'grnet/rciam-deploy-inv' + ref: 'master' + ssh-key: ${{ secrets.DEPLOY_READ_SECRET }} + path: 'inventory' + - name: Run playbook (deploy rciam-metrics) with release frontend-${{matrix.tenant}}-${{ github.ref_name }}-${{ env.RELEASE_ID }} + uses: dawidd6/action-ansible-playbook@v2 + with: + # Required, playbook filepath + playbook: metricsservers.yml + # Optional, directory where playbooks live + directory: ./roles + key: ${{ secrets.DEPLOY_READ_SECRET }} + # Optional, encrypted vault password + vault_password: ${{secrets.VAULT_PASSWORD_DEVEL}} + options: | + --inventory ${{ github.workspace }}/inventory/rciam-metrics-dev/hosts.ini + --tags rciam-metrics:deploy-frontend + -u debian + --extra-vars "metrics_release=frontend-${{matrix.tenant}}-${{ github.ref_name }}-${{ env.RELEASE_ID }} tenant_environment=${{matrix.tenant}}" \ No newline at end of file diff --git a/.gitignore b/.gitignore index bc2c358..7ec08dd 100644 --- a/.gitignore +++ b/.gitignore @@ -134,6 +134,7 @@ dmypy.json /node_modules .npm/** javascript/.npm/** +javascript/.bash_history .gnupg/** .idea/** .yarn/** @@ -141,3 +142,4 @@ javascript/.npm/** .vscode/ .DS_Store yarn-error.log + diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..7463db7 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,4 @@ +### Changelog + +## Prerelease logging +- Development RC phase \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 4831740..00256e5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ ARG PYTHON_IMAGE_REPO=python -FROM ${PYTHON_IMAGE_REPO}:3.8.15-bullseye +FROM ${PYTHON_IMAGE_REPO}:3.11.5-bookworm RUN curl -sL https://deb.nodesource.com/setup_18.x | sed "s/exec_cmd 'apt-get update'/exec_cmd 'apt-get --allow-releaseinfo-change update'/" | bash - RUN echo 
"deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list RUN curl https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - @@ -18,6 +18,8 @@ RUN apt-get -qq --allow-releaseinfo-change update \ # Create working directory ENV APP_HOME /app +ENV API_ENVIRONMENT dev + RUN mkdir -p $APP_HOME WORKDIR $APP_HOME @@ -26,14 +28,17 @@ ARG APP_GID=1000 RUN groupadd -g ${APP_GID} app RUN useradd -u ${APP_UID} -g ${APP_GID} -d $APP_HOME app +RUN echo $(python3 -m site --user-base) # set environment variables +ENV PATH $APP_HOME/.local/bin:${PATH} ENV PYTHONDONTWRITEBYTECODE 1 ENV PYTHONUNBUFFERED 1 ENV ENVIRONMENT dev ENV TESTING 0 #COPY requirements* $APP_HOME +RUN echo "fs.inotify.max_user_watches=524288" >> /etc/sysctl.conf USER app:app diff --git a/README.md b/README.md index 0354330..40e538f 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,4 @@ -# rciam metrics -RCIAM METRICS service +

+# RCIAM Metrics v0.1.0

## Install @@ -11,10 +10,10 @@ docker-compose pull ### Install python dependencies docker-compose run --rm --no-deps web pip install --upgrade pip -docker-compose run --rm --no-deps web pip install -r requirements.txt +docker-compose run --rm --no-deps web pip3 install --no-cache-dir -r requirements.txt ### Install nodejs dependencies -docker-compose run --rm --no-deps api npm install +docker-compose run --rm --no-deps api npm install --prefer-online ### Run Database deployment [//]: # (docker-compose run --rm web alembic revision --autogenerate -m 'Initial Migration') @@ -25,4 +24,4481 @@ docker-compose run --rm web alembic upgrade head [//]: # (docker-compose run --rm web python app/seed.py) ### Start the Service -docker-compose up api \ No newline at end of file +docker-compose up api + +## API Guide + + +

+# users

+ +## read_users_country_registered_users_country_get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /registered_users_country?tenenv_id=0 \ + -H 'Accept: application/json' + +``` + +```http +GET /registered_users_country?tenenv_id=0 HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/registered_users_country?tenenv_id=0', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/registered_users_country', + params: { + 'tenenv_id' => 'integer' +}, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/registered_users_country', params={ + 'tenenv_id': '0' +}, headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/registered_users_country', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/registered_users_country?tenenv_id=0"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/registered_users_country", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /registered_users_country` + +*Read Users Country* + +

+### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|offset|query|integer|false|none| +|startDate|query|string|false|none| +|endDate|query|string|false|none| +|tenenv_id|query|integer|true|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

+### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

+### Response Schema

+ + + +## read_users_country_groupby_registered_users_country_group_by__group_by__get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /registered_users_country_group_by/{group_by}?tenenv_id=0 \ + -H 'Accept: application/json' + +``` + +```http +GET /registered_users_country_group_by/{group_by}?tenenv_id=0 HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/registered_users_country_group_by/{group_by}?tenenv_id=0', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/registered_users_country_group_by/{group_by}', + params: { + 'tenenv_id' => 'integer' +}, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/registered_users_country_group_by/{group_by}', params={ + 'tenenv_id': '0' +}, headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/registered_users_country_group_by/{group_by}', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/registered_users_country_group_by/{group_by}?tenenv_id=0"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/registered_users_country_group_by/{group_by}", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /registered_users_country_group_by/{group_by}` + +*Read Users Country Groupby* + +

+### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|group_by|path|string|true|none| +|offset|query|integer|false|none| +|startDate|query|string|false|none| +|endDate|query|string|false|none| +|tenenv_id|query|integer|true|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

+### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

+### Response Schema

+ + + +## read_users_groupby_registered_users_groupby__group_by__get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /registered_users_groupby/{group_by}?tenenv_id=0 \ + -H 'Accept: application/json' + +``` + +```http +GET /registered_users_groupby/{group_by}?tenenv_id=0 HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/registered_users_groupby/{group_by}?tenenv_id=0', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/registered_users_groupby/{group_by}', + params: { + 'tenenv_id' => 'integer' +}, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/registered_users_groupby/{group_by}', params={ + 'tenenv_id': '0' +}, headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/registered_users_groupby/{group_by}', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/registered_users_groupby/{group_by}?tenenv_id=0"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/registered_users_groupby/{group_by}", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /registered_users_groupby/{group_by}` + +*Read Users Groupby* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|group_by|path|string|true|none| +|offset|query|integer|false|none| +|interval|query|string|false|none| +|count_interval|query|integer|false|none| +|startDate|query|string|false|none| +|endDate|query|string|false|none| +|tenenv_id|query|integer|true|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ + + +## read_users_countby_registered_users_countby_get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /registered_users_countby?tenenv_id=0 \ + -H 'Accept: application/json' + +``` + +```http +GET /registered_users_countby?tenenv_id=0 HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/registered_users_countby?tenenv_id=0', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/registered_users_countby', + params: { + 'tenenv_id' => 'integer' +}, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/registered_users_countby', params={ + 'tenenv_id': '0' +}, headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/registered_users_countby', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/registered_users_countby?tenenv_id=0"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/registered_users_countby", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /registered_users_countby` + +*Read Users Countby* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|offset|query|integer|false|none| +|interval|query|string|false|none| +|count_interval|query|integer|false|none| +|tenenv_id|query|integer|true|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ + + +

# communities

+ +## read_members_members__get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /members/ \ + -H 'Accept: application/json' + +``` + +```http +GET /members/ HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/members/', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/members/', + params: { + }, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/members/', headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/members/', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/members/"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/members/", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /members/` + +*Read Members* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|offset|query|integer|false|none| + +> Example responses + +> 200 Response + +```json +[ + { + "community_id": 0, + "hasheduserid": "string", + "status": "string", + "community_info": { + "name": "string", + "description": "string", + "source": "string", + "id": 0 + } + } +] +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ +Status Code **200** + +*Response Read Members Members Get* + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|Response Read Members Members Get|[[MembersReadWithCommunityInfo](#schemamembersreadwithcommunityinfo)]|false|none|none| +|» MembersReadWithCommunityInfo|[MembersReadWithCommunityInfo](#schemamembersreadwithcommunityinfo)|false|none|none| +|»» community_id|integer|true|none|none| +|»» hasheduserid|string|true|none|none| +|»» status|string|true|none|none| +|»» community_info|[Community_InfoRead](#schemacommunity_inforead)|true|none|none| +|»»» name|string|true|none|none| +|»»» description|string|true|none|none| +|»»» source|string|true|none|none| +|»»» id|integer|true|none|none| + + + +## read_members_bystatus_members_bystatus__get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /members_bystatus/?tenenv_id=0 \ + -H 'Accept: application/json' + +``` + +```http +GET /members_bystatus/?tenenv_id=0 HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/members_bystatus/?tenenv_id=0', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/members_bystatus/', + params: { + 'tenenv_id' => 'integer' +}, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/members_bystatus/', params={ + 'tenenv_id': '0' +}, headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. 
+$request_body = array(); + +try { + $response = $client->request('GET','/members_bystatus/', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... + +``` + +```java +URL obj = new URL("/members_bystatus/?tenenv_id=0"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/members_bystatus/", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /members_bystatus/` + +*Read Members Bystatus* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|offset|query|integer|false|none| +|community_id|query|integer|false|none| +|tenenv_id|query|integer|true|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ + + +## read_communities_communities_groupby__group_by__get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /communities_groupby/{group_by}?tenenv_id=0 \ + -H 'Accept: application/json' + +``` + +```http +GET /communities_groupby/{group_by}?tenenv_id=0 HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/communities_groupby/{group_by}?tenenv_id=0', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/communities_groupby/{group_by}', + params: { + 'tenenv_id' => 'integer' +}, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/communities_groupby/{group_by}', params={ + 'tenenv_id': '0' +}, headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/communities_groupby/{group_by}', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/communities_groupby/{group_by}?tenenv_id=0"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/communities_groupby/{group_by}", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /communities_groupby/{group_by}` + +*Read Communities* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|group_by|path|string|true|none| +|offset|query|integer|false|none| +|tenenv_id|query|integer|true|none| +|interval|query|string|false|none| +|count_interval|query|integer|false|none| +|startDate|query|string|false|none| +|endDate|query|string|false|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ + + +## read_community_communities__get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /communities/?tenenv_id=0 \ + -H 'Accept: application/json' + +``` + +```http +GET /communities/?tenenv_id=0 HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/communities/?tenenv_id=0', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/communities/', + params: { + 'tenenv_id' => 'integer' +}, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/communities/', params={ + 'tenenv_id': '0' +}, headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/communities/', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/communities/?tenenv_id=0"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/communities/", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /communities/` + +*Read Community* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|community_id|query|integer|false|none| +|tenenv_id|query|integer|true|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ + + +## read_communities_info_communities_info__get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /communities_info/ \ + -H 'Accept: application/json' + +``` + +```http +GET /communities_info/ HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/communities_info/', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/communities_info/', + params: { + }, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/communities_info/', headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/communities_info/', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/communities_info/"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/communities_info/", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /communities_info/` + +*Read Communities Info* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|offset|query|integer|false|none| + +> Example responses + +> 200 Response + +```json +[ + { + "name": "string", + "description": "string", + "source": "string", + "id": 0 + } +] +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ +Status Code **200** + +*Response Read Communities Info Communities Info Get* + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|Response Read Communities Info Communities Info Get|[[Community_InfoRead](#schemacommunity_inforead)]|false|none|none| +|» Community_InfoRead|[Community_InfoRead](#schemacommunity_inforead)|false|none|none| +|»» name|string|true|none|none| +|»» description|string|true|none|none| +|»» source|string|true|none|none| +|»» id|integer|true|none|none| + + + +

# countries

+ +## read_countries_countries__get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /countries/ \ + -H 'Accept: application/json' + +``` + +```http +GET /countries/ HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/countries/', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/countries/', + params: { + }, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/countries/', headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/countries/', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/countries/"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/countries/", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /countries/` + +*Read Countries* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|offset|query|integer|false|none| +|tag|query|string|false|none| +|skip|query|boolean|false|none| + +> Example responses + +> 200 Response + +```json +[ + { + "countrycode": "string", + "country": "string", + "id": 0 + } +] +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ +Status Code **200** + +*Response Read Countries Countries Get* + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|Response Read Countries Countries Get|[[Country_CodesRead](#schemacountry_codesread)]|false|none|none| +|» Country_CodesRead|[Country_CodesRead](#schemacountry_codesread)|false|none|none| +|»» countrycode|string|true|none|none| +|»» country|string|true|none|none| +|»» id|integer|true|none|none| + + + +## read_country_stats_country_stats__get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /country_stats/ \ + -H 'Accept: application/json' + +``` + +```http +GET /country_stats/ HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/country_stats/', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/country_stats/', + params: { + }, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/country_stats/', headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/country_stats/', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/country_stats/"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/country_stats/", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /country_stats/` + +*Read Country Stats* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|offset|query|integer|false|none| +|tag|query|string|false|none| +|skip|query|boolean|false|none| + +> Example responses + +> 200 Response + +```json +[ + { + "date": "2019-08-24", + "hasheduserid": "string", + "sourceidpid": 0, + "serviceid": 0, + "countryid": 0, + "count": 0, + "identityprovider_info": { + "entityid": "string", + "name": "string", + "id": 0 + }, + "serviceprovider_info": { + "identifier": "string", + "name": "string", + "id": 0 + }, + "country_info": { + "countrycode": "string", + "country": "string", + "id": 0 + } + } +] +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ +Status Code **200** + +*Response Read Country Stats Country Stats Get* + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|Response Read Country Stats Country Stats Get|[[Statistics_Country_HashedwithInfo](#schemastatistics_country_hashedwithinfo)]|false|none|none| +|» Statistics_Country_HashedwithInfo|[Statistics_Country_HashedwithInfo](#schemastatistics_country_hashedwithinfo)|false|none|none| +|»» date|string(date)|true|none|none| +|»» hasheduserid|string|true|none|none| +|»» sourceidpid|integer|true|none|none| +|»» serviceid|integer|true|none|none| +|»» countryid|integer|true|none|none| +|»» count|integer|true|none|none| +|»» identityprovider_info|[IdentityprovidersmapRead](#schemaidentityprovidersmapread)|false|none|none| +|»»» entityid|string|true|none|none| +|»»» name|string|true|none|none| +|»»» id|integer|true|none|none| +|»» serviceprovider_info|[ServiceprovidersmapRead](#schemaserviceprovidersmapread)|false|none|none| +|»»» identifier|string|true|none|none| +|»»» name|string|true|none|none| +|»»» id|integer|true|none|none| +|»» country_info|[Country_CodesRead](#schemacountry_codesread)|false|none|none| +|»»» countrycode|string|true|none|none| +|»»» country|string|true|none|none| +|»»» id|integer|true|none|none| + + + +## read_country_stats_by_vo_country_stats_by_vo__community_id__get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /country_stats_by_vo/{community_id} \ + -H 'Accept: application/json' + +``` + +```http +GET /country_stats_by_vo/{community_id} HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/country_stats_by_vo/{community_id}', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get 
'/country_stats_by_vo/{community_id}', + params: { + }, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/country_stats_by_vo/{community_id}', headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/country_stats_by_vo/{community_id}', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... + +``` + +```java +URL obj = new URL("/country_stats_by_vo/{community_id}"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/country_stats_by_vo/{community_id}", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /country_stats_by_vo/{community_id}` + +*Read Country Stats By Vo* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|community_id|path|integer|true|none| +|offset|query|integer|false|none| +|tag|query|string|false|none| +|skip|query|boolean|false|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ + + +

# logins

+ +## read_logins_per_idp_logins_per_idp_get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /logins_per_idp?tenenv_id=0 \ + -H 'Accept: application/json' + +``` + +```http +GET /logins_per_idp?tenenv_id=0 HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/logins_per_idp?tenenv_id=0', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/logins_per_idp', + params: { + 'tenenv_id' => 'integer' +}, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/logins_per_idp', params={ + 'tenenv_id': '0' +}, headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/logins_per_idp', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/logins_per_idp?tenenv_id=0"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/logins_per_idp", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /logins_per_idp` + +*Read Logins Per Idp* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|offset|query|integer|false|none| +|sp|query|string|false|none| +|startDate|query|string|false|none| +|endDate|query|string|false|none| +|tenenv_id|query|integer|true|none| +|unique_logins|query|boolean|false|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ + + +## read_logins_per_sp_logins_per_sp_get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /logins_per_sp?tenenv_id=0 \ + -H 'Accept: application/json' + +``` + +```http +GET /logins_per_sp?tenenv_id=0 HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/logins_per_sp?tenenv_id=0', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/logins_per_sp', + params: { + 'tenenv_id' => 'integer' +}, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/logins_per_sp', params={ + 'tenenv_id': '0' +}, headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/logins_per_sp', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/logins_per_sp?tenenv_id=0"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/logins_per_sp", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /logins_per_sp` + +*Read Logins Per Sp* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|offset|query|integer|false|none| +|idp|query|string|false|none| +|startDate|query|string|false|none| +|endDate|query|string|false|none| +|tenenv_id|query|integer|true|none| +|unique_logins|query|boolean|false|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ + + +## read_logins_per_country_logins_per_country_get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /logins_per_country?tenenv_id=0 \ + -H 'Accept: application/json' + +``` + +```http +GET /logins_per_country?tenenv_id=0 HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/logins_per_country?tenenv_id=0', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/logins_per_country', + params: { + 'tenenv_id' => 'integer' +}, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/logins_per_country', params={ + 'tenenv_id': '0' +}, headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/logins_per_country', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/logins_per_country?tenenv_id=0"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/logins_per_country", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /logins_per_country` + +*Read Logins Per Country* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|offset|query|integer|false|none| +|group_by|query|string|false|none| +|startDate|query|string|false|none| +|endDate|query|string|false|none| +|tenenv_id|query|integer|true|none| +|unique_logins|query|boolean|false|none| +|idpId|query|integer|false|none| +|spId|query|integer|false|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ + + +## read_logins_countby_logins_countby_get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /logins_countby?tenenv_id=0 \ + -H 'Accept: application/json' + +``` + +```http +GET /logins_countby?tenenv_id=0 HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/logins_countby?tenenv_id=0', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/logins_countby', + params: { + 'tenenv_id' => 'integer' +}, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/logins_countby', params={ + 'tenenv_id': '0' +}, headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/logins_countby', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/logins_countby?tenenv_id=0"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/logins_countby", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /logins_countby` + +*Read Logins Countby* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|offset|query|integer|false|none| +|interval|query|string|false|none| +|count_interval|query|integer|false|none| +|tenenv_id|query|integer|true|none| +|unique_logins|query|boolean|false|none| +|idpId|query|integer|false|none| +|spId|query|integer|false|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ + + +## read_logins_groupby_logins_groupby__group_by__get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /logins_groupby/{group_by}?tenenv_id=0 \ + -H 'Accept: application/json' + +``` + +```http +GET /logins_groupby/{group_by}?tenenv_id=0 HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/logins_groupby/{group_by}?tenenv_id=0', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/logins_groupby/{group_by}', + params: { + 'tenenv_id' => 'integer' +}, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/logins_groupby/{group_by}', params={ + 'tenenv_id': '0' +}, headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/logins_groupby/{group_by}', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/logins_groupby/{group_by}?tenenv_id=0"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/logins_groupby/{group_by}", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /logins_groupby/{group_by}` + +*Read Logins Groupby* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|group_by|path|string|true|none| +|offset|query|integer|false|none| +|idp|query|string|false|none| +|sp|query|string|false|none| +|tenenv_id|query|integer|true|none| +|unique_logins|query|boolean|false|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ + + +

# dashboard

+ +## read_tenenv_byname_tenenv__tenant_name___environment_name__get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /tenenv/{tenant_name}/{environment_name} \ + -H 'Accept: application/json' + +``` + +```http +GET /tenenv/{tenant_name}/{environment_name} HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/tenenv/{tenant_name}/{environment_name}', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/tenenv/{tenant_name}/{environment_name}', + params: { + }, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/tenenv/{tenant_name}/{environment_name}', headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/tenenv/{tenant_name}/{environment_name}', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/tenenv/{tenant_name}/{environment_name}"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/tenenv/{tenant_name}/{environment_name}", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /tenenv/{tenant_name}/{environment_name}` + +*Read Tenenv Byname* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|tenant_name|path|string|true|none| +|environment_name|path|string|true|none| +|offset|query|integer|false|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ + + +## read_environment_byname_environment_byname__environment_name__get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /environment_byname/{environment_name} \ + -H 'Accept: application/json' + +``` + +```http +GET /environment_byname/{environment_name} HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/environment_byname/{environment_name}', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/environment_byname/{environment_name}', + params: { + }, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/environment_byname/{environment_name}', headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/environment_byname/{environment_name}', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/environment_byname/{environment_name}"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/environment_byname/{environment_name}", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /environment_byname/{environment_name}` + +*Read Environment Byname* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|environment_name|path|string|true|none| +|offset|query|integer|false|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ + + +## read_idps_idps_get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /idps?tenenv_id=0 \ + -H 'Accept: application/json' + +``` + +```http +GET /idps?tenenv_id=0 HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/idps?tenenv_id=0', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/idps', + params: { + 'tenenv_id' => 'integer' +}, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/idps', params={ + 'tenenv_id': '0' +}, headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/idps', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/idps?tenenv_id=0"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/idps", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /idps` + +*Read Idps* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|tenenv_id|query|integer|true|none| +|idpId|query|integer|false|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ + + +## read_sps_sps_get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /sps?tenenv_id=0 \ + -H 'Accept: application/json' + +``` + +```http +GET /sps?tenenv_id=0 HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/sps?tenenv_id=0', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/sps', + params: { + 'tenenv_id' => 'integer' +}, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/sps', params={ + 'tenenv_id': '0' +}, headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/sps', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/sps?tenenv_id=0"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/sps", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /sps` + +*Read Sps* + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|tenenv_id|query|integer|true|none| +|spId|query|integer|false|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ + + +

# ams

+ +## get_verification_ams_stats_ams_verification_hash_get + + + +> Code samples + +```shell +# You can also use wget +curl -X GET /ams_stats/ams_verification_hash \ + -H 'Accept: application/json' + +``` + +```http +GET /ams_stats/ams_verification_hash HTTP/1.1 + +Accept: application/json + +``` + +```javascript + +const headers = { + 'Accept':'application/json' +}; + +fetch('/ams_stats/ams_verification_hash', +{ + method: 'GET', + + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Accept' => 'application/json' +} + +result = RestClient.get '/ams_stats/ams_verification_hash', + params: { + }, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Accept': 'application/json' +} + +r = requests.get('/ams_stats/ams_verification_hash', headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('GET','/ams_stats/ams_verification_hash', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/ams_stats/ams_verification_hash"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("GET"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Accept": []string{"application/json"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("GET", "/ams_stats/ams_verification_hash", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`GET /ams_stats/ams_verification_hash` + +*Get Verification* + +> Example responses + +> 200 Response + +```json +null +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| + +

### Response Schema

+ + + +## get_ams_stats_ams_stats_post + + + +> Code samples + +```shell +# You can also use wget +curl -X POST /ams_stats \ + -H 'Content-Type: application/json' \ + -H 'Accept: application/json' \ + -H 'Authorization: string' + +``` + +```http +POST /ams_stats HTTP/1.1 + +Content-Type: application/json +Accept: application/json +Authorization: string + +``` + +```javascript +const inputBody = 'null'; +const headers = { + 'Content-Type':'application/json', + 'Accept':'application/json', + 'Authorization':'string' +}; + +fetch('/ams_stats', +{ + method: 'POST', + body: inputBody, + headers: headers +}) +.then(function(res) { + return res.json(); +}).then(function(body) { + console.log(body); +}); + +``` + +```ruby +require 'rest-client' +require 'json' + +headers = { + 'Content-Type' => 'application/json', + 'Accept' => 'application/json', + 'Authorization' => 'string' +} + +result = RestClient.post '/ams_stats', + params: { + }, headers: headers + +p JSON.parse(result) + +``` + +```python +import requests +headers = { + 'Content-Type': 'application/json', + 'Accept': 'application/json', + 'Authorization': 'string' +} + +r = requests.post('/ams_stats', headers = headers) + +print(r.json()) + +``` + +```php + 'application/json', + 'Accept' => 'application/json', + 'Authorization' => 'string', +); + +$client = new \GuzzleHttp\Client(); + +// Define array of request body. +$request_body = array(); + +try { + $response = $client->request('POST','/ams_stats', array( + 'headers' => $headers, + 'json' => $request_body, + ) + ); + print_r($response->getBody()->getContents()); + } + catch (\GuzzleHttp\Exception\BadResponseException $e) { + // handle exception or api errors. + print_r($e->getMessage()); + } + + // ... 
+ +``` + +```java +URL obj = new URL("/ams_stats"); +HttpURLConnection con = (HttpURLConnection) obj.openConnection(); +con.setRequestMethod("POST"); +int responseCode = con.getResponseCode(); +BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); +String inputLine; +StringBuffer response = new StringBuffer(); +while ((inputLine = in.readLine()) != null) { + response.append(inputLine); +} +in.close(); +System.out.println(response.toString()); + +``` + +```go +package main + +import ( + "bytes" + "net/http" +) + +func main() { + + headers := map[string][]string{ + "Content-Type": []string{"application/json"}, + "Accept": []string{"application/json"}, + "Authorization": []string{"string"}, + } + + data := bytes.NewBuffer([]byte{jsonReq}) + req, err := http.NewRequest("POST", "/ams_stats", data) + req.Header = headers + + client := &http.Client{} + resp, err := client.Do(req) + // ... +} + +``` + +`POST /ams_stats` + +*Get Ams Stats* + +> Body parameter + +```json +null +``` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|Authorization|header|string|false|none| +|body|body|any|true|none| + +> Example responses + +> 200 Response + +```json +null +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline| +|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)| + +

### Response Schema

+ + + +# Schemas + +

<a id="schemacommunity_inforead"></a>

## Community_InfoRead

+ + + + + + +```json +{ + "name": "string", + "description": "string", + "source": "string", + "id": 0 +} + +``` + +Community_InfoRead + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|name|string|true|none|none| +|description|string|true|none|none| +|source|string|true|none|none| +|id|integer|true|none|none| + +
<a id="schemacountry_codesread"></a>
## Country_CodesRead

+ + + + + + +```json +{ + "countrycode": "string", + "country": "string", + "id": 0 +} + +``` + +Country_CodesRead + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|countrycode|string|true|none|none| +|country|string|true|none|none| +|id|integer|true|none|none| + +
<a id="schemahttpvalidationerror"></a>
## HTTPValidationError

+ + + + + + +```json +{ + "detail": [ + { + "loc": [ + "string" + ], + "msg": "string", + "type": "string" + } + ] +} + +``` + +HTTPValidationError + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|detail|[[ValidationError](#schemavalidationerror)]|false|none|none| + +
<a id="schemaidentityprovidersmapread"></a>
## IdentityprovidersmapRead

+ + + + + + +```json +{ + "entityid": "string", + "name": "string", + "id": 0 +} + +``` + +IdentityprovidersmapRead + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|entityid|string|true|none|none| +|name|string|true|none|none| +|id|integer|true|none|none| + +
<a id="schemamembersreadwithcommunityinfo"></a>
## MembersReadWithCommunityInfo

+ + + + + + +```json +{ + "community_id": 0, + "hasheduserid": "string", + "status": "string", + "community_info": { + "name": "string", + "description": "string", + "source": "string", + "id": 0 + } +} + +``` + +MembersReadWithCommunityInfo + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|community_id|integer|true|none|none| +|hasheduserid|string|true|none|none| +|status|string|true|none|none| +|community_info|[Community_InfoRead](#schemacommunity_inforead)|true|none|none| + +
<a id="schemaserviceprovidersmapread"></a>
## ServiceprovidersmapRead

+ + + + + + +```json +{ + "identifier": "string", + "name": "string", + "id": 0 +} + +``` + +ServiceprovidersmapRead + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|identifier|string|true|none|none| +|name|string|true|none|none| +|id|integer|true|none|none| + +
<a id="schemastatistics_country_hashedwithinfo"></a>
## Statistics_Country_HashedwithInfo

+ + + + + + +```json +{ + "date": "2019-08-24", + "hasheduserid": "string", + "sourceidpid": 0, + "serviceid": 0, + "countryid": 0, + "count": 0, + "identityprovider_info": { + "entityid": "string", + "name": "string", + "id": 0 + }, + "serviceprovider_info": { + "identifier": "string", + "name": "string", + "id": 0 + }, + "country_info": { + "countrycode": "string", + "country": "string", + "id": 0 + } +} + +``` + +Statistics_Country_HashedwithInfo + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|date|string(date)|true|none|none| +|hasheduserid|string|true|none|none| +|sourceidpid|integer|true|none|none| +|serviceid|integer|true|none|none| +|countryid|integer|true|none|none| +|count|integer|true|none|none| +|identityprovider_info|[IdentityprovidersmapRead](#schemaidentityprovidersmapread)|false|none|none| +|serviceprovider_info|[ServiceprovidersmapRead](#schemaserviceprovidersmapread)|false|none|none| +|country_info|[Country_CodesRead](#schemacountry_codesread)|false|none|none| + +
<a id="schemavalidationerror"></a>
## ValidationError

+ + + + + + +```json +{ + "loc": [ + "string" + ], + "msg": "string", + "type": "string" +} + +``` + +ValidationError + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|loc|[anyOf]|true|none|none| + +anyOf + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|» *anonymous*|string|false|none|none| + +or + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|» *anonymous*|integer|false|none|none| + +continued + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|msg|string|true|none|none| +|type|string|true|none|none| + diff --git a/app/auth/__init__.py b/app/auth/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/auth/auth.py b/app/auth/auth.py new file mode 100644 index 0000000..ed8759d --- /dev/null +++ b/app/auth/auth.py @@ -0,0 +1,260 @@ +""" +Module for validating Open ID Connect tokens. +Usage +===== +.. code-block:: python3 + # This assumes you've already configured Auth in your_app/auth.py + from your_app.auth import auth + @app.get("/auth") + def test_auth(authenticated_user: IDToken = Security(auth.required)): + return f"Hello {authenticated_user.preferred_username}" +""" + +from typing import List +from typing import Optional +from typing import Type + +from fastapi import Depends +from fastapi import HTTPException +from fastapi import Request +from fastapi import status +from fastapi.openapi.models import OAuthFlowAuthorizationCode +from fastapi.openapi.models import OAuthFlowClientCredentials +from fastapi.openapi.models import OAuthFlowImplicit +from fastapi.openapi.models import OAuthFlowPassword +from fastapi.openapi.models import OAuthFlows +from fastapi.security import HTTPAuthorizationCredentials +from fastapi.security import HTTPBearer +from fastapi.security import OAuth2 +from fastapi.security import SecurityScopes +from jose import ExpiredSignatureError +from jose import JWTError +from jose import jwt +from jose.exceptions 
import JWTClaimsError + +from app.auth import discovery +from app.auth.grant_types import GrantType +from app.auth.idtoken_types import IDToken + + +class Auth(OAuth2): + def __init__( + self, + openid_connect_url: str, + issuer: Optional[str] = None, + client_id: Optional[str] = None, + redirect_uri: Optional[str] = None, + scopes: List[str] = list(), + grant_types: List[GrantType] = [GrantType.IMPLICIT], + signature_cache_ttl: int = 3600, + idtoken_model: Type[IDToken] = IDToken, + ): + """Configure authentication :func:`auth = Auth(...) ` and then: + 1. Show authentication in the interactive docs with :func:`Depends(auth) ` + when setting up FastAPI. + 2. Use :func:`Security(auth.required) ` or + :func:`Security(auth.optional) ` in your endpoints to + check user credentials. + Args: + openid_connect_url (URL): URL to the "well known" openid connect config + e.g. https://dev-123456.okta.com/.well-known/openid-configuration + issuer (URL): (Optional) The issuer URL from your auth server. + client_id (str): (Optional) The client_id configured by your auth server. + scopes (Dict[str, str]): (Optional) A dictionary of scopes and their descriptions. + grant_types (List[GrantType]): (Optional) Grant types shown in docs. + signature_cache_ttl (int): (Optional) How many seconds your app should + cache the authorization server's public signatures. + idtoken_model (Type): (Optional) The model to use for validating the ID Token. 
+ Raises: + Nothing intentional + """ + + self.openid_connect_url = openid_connect_url + self.issuer = issuer + self.client_id = client_id + self.idtoken_model = idtoken_model + self.scopes = scopes + self.redirect_uri = redirect_uri + + self.discover = discovery.configure(cache_ttl=signature_cache_ttl) + oidc_discoveries = self.discover.auth_server( + openid_connect_url=self.openid_connect_url + ) + scopes_dict = { + scope: "" for scope in self.discover.supported_scopes(oidc_discoveries) + } + + flows = OAuthFlows() + if GrantType.AUTHORIZATION_CODE in grant_types: + flows.authorizationCode = OAuthFlowAuthorizationCode( + authorizationUrl=self.discover.authorization_url(oidc_discoveries), + tokenUrl=self.discover.token_url(oidc_discoveries), + scopes=scopes_dict, + ) + + if GrantType.CLIENT_CREDENTIALS in grant_types: + flows.clientCredentials = OAuthFlowClientCredentials( + tokenUrl=self.discover.token_url(oidc_discoveries), + scopes=scopes_dict, + ) + + if GrantType.PASSWORD in grant_types: + flows.password = OAuthFlowPassword( + tokenUrl=self.discover.token_url(oidc_discoveries), + scopes=scopes_dict, + ) + + if GrantType.IMPLICIT in grant_types: + flows.implicit = OAuthFlowImplicit( + authorizationUrl=self.discover.authorization_url(oidc_discoveries), + scopes=scopes_dict, + ) + + super().__init__( + scheme_name="OIDC", + flows=flows, + auto_error=False, + ) + + async def __call__(self, request: Request) -> None: + return None + + def required( + self, + security_scopes: SecurityScopes, + authorization_credentials: Optional[HTTPAuthorizationCredentials] = Depends( + HTTPBearer() + ), + ) -> IDToken: + """Validate and parse OIDC ID token against configuration. + Note this function caches the signatures and algorithms of the issuing + server for signature_cache_ttl seconds. + Args: + security_scopes (SecurityScopes): Security scopes + auth_header (str): Base64 encoded OIDC Token. This is invoked + behind the scenes by Depends. 
+ Return: + IDToken (self.idtoken_model): User information + raises: + HTTPException(status_code=401, detail=f"Unauthorized: {err}") + IDToken validation errors + """ + + id_token = self.authenticate_user( + security_scopes, + authorization_credentials, + auto_error=True, + ) + if id_token is None: + raise HTTPException(status.HTTP_401_UNAUTHORIZED) + else: + return id_token + + def optional( + self, + security_scopes: SecurityScopes, + authorization_credentials: Optional[HTTPAuthorizationCredentials] = Depends( + HTTPBearer(auto_error=False) + ), + ) -> Optional[IDToken]: + """Optionally validate and parse OIDC ID token against configuration. + Will not raise if the user is not authenticated. Note this function + caches the signatures and algorithms of the issuing server for + signature_cache_ttl seconds. + Args: + security_scopes (SecurityScopes): Security scopes + auth_header (str): Base64 encoded OIDC Token. This is invoked + behind the scenes by Depends. + Return: + IDToken (self.idtoken_model): User information + raises: + IDToken validation errors + """ + + return self.authenticate_user( + security_scopes, + authorization_credentials, + auto_error=False, + ) + + def authenticate_user( + self, + security_scopes: SecurityScopes, + authorization_credentials: Optional[HTTPAuthorizationCredentials], + auto_error: bool, + ) -> Optional[IDToken]: + """Validate and parse OIDC ID token against against configuration. + Note this function caches the signatures and algorithms of the issuing server + for signature_cache_ttl seconds. + Args: + security_scopes (SecurityScopes): Security scopes + auth_header (str): Base64 encoded OIDC Token + auto_error (bool): If True, will raise an HTTPException if the user + is not authenticated. 
+ Return: + IDToken (self.idtoken_model): User information + raises: + HTTPException(status_code=401, detail=f"Unauthorized: {err}") + """ + + if ( + authorization_credentials is None + or authorization_credentials.scheme.lower() != "bearer" + ): + if auto_error: + raise HTTPException( + status.HTTP_401_UNAUTHORIZED, detail="Missing bearer token" + ) + else: + return None + + oidc_discoveries = self.discover.auth_server( + openid_connect_url=self.openid_connect_url + ) + key = self.discover.public_keys(oidc_discoveries) + algorithms = self.discover.signing_algos(oidc_discoveries) + + try: + id_token = jwt.decode( + authorization_credentials.credentials, + key, + algorithms, + issuer=self.issuer, + audience=self.client_id, + options={ + # Disabled at_hash check since we aren't using the access token + "verify_at_hash": False, + "verify_iss": self.issuer is not None, + "verify_aud": self.client_id is not None, + }, + ) + + print(id_token) + + # XXX The aud should always be present? + if ( + "aud" in id_token + and type(id_token["aud"]) == list + and len(id_token["aud"]) >= 1 + and "azp" not in id_token + ): + raise JWTError( + 'Missing authorized party "azp" in IDToken when there ' + "are multiple audiences" + ) + + except (ExpiredSignatureError, JWTError, JWTClaimsError) as error: + raise HTTPException(status_code=401, detail=f"Unauthorized: {error}") + + expected_scopes = set(self.scopes + security_scopes.scopes) + token_scopes = id_token.get("scope", "").split(" ") + if not expected_scopes.issubset(token_scopes): + raise HTTPException( + status.HTTP_401_UNAUTHORIZED, + detail=( + f"Missing scope token, expected {expected_scopes} to be a " + f"subset of received {token_scopes}", + ), + ) + + return self.idtoken_model(**id_token) \ No newline at end of file diff --git a/app/auth/discovery.py b/app/auth/discovery.py new file mode 100644 index 0000000..af5aefe --- /dev/null +++ b/app/auth/discovery.py @@ -0,0 +1,49 @@ +from typing import Dict +import requests +from 
cachetools import TTLCache +from cachetools import cached +from threading import Lock + + +def configure(*_, cache_ttl: int): + @cached(TTLCache(1, cache_ttl), key=lambda d: d["jwks_uri"], lock=Lock()) + def get_authentication_server_public_keys(OIDC_spec: Dict): + """ + Retrieve the public keys used by the authentication server + for signing OIDC ID tokens. + """ + keys_uri = OIDC_spec["jwks_uri"] + r = requests.get(keys_uri) + keys = r.json() + return keys + + def get_signing_algos(OIDC_spec: Dict): + algos = OIDC_spec["id_token_signing_alg_values_supported"] + return algos + + @cached(TTLCache(1, cache_ttl), lock=Lock()) + def discover_auth_server(*_, openid_connect_url: str) -> Dict: + r = requests.get(openid_connect_url) + # Raise if the auth server is failing since we can't verify tokens + r.raise_for_status() + configuration = r.json() + return configuration + + def get_authorization_url(OIDC_spec: Dict) -> str: + return OIDC_spec["authorization_endpoint"] + + def get_token_url(OIDC_spec: Dict) -> str: + return OIDC_spec["token_endpoint"] + + def get_supported_scopes(OIDC_spec: Dict) -> str: + return OIDC_spec["scopes_supported"] + + class functions: + auth_server = discover_auth_server + public_keys = get_authentication_server_public_keys + signing_algos = get_signing_algos + authorization_url = get_authorization_url + token_url = get_token_url + supported_scopes = get_supported_scopes + + return functions diff --git a/app/auth/grant_types.py b/app/auth/grant_types.py new file mode 100644 index 0000000..7db2df7 --- /dev/null +++ b/app/auth/grant_types.py @@ -0,0 +1,10 @@ +from enum import Enum + + +class GrantType(str, Enum): + """Grant types that can be used in the interactive documentation.""" + + AUTHORIZATION_CODE = "authorization_code" + CLIENT_CREDENTIALS = "client_credentials" + IMPLICIT = "implicit" + PASSWORD = "password" # nosec \ No newline at end of file diff --git a/app/auth/idtoken_types.py b/app/auth/idtoken_types.py new file mode 100644 
index 0000000..313bd5e --- /dev/null +++ b/app/auth/idtoken_types.py @@ -0,0 +1,54 @@ +from typing import List +from typing import Union + +from pydantic import BaseModel +from pydantic import Extra + + +class IDToken(BaseModel): + """Pydantic model representing an OIDC ID Token. + ID Tokens are polymorphic and may have many attributes not defined in the spec thus this model accepts + all addition fields. Only required fields are listed in the attributes section of this docstring or + enforced by pydantic. + See the specifications here. https://openid.net/specs/openid-connect-core-1_0.html#IDToken + Parameters: + iss (str): Issuer Identifier for the Issuer of the response. + sub (str): Subject Identifier. + aud (Union[str, List[str]]): Audience(s) that this ID Token is intended for. + exp (str): Expiration time on or after which the ID Token MUST NOT be accepted for processing. + iat (iat): Time at which the JWT was issued. + """ + + iss: str + sub: str + aud: Union[str, List[str]] + exp: int + iat: int + + class Config: + extra = Extra.allow + + +class OktaIDToken(IDToken): + """Pydantic Model for the IDToken returned by Okta's OIDC implementation.""" + + auth_time: int + ver: int + jti: str + amr: List[str] + idp: str + nonce: str + at_hash: str + name: str + email: str + preferred_username: str + + +class KeycloakIDToken(IDToken): + """Pydantic Model for the IDToken returned by Keycloak's OIDC implementation.""" + + jti: str + name: str + email: str + email_verified: bool + preferred_username: str diff --git a/app/database.py b/app/database.py index d9b5955..b67e276 100644 --- a/app/database.py +++ b/app/database.py @@ -1,12 +1,13 @@ -import os +from app.utils import configParser +from sqlmodel import create_engine, Session -from sqlmodel import create_engine, SQLModel, Session - -# Initialize -url = os.getenv('DATABASE_URL') -engine = create_engine(url) def get_session(): + # Initialize + config_file = 'config.global.py' + + url = 
configParser.getConfig('database_parameters', config_file)['database_url'] + engine = create_engine(url) + with Session(engine) as session: yield session - diff --git a/app/ingester/__init__.py b/app/ingester/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/ingester/communityIngester.py b/app/ingester/communityIngester.py new file mode 100644 index 0000000..915590f --- /dev/null +++ b/app/ingester/communityIngester.py @@ -0,0 +1,88 @@ +from app.logger import log +from ..database import get_session +from sqlalchemy.exc import NoResultFound +from .utilsIngester import utilsIngester + + +class CommunityDataIngester: + logger = log.get_logger("CommunityDataIngester") + + @classmethod + def getCommunityId(cls, communityName, tenenvId, session): + # Check if community exists + try: + communityId = session.exec( + """ + SELECT id FROM community_info + WHERE name = '{0}' AND tenenv_id={1} + """.format( + communityName, tenenvId + ) + ).one() + except NoResultFound: + cls.logger.error("""Community with name {0} + not found for + tenenvId {1}""".format(communityName, + tenenvId)) + communityId = None + return communityId + + @classmethod + def ingestCommunityDataPerTenenv(cls, tenenvId, session): + # get dates not mapped for communities data + datesNotMapped = utilsIngester.getDatesNotMapped( + "community", + "updated", + tenenvId, + session) + between = "" + if datesNotMapped[0] is not None: + between = " AND (date BETWEEN '{0}' AND '{1}')".format( + datesNotMapped[0], datesNotMapped[1]) + elif datesNotMapped[1] is not None: + between = " AND date <= '{0}'".format( + datesNotMapped[1] + ) + communitiesNotMapped = session.exec(""" + SELECT jsondata FROM statistics_raw WHERE (type='vo') + AND tenenv_id={0} {1} + """.format(tenenvId, between)).all() + communityMappedItems = 0 + for community in communitiesNotMapped: + print(community[0]) + communityId = session.exec("""INSERT INTO community_info( + name, description, source, tenenv_id) + VALUES 
('{0}','{1}','{2}', {3}) + ON CONFLICT(name, tenenv_id) + DO UPDATE + set description='{1}' + RETURNING id;""".format(community[0]['voName'], + community[0]['voDescription'], + community[0]['source'], + tenenvId)).one() + session.commit() + print(communityId) + if (communityId[0] is not None): + session.exec("""INSERT INTO community(community_id, created, updated, status, + tenenv_id) + VALUES ({0},'{1}','{1}','{2}',{3}) + ON CONFLICT(community_id, tenenv_id) + DO UPDATE + set status='{2}', updated='{1}' + """.format(communityId[0], + community[0]['date'], community[0]['status'], tenenvId)) + session.commit() + communityMappedItems += 1 + cls.logger.info("""{0} communities ingested or updated""". + format(communityMappedItems)) + + @classmethod + def ingestCommunityData(cls): + session_generator = get_session() + session = next(session_generator) + tenenvIds = session.exec("""SELECT id FROM tenenv_info""").all() + # for each tenenv on database try to ingest CommunityData + # from statistics_raw table + for tenenvId in tenenvIds: + CommunityDataIngester.ingestCommunityDataPerTenenv( + tenenvId[0], session) diff --git a/app/ingester/ingestData.py b/app/ingester/ingestData.py new file mode 100644 index 0000000..a6a6bd5 --- /dev/null +++ b/app/ingester/ingestData.py @@ -0,0 +1,13 @@ +from .communityIngester import CommunityDataIngester +from .usersIngester import UserDataIngester +from .membeshipIngester import MembershipDataIngester +from .loginsIngester import LoginDataIngester + +# Ingest Communities +CommunityDataIngester.ingestCommunityData() +# Ingest Users +UserDataIngester.ingestUserData() +# Ingest Memberships +MembershipDataIngester.ingestMembershipData() +# Ingest Logins +LoginDataIngester.ingestLoginData() diff --git a/app/ingester/loginsIngester.py b/app/ingester/loginsIngester.py new file mode 100644 index 0000000..98dddc7 --- /dev/null +++ b/app/ingester/loginsIngester.py @@ -0,0 +1,234 @@ +from app.logger import log +from ..database import get_session 
+from app.utils.ipDatabase import geoip2Database +from sqlalchemy.exc import NoResultFound +from .utilsIngester import utilsIngester +import hashlib + +class LoginDataIngester: + logger = log.get_logger("LoginDataIngester") + + @classmethod + def getIdpId(cls, entityid, idpName, tenenvId, session): + # Check if IdP exists + try: + idpId = session.exec( + """ + SELECT id, name FROM identityprovidersmap + WHERE entityid = '{0}' AND tenenv_id={1} + """.format( + entityid, tenenvId + ) + ).one() + # Update idpName with the latest + if (idpId[0] is not None and idpName is not None and idpName != '' + and idpId[1] != idpName): + session.exec( + """ + UPDATE identityprovidersmap SET name = '{0}' + WHERE id = {1} + """.format(idpName, idpId[0]) + ) + except NoResultFound: + cls.logger.info("""Idp with name {0} and entityid {1} + will be created for + tenenvId {2}""".format(idpName, + entityid, + tenenvId)) + idpId = session.exec( + """ + INSERT INTO identityprovidersmap (entityid, name, tenenv_id) + VALUES ('{0}', '{1}', {2}) + RETURNING id; + """.format( + entityid, idpName, tenenvId + ) + ).one() + return idpId + + @classmethod + def getSpId(cls, identifier, spName, tenenvId, session): + # Check if Sp exists + try: + spId = session.exec( + """ + SELECT id, name FROM serviceprovidersmap + WHERE identifier = '{0}' AND tenenv_id={1} + """.format( + identifier, tenenvId + ) + ).one() + # Update spName with the latest + if (spId[0] is not None and spName is not None and spName != '' + and spId[1] != spName): + session.exec( + """ + UPDATE serviceprovidersmap SET name = '{0}' + WHERE id = {1} + """.format(spName, spId[0]) + ) + except NoResultFound: + # If Sp not exists then add it to database + cls.logger.info("""Sp with name {0} and identifier {1} + will be created for + tenenvId {2}""".format(spName, + identifier, + tenenvId)) + spId = session.exec( + """ + INSERT INTO serviceprovidersmap (identifier, name, tenenv_id) + SELECT '{0}', '{1}', {2} + WHERE NOT EXISTS ( + SELECT 
1 FROM serviceprovidersmap + WHERE identifier = '{0}' + ) + RETURNING id; + """.format(identifier, spName, tenenvId) + ).one() + return spId + + @classmethod + def getCountryFromCountryCode(cls, countryData, session): + try: + countryId = session.exec( + """ + SELECT id FROM country_codes + WHERE countrycode = '{0}' + """.format( + countryData[0] + ) + ).one() + except NoResultFound: + cls.logger.info("""Country with name {0} + will be created""".format(countryData[1])) + countryId = session.exec( + """ + INSERT INTO country_codes (countrycode, country) + SELECT '{0}', '{1}' + WHERE NOT EXISTS ( + SELECT 1 FROM country_codes + WHERE countrycode = '{0}' + ) + RETURNING id; + """.format(countryData[0], countryData[1]) + ).one() + return countryId + + @classmethod + def getCountryFromIP(cls, ipAddress, session): + # handler for ip databases + ipDatabaseHandler = geoip2Database() + # get country code/ name + try: + countryData = ipDatabaseHandler.getCountryFromIp(ipAddress) + if (countryData[0] is None): + countryData[0] = 'UN' + countryData[1] = 'Unknown' + cls.logger.warning(""" + ip {0} not found at database""".format(ipAddress)) + except (Exception): + countryData = ['UN', 'Unknown'] + cls.logger.warning(""" + ip {0} not found at database""".format(ipAddress)) + # Save country if not exists + try: + countryId = session.exec( + """ + SELECT id FROM country_codes + WHERE countrycode = '{0}' + """.format( + countryData[0] + ) + ).one() + except NoResultFound: + cls.logger.info("""Country with name {0} + will be created""".format(countryData[1])) + countryId = session.exec( + """ + INSERT INTO country_codes (countrycode, country) + SELECT '{0}', '{1}' + WHERE NOT EXISTS ( + SELECT 1 FROM country_codes + WHERE countrycode = '{0}' + ) + RETURNING id; + """.format(countryData[0], countryData[1]) + ).one() + return countryId + + @classmethod + def ingestLoginDataPerTenenv(cls, tenenvId, session): + + # get dates not mapped for logi5ns data + datesNotMapped = 
utilsIngester.getDatesNotMapped( + "statistics_country_hashed", + "date", + tenenvId, + session) + between = "" + if datesNotMapped[0] is not None: + between = " AND (date BETWEEN '{0}' AND '{1}')".format( + datesNotMapped[0], datesNotMapped[1]) + elif datesNotMapped[1] is not None: + between = " AND date <= '{0}'".format( + datesNotMapped[1] + ) + cls.logger.info(""" + {0} logins """.format(between)) + loginsNotMapped = session.exec(""" + SELECT jsondata FROM statistics_raw WHERE type='login' + AND tenenv_id={0} {1} + """.format(tenenvId, between)).all() + loginMappedItems = 0 + for login in loginsNotMapped: + if (not login[0]['failedLogin'] + and utilsIngester.validateTenenv(login[0]['tenenvId'], session) + and 'voPersonId' in login[0] + and utilsIngester.validateHashedUser(login[0]['voPersonId'], + login[0]['tenenvId'], + session)): + + # Set the to None if they don't have value + login[0]['idpName'] = '' if not login[0].get('idpName') else login[0]['idpName'] + login[0]['spName'] = '' if not login[0].get('spName') else login[0]['spName'] + + # check if idp exists in our database otherwise create it + idpId = LoginDataIngester.getIdpId(login[0]['entityId'], + login[0]['idpName'], + login[0]['tenenvId'], + session) + # check if sp exists in our database otherwise create it + spId = LoginDataIngester.getSpId(login[0]['identifier'], + login[0]['spName'], + login[0]['tenenvId'], + session) + + if ('countryCode' in login[0] and 'countryName' in login[0]): + # find countryId + countryId = LoginDataIngester.getCountryFromCountryCode([login[0]['countryCode'], login[0]['countryName']], session) + # store information at statistics_country_hashed + session.exec( + """ + INSERT INTO statistics_country_hashed(date, hasheduserid, sourceidpid, serviceid, countryid, count, tenenv_id) + VALUES ('{0}', '{1}', {2}, {3}, {4}, {5}, {6}) + ON CONFLICT (date, hasheduserid, sourceidpid, serviceid, countryid, tenenv_id) + DO UPDATE SET count = statistics_country_hashed.count + 1 + 
""".format( + login[0]["date"], login[0]['voPersonId'], idpId[0], spId[0], countryId[0], 1, login[0]['tenenvId'] + ) + ) + session.commit() + loginMappedItems += 1 + else: + cls.logger.warning("The record {0} was not imported due to validation errors".format(repr(login[0]))) + + cls.logger.info(""" + {0} new logins ingested""".format(loginMappedItems)) + + @classmethod + def ingestLoginData(cls): + session_generator = get_session() + session = next(session_generator) + tenenvIds = session.exec("""SELECT id FROM tenenv_info""").all() + for tenenvId in tenenvIds: + LoginDataIngester.ingestLoginDataPerTenenv(tenenvId[0], session) diff --git a/app/ingester/membeshipIngester.py b/app/ingester/membeshipIngester.py new file mode 100644 index 0000000..5e6fa34 --- /dev/null +++ b/app/ingester/membeshipIngester.py @@ -0,0 +1,80 @@ +from app.logger import log +from ..database import get_session +from sqlalchemy.exc import NoResultFound +from .utilsIngester import utilsIngester + + +class MembershipDataIngester: + logger = log.get_logger("MembershipDataIngester") + + @classmethod + def getCommunityId(cls, communityName, tenenvId, session): + # Check if IdP exists + try: + communityId = session.exec( + """ + SELECT id FROM community_info + WHERE name = '{0}' AND tenenv_id={1} + """.format( + communityName, tenenvId + ) + ).one() + except NoResultFound: + cls.logger.error("""Community with name {0} + not found for + tenenvId {1}""".format(communityName, + tenenvId)) + communityId = None + return communityId + + @classmethod + def ingestMembershipDataPerTenenv(cls, tenenvId, session): + # get dates not mapped for users data + datesNotMapped = utilsIngester.getDatesNotMapped( + "members", + "updated", + tenenvId, + session) + between = "" + if datesNotMapped[0] is not None: + between = " AND (date BETWEEN '{0}' AND '{1}')".format( + datesNotMapped[0], datesNotMapped[1]) + elif datesNotMapped[1] is not None: + between = " AND date <= '{0}'".format( + datesNotMapped[1] + ) + 
membershipsNotMapped = session.exec(""" + SELECT jsondata FROM statistics_raw WHERE (type='membership') + AND tenenv_id={0} {1} + """.format(tenenvId, between)).all() + membershipMappedItems = 0 + for membership in membershipsNotMapped: + communityId = MembershipDataIngester.getCommunityId( + membership[0]['voName'], tenenvId, session) + if (communityId is None): + cls.logger.error(""" + VO name '{0}' not found """.format(membership[0]['voName'])) + continue + session.exec("""INSERT INTO members(community_id, + hasheduserid, status, tenenv_id, created, updated) + VALUES ('{0}','{1}','{2}', {3}, '{4}', '{4}') + ON CONFLICT(community_id, hasheduserid, tenenv_id) + DO UPDATE + set status='{2}', updated='{4}'""". format( + communityId[0], membership[0]['voPersonId'], membership[0]['status'], + tenenvId, membership[0]['date'])) + session.commit() + membershipMappedItems += 1 + cls.logger.info("""{0} memberships ingested or updated""". + format(membershipMappedItems)) + + @classmethod + def ingestMembershipData(cls): + session_generator = get_session() + session = next(session_generator) + tenenvIds = session.exec("""SELECT id FROM tenenv_info""").all() + # for each tenenv on database try to ingest UserData + # from statistics_raw table + for tenenvId in tenenvIds: + MembershipDataIngester.ingestMembershipDataPerTenenv( + tenenvId[0], session) diff --git a/app/ingester/usersIngester.py b/app/ingester/usersIngester.py new file mode 100644 index 0000000..4ea9281 --- /dev/null +++ b/app/ingester/usersIngester.py @@ -0,0 +1,58 @@ +from app.logger import log +from ..database import get_session +from .utilsIngester import utilsIngester + + +class UserDataIngester: + logger = log.get_logger("UserDataIngester") + + @classmethod + def ingestUserDataPerTenenv(cls, tenenvId, session): + # get dates not mapped for users data + datesNotMapped = utilsIngester.getDatesNotMapped( + "users", + "updated", + tenenvId, + session) + between = "" + if datesNotMapped[0] is not None: + between 
= " AND (date BETWEEN '{0}' AND '{1}')".format( + datesNotMapped[0], datesNotMapped[1]) + elif datesNotMapped[1] is not None: + between = " AND date <= '{0}'".format( + datesNotMapped[1] + ) + cls.logger.info("""between {0}""".format(between)) + usersNotMapped = session.exec(""" + SELECT jsondata FROM statistics_raw WHERE (type='registration' OR type='user_status') AND tenenv_id={0} {1} + """.format(tenenvId, between)).all() + userMappedItems = 0 + for user in usersNotMapped: + cls.logger.info("""hasheduserid {0}""".format(user[0])) + if (user[0]['type'] == 'registration' and 'status' not in user[0]): + user[0]['status'] = 'A' + if (user[0]['status'] not in ['A', 'S', 'D']): + cls.logger.error(""" + user status '{0}' is not valid """.format(user[0]['status'])) + continue + session.exec("""INSERT INTO users(hasheduserid, created, updated, status, tenenv_id) + VALUES ('{0}','{1}','{1}', '{2}', {3}) + ON CONFLICT(hasheduserid, tenenv_id) + DO UPDATE SET status='{2}', updated='{1}'""". format( + user[0]['voPersonId'], user[0]['date'], user[0]['status'], + user[0]['tenenvId'])) + session.commit() + userMappedItems += 1 + + cls.logger.info(""" + {0} users ingested or updated""".format(userMappedItems)) + + @classmethod + def ingestUserData(cls): + session_generator = get_session() + session = next(session_generator) + tenenvIds = session.exec("""SELECT id FROM tenenv_info""").all() + # for each tenenv on database try to ingest UserData + # from statistics_raw table + for tenenvId in tenenvIds: + UserDataIngester.ingestUserDataPerTenenv(tenenvId[0], session) \ No newline at end of file diff --git a/app/ingester/utilsIngester.py b/app/ingester/utilsIngester.py new file mode 100644 index 0000000..9b27666 --- /dev/null +++ b/app/ingester/utilsIngester.py @@ -0,0 +1,63 @@ +from app.logger import log +from datetime import timedelta, date +from sqlalchemy.exc import NoResultFound + + +class utilsIngester: + logger = log.get_logger("utilsIngester") + + @classmethod + def 
getDatesNotMapped(cls, table: str, column, tenenvId, session): + # Logins + + maxDate = session.exec(""" + SELECT max({0}::date) FROM {1} WHERE tenenv_id={2} + """.format(column, table, tenenvId)).one() + + dayFrom = None + dayTo = None + + if maxDate[0] is not None: + dayAfter = maxDate[0] + timedelta(days=1) + dayFrom = dayAfter.strftime('%Y-%m-%d 00:00:00') + + yesterday = date.today() - timedelta(days=1) + dayTo = yesterday.strftime('%Y-%m-%d 23:59:59') + return [dayFrom, dayTo] + + @classmethod + def validateTenenv(cls, tenenvId, session): + try: + tenenvId = session.exec( + """ + SELECT tenenv_info.id FROM tenenv_info + WHERE id={0} + """.format( + tenenvId + ) + ).one() + except NoResultFound: + # if tenenv_id doesn't exist return a relevant message + cls.logger.info("Tenenv with id {0} not found".format(tenenvId)) + print("Tenenv not found") + return False + return True + + @classmethod + def validateHashedUser(cls, hashedUser, tenenvId, session): + # Check if userid exists + try: + session.exec( + """ + SELECT hasheduserid FROM users WHERE hasheduserid='{0}' + AND tenenv_id={1} + """.format( + # hashlib.md5(data["userid"]).hexdigest() #TypeError: + # Strings must be encoded before hashing + hashedUser, tenenvId + ) + ).one() + except NoResultFound: + cls.logger.info("User {0} not found".format(hashedUser)) + return False + return True diff --git a/app/ip_databases/empty b/app/ip_databases/empty new file mode 100644 index 0000000..e69de29 diff --git a/app/logger/__init__.py b/app/logger/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/logger/log.py b/app/logger/log.py new file mode 100644 index 0000000..d38b485 --- /dev/null +++ b/app/logger/log.py @@ -0,0 +1,33 @@ +import logging +import sys +from app.utils import configParser +from logging.handlers import TimedRotatingFileHandler + +FORMATTER = logging.Formatter("""%(asctime)s - %(name)s - %(levelname)s - + %(message)s""") +LOG_FILE = 
"{0}/{1}".format(configParser.getConfig('logging', 'config.global.py')['folder'], + configParser.getConfig('logging', 'config.global.py')['file']) +LEVEL = configParser.getConfig('logging', 'config.global.py')['level'] + + +def get_console_handler(): + console_handler = logging.StreamHandler(sys.stdout) + console_handler.setFormatter(FORMATTER) + return console_handler + + +def get_file_handler(): + file_handler = TimedRotatingFileHandler(LOG_FILE, when='midnight') + file_handler.setFormatter(FORMATTER) + return file_handler + + +def get_logger(logger_name): + logger = logging.getLogger(logger_name) + logger.setLevel(LEVEL) + logger.addHandler(get_console_handler()) + logger.addHandler(get_file_handler()) + # with this pattern, it's rarely necessary + # to propagate the error up to parent + logger.propagate = False + return logger diff --git a/app/main.py b/app/main.py index a0ee93d..84a65c9 100644 --- a/app/main.py +++ b/app/main.py @@ -1,24 +1,80 @@ -from typing import List, Optional +import os +import sys +from pprint import pprint + +from xmlrpc.client import boolean +from fastapi import Depends, FastAPI, HTTPException, Query, Request, HTTPException, status +from starlette.middleware.cors import CORSMiddleware +from starlette.middleware.sessions import SessionMiddleware -from fastapi import Depends, FastAPI, HTTPException, Query -from fastapi.middleware.cors import CORSMiddleware from sqlmodel import Field, Session, SQLModel, create_engine, select +from sqlalchemy import func +from sqlalchemy.orm import selectinload from app.database import get_session -from app.models import * +from app.models.community_info_model import * +from app.models.community_model import * +from app.models.member_model import * +from app.models.service_model import * +from app.models.country_model import * +from app.models.idp_model import * +from app.models.country_hashed_user_model import * + +from .routers import authenticate, communities, countries, logins, users, dashboard, ams 
+from app.utils import configParser +from app.utils.fastapiGlobals import GlobalsMiddleware, g + +sys.path.insert(0, os.path.realpath('__file__')) +# Development Environment: dev +environment = os.getenv('API_ENVIRONMENT') + +# Instantiate app according to the environment configuration +app = FastAPI() if environment == "dev" else FastAPI(root_path="/api/v1", + root_path_in_servers=False, + servers=[{"url": "/api/v1"}]) + +if environment == "dev": + app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + +app.add_middleware(SessionMiddleware, + secret_key="some-random-string") + +# Globals +app.add_middleware(GlobalsMiddleware) + +# Get the tenant and environment from the request +@app.middleware("http") +async def get_tenacy(request: Request, call_next): + if 'x-tenant' in request.headers: + g.tenant = request.headers['x-tenant'] + elif 'x-tenant' in request.cookies: + g.tenant = request.cookies['x-tenant'] -app = FastAPI() + if 'x-environment' in request.headers: + g.environment = request.headers['x-environment'] + elif 'x-environment' in request.cookies: + g.environment = request.cookies['x-environment'] -origins = ["*"] -app.add_middleware( - CORSMiddleware, - allow_origins=origins, - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], -) + response = await call_next(request) + return response +CommunityReadwithInfo.update_forward_refs( + Community_InfoRead=Community_InfoRead) +Statistics_Country_HashedwithInfo.update_forward_refs( + IdentityprovidersmapRead=IdentityprovidersmapRead, + ServiceprovidersmapRead=ServiceprovidersmapRead, + Country_CodesRead=Country_CodesRead) -@app.get("/ping") -def pong(): - return {"ping": "pong!"} +app.include_router(authenticate.router) +app.include_router(users.router) +app.include_router(communities.router) +app.include_router(countries.router) +app.include_router(logins.router) +app.include_router(dashboard.router) 
+app.include_router(ams.router) diff --git a/app/models.py b/app/models.py deleted file mode 100644 index 14d9973..0000000 --- a/app/models.py +++ /dev/null @@ -1,74 +0,0 @@ -from typing import List, Optional -from sqlmodel import Field, Relationship, Session, SQLModel -from sqlalchemy import UniqueConstraint -from datetime import datetime - - -# User -class UserBase(SQLModel): - first_name: str - last_name: str - email: str = Field(index=True) - password: str - - -class User(UserBase, table=True): - __table_args__ = (UniqueConstraint("email"),) - id: Optional[int] = Field(default=None, primary_key=True) - - -class UserCreate(UserBase): - pass - - -class UserRead(UserBase): - id: int - - -class UserUpdate(SQLModel): - first_name: Optional[str] = None - last_name: Optional[str] = None - email: Optional[str] = None - password: Optional[str] = None - - -class UserLogin(SQLModel): - email: str - password: str - - -class UserLoginResponse(SQLModel): - id: int - email: str - first_name: str - last_name: str - - -# Communities -class CommunityBase(SQLModel): - name: str - description: str - created_at: datetime = Field(default_factory=datetime.utcnow, nullable=False) - modified_at: Optional[datetime] = None - - -class Community(CommunityBase, table=True): - id: Optional[int] = Field(default=None, primary_key=True) - - -class CommunityRead(CommunityBase): - id: int - - -class UserCommunitysRead(CommunityBase): - id: int - - -class CommunityCreate(CommunityBase): - pass - - -class CommunitiesUpdate(SQLModel): - name: Optional[str] = None - description: Optional[str] = None - modified_at: datetime = Field(default_factory=datetime.utcnow, nullable=False) \ No newline at end of file diff --git a/app/models/__init__.py b/app/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/models/community_info_model.py b/app/models/community_info_model.py new file mode 100644 index 0000000..76f977a --- /dev/null +++ b/app/models/community_info_model.py @@ -0,0 
+1,26 @@ +from typing import List, Optional,TYPE_CHECKING +from sqlmodel import Field, Relationship, Session, SQLModel +from sqlalchemy import UniqueConstraint +from datetime import date, datetime + +if TYPE_CHECKING: + from .community_model import Community + from .member_model import Members + +# Communities +class CommunityInfoBase(SQLModel): + name: str + description: str + source: str + #created_at: datetime = Field(default_factory=datetime.utcnow, nullable=False) + #modified_at: Optional[datetime] = None + +class Community_Info(CommunityInfoBase, table=True): + id: Optional[int] = Field(default=None, primary_key=True) + + communities: List["Community"] = Relationship(back_populates="community_info") + members: List["Members"] = Relationship(back_populates="community_info") + +class Community_InfoRead(CommunityInfoBase): + id: int + diff --git a/app/models/community_model.py b/app/models/community_model.py new file mode 100644 index 0000000..5f38bc6 --- /dev/null +++ b/app/models/community_model.py @@ -0,0 +1,24 @@ +from typing import List, Optional,TYPE_CHECKING +from sqlmodel import Field, Relationship, Session, SQLModel +from sqlalchemy import UniqueConstraint +from datetime import date, datetime + +if TYPE_CHECKING: + from .community_info_model import Community_Info, Community_InfoRead + + +class CommunityBase(SQLModel): + created: date = Field(nullable=False) + community_id: int = Field(primary_key=True, foreign_key="community_info.id") + + +class Community(CommunityBase, table=True): + #community_id: Optional[int] = Field(default=None, primary_key=True) + #id: Optional[int] = Field(default=None, primary_key=True) + community_info: "Community_Info" = Relationship(sa_relationship_kwargs={'uselist': False},back_populates="communities") + +class CommunityRead(CommunityBase): + pass + +class CommunityReadwithInfo(CommunityRead): + community_info: Optional["Community_InfoRead"] diff --git a/app/models/country_hashed_user_model.py 
b/app/models/country_hashed_user_model.py new file mode 100644 index 0000000..3375d57 --- /dev/null +++ b/app/models/country_hashed_user_model.py @@ -0,0 +1,33 @@ +from typing import List, Optional,TYPE_CHECKING +from sqlmodel import Field, Relationship, Session, SQLModel +from sqlalchemy import UniqueConstraint +from datetime import date, datetime + +if TYPE_CHECKING: + from .idp_model import * + from .service_model import * + from .country_model import * + + +class Statistics_Country_HashedBase(SQLModel): + date: date + hasheduserid: str + sourceidpid: int = Field(foreign_key="identityprovidersmap.id") + serviceid : int = Field(foreign_key="serviceprovidersmap.id") + countryid: int = Field(foreign_key="country_codes.id") + count: int + + +class Statistics_Country_Hashed(Statistics_Country_HashedBase, table=True): + id: Optional[int] = Field(default=None, primary_key=True) + identityprovider_info: "Identityprovidersmap" = Relationship(sa_relationship_kwargs={'uselist': False},back_populates="idps") + serviceprovider_info: "Serviceprovidersmap" = Relationship(sa_relationship_kwargs={'uselist': False},back_populates="services") + country_info: "Country_Codes" = Relationship(sa_relationship_kwargs={'uselist': False},back_populates="countries") + +class Statistics_Country_HashedRead(Statistics_Country_HashedBase): + pass + +class Statistics_Country_HashedwithInfo(Statistics_Country_HashedRead): + identityprovider_info: Optional["IdentityprovidersmapRead"] + serviceprovider_info: Optional["ServiceprovidersmapRead"] + country_info: Optional["Country_CodesRead"] diff --git a/app/models/country_model.py b/app/models/country_model.py new file mode 100644 index 0000000..6ab7812 --- /dev/null +++ b/app/models/country_model.py @@ -0,0 +1,22 @@ +from typing import List, Optional,TYPE_CHECKING +from sqlmodel import Field, Relationship, Session, SQLModel +from sqlalchemy import UniqueConstraint +from datetime import date, datetime + +if TYPE_CHECKING: + from 
.country_hashed_user_model import Statistics_Country_Hashed + from .member_model import Members + +# Communities +class Country_CodesBase(SQLModel): + countrycode: str + country: str + + +class Country_Codes(Country_CodesBase, table=True): + id: Optional[int] = Field(default=None, primary_key=True) + countries: List["Statistics_Country_Hashed"] = Relationship(back_populates="country_info") + +class Country_CodesRead(Country_CodesBase): + id: int + diff --git a/app/models/environment_model.py b/app/models/environment_model.py new file mode 100644 index 0000000..71c0489 --- /dev/null +++ b/app/models/environment_model.py @@ -0,0 +1,20 @@ +from typing import List, Optional,TYPE_CHECKING +from sqlmodel import Field, Relationship, Session, SQLModel +from sqlalchemy import UniqueConstraint +from datetime import date, datetime + +if TYPE_CHECKING: + from .country_hashed_user_model import Statistics_Country_Hashed + +# EnvironmentInfo +class EnvironmentInfoBase(SQLModel): + name: str + description: str + +class EnvironmentInfo(EnvironmentInfoBase, table=True): + id: Optional[int] = Field(default=None, primary_key=True) + #idps: List["Statistics_Country_Hashed"] = Relationship(back_populates="identityprovider_info") + +class EnvironmentInfoRead(EnvironmentInfoBase): + id: int + diff --git a/app/models/idp_model.py b/app/models/idp_model.py new file mode 100644 index 0000000..53a85df --- /dev/null +++ b/app/models/idp_model.py @@ -0,0 +1,20 @@ +from typing import List, Optional,TYPE_CHECKING +from sqlmodel import Field, Relationship, Session, SQLModel +from sqlalchemy import UniqueConstraint +from datetime import date, datetime + +if TYPE_CHECKING: + from .country_hashed_user_model import Statistics_Country_Hashed + +# IdPs +class IdentityprovidersmapBase(SQLModel): + entityid: str + name: str + +class Identityprovidersmap(IdentityprovidersmapBase, table=True): + id: Optional[int] = Field(default=None, primary_key=True) + idps: List["Statistics_Country_Hashed"] = 
Relationship(back_populates="identityprovider_info") + +class IdentityprovidersmapRead(IdentityprovidersmapBase): + id: int + diff --git a/app/models/member_model.py b/app/models/member_model.py new file mode 100644 index 0000000..820b47d --- /dev/null +++ b/app/models/member_model.py @@ -0,0 +1,26 @@ +from typing import List, Optional,TYPE_CHECKING +from sqlmodel import Field, Relationship, Session, SQLModel +from sqlalchemy import UniqueConstraint +from datetime import date, datetime + +if TYPE_CHECKING: + from .community_model import Community_Info,Community_InfoRead + +class MemberBase(SQLModel): + community_id: int = Field(primary_key=True, foreign_key="community_info.id") + hasheduserid: str = Field(primary_key=True) + status: str + +class Members(MemberBase, table=True): + community_info: "Community_Info" = Relationship(sa_relationship_kwargs={'uselist': False},back_populates="members") + +class MembersRead(MemberBase): + pass + +class MembersReadWithCommunityInfo(MembersRead): + community_info: "Community_InfoRead" + + + + + \ No newline at end of file diff --git a/app/models/service_model.py b/app/models/service_model.py new file mode 100644 index 0000000..8daaf8b --- /dev/null +++ b/app/models/service_model.py @@ -0,0 +1,20 @@ +from typing import List, Optional,TYPE_CHECKING +from sqlmodel import Field, Relationship, Session, SQLModel +from sqlalchemy import UniqueConstraint +from datetime import date, datetime + +if TYPE_CHECKING: + from .country_hashed_user_model import Statistics_Country_Hashed + +# Communities +class ServiceprovidersmapBase(SQLModel): + identifier: str + name: str + +class Serviceprovidersmap(ServiceprovidersmapBase, table=True): + id: Optional[int] = Field(default=None, primary_key=True) + services: List["Statistics_Country_Hashed"] = Relationship(back_populates="serviceprovider_info") + +class ServiceprovidersmapRead(ServiceprovidersmapBase): + id: int + diff --git a/app/models/tenant_model.py b/app/models/tenant_model.py new file 
mode 100644 index 0000000..ced41fc --- /dev/null +++ b/app/models/tenant_model.py @@ -0,0 +1,20 @@ +from typing import List, Optional,TYPE_CHECKING +from sqlmodel import Field, Relationship, Session, SQLModel +from sqlalchemy import UniqueConstraint +from datetime import date, datetime + +if TYPE_CHECKING: + from .country_hashed_user_model import Statistics_Country_Hashed + +# TenenvInfo +class TenenvInfoBase(SQLModel): + name: str + description: str + +class TenenvInfo(TenenvInfoBase, table=True): + id: Optional[int] = Field(default=None, primary_key=True) + #idps: List["Statistics_Country_Hashed"] = Relationship(back_populates="identityprovider_info") + +class TenenvInfoRead(TenenvInfoBase): + id: int + diff --git a/app/routers/__init__.py b/app/routers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/routers/ams.py b/app/routers/ams.py new file mode 100644 index 0000000..30c716a --- /dev/null +++ b/app/routers/ams.py @@ -0,0 +1,136 @@ +import base64 +import json +import hashlib +from fastapi import APIRouter, Depends, HTTPException, Response, Request, Body, Header, Security +from sqlmodel import Field, Session, SQLModel, create_engine, select +from typing import Union +from app.utils import configParser, globalMethods +from app.database import get_session +from app.utils.globalMethods import AuthNZCheck +from fastapi.responses import PlainTextResponse +from app.logger import log +from fastapi.security import HTTPBearer +from starlette.responses import JSONResponse +from sqlalchemy.exc import NoResultFound +from app.utils.ipDatabase import geoip2Database +from typing import Optional +# from ..dependencies import get_token_header + +router = APIRouter( + tags=["ams"] +) + +logger = log.get_logger("ams") + + +@router.get("/ams_stats/ams_verification_hash") +async def get_verification(response: Response): + + verification_hash = configParser.getConfig('ams', 'config.global.py')['verification_hash'] + response.status_code = 200 + 
response.headers["Content-Type"] = "text/plain" + return PlainTextResponse(verification_hash) + + +async def verify_authorization_header(Authorization: Optional[str] = Header(None)): + authkey = configParser.getConfig('ams', 'config.global.py')['auth_key'] + # check authorization + if (Authorization != authkey): + raise HTTPException(status_code=401) + # response.status_code = 401 + # return PlainTextResponse('Client Certificate Authentication Failure') + return Authorization + + +@router.post("/ams_stats") +async def get_ams_stats(*, + session: Session = Depends(get_session), + request: Request, + response: Response, + body = Body(..., example={"name": "Item Name"}), + Authorization: str = Depends(verify_authorization_header)): + + response.status_code = 200 + # Access the request data + data = await request.json() + logger.debug(data) + messages = data.get("messages", []) # Retrieve the list of messages + if not messages: # if only one message exists + try: + data_dict = process_message(data.get("message").get("data")) + process_data(data_dict, session) + except Exception as e: + logger.error(f"Error: {e}") + else: + for item in data.get("messages", []): + try: + data_dict = process_message(item.get("message").get("data")) + process_data(data_dict, session) + except Exception as e: + logger.error(f"Error: {e}") + + return JSONResponse({"message": "Endpoint called successfully"}) + + +def process_message(message): + decoded_data = base64.b64decode(message).decode() + logger.debug(decoded_data) + # Process the data + print(decoded_data) + # Convert the JSON-formatted string to a Python dictionary + data_dict = json.loads(decoded_data) + return data_dict + + +def process_data(data, session): + print(data["date"]) + if ("tenenvId" not in data + or "type" not in data + or "eventIdentifier" not in data + or "source" not in data + or "date" not in data): + + raise MissingDataException("One or more required attributes are missing.") + + if "ipAddress" in data: + # handler 
for ip databases + ipDatabaseHandler = geoip2Database() + countryData = ["", ""] + # get country code/ name + countryData[0] = 'UN' + countryData[1] = 'Unknown' + try: + countryData = ipDatabaseHandler.getCountryFromIp(data["ipAddress"]) + except Exception: + print("Unknown ip Address") + + data["countryCode"] = countryData[0] + data["countryName"] = countryData[1] + del data["ipAddress"] + if "voPersonId" in data: + # hash voPersonId + data["voPersonId"] = hashlib.md5(data['voPersonId'].encode()).hexdigest() + print(data) + session.exec( + """ + INSERT INTO statistics_raw(date, type, event_identifier, source, + tenenv_id, jsondata) + VALUES ('{0}', '{1}', '{2}', '{3}', '{4}','{5}') + ON CONFLICT (event_identifier, source, tenenv_id) + DO NOTHING + """.format( + data["date"], + data["type"], + data['eventIdentifier'], + data['source'], + data['tenenvId'], + json.dumps(data) + ) + ) + session.commit() + + return JSONResponse({"message": "Endpoint called successfully"}) + + +class MissingDataException(Exception): + pass \ No newline at end of file diff --git a/app/routers/authenticate.py b/app/routers/authenticate.py new file mode 100644 index 0000000..bc4474b --- /dev/null +++ b/app/routers/authenticate.py @@ -0,0 +1,180 @@ +from pprint import pprint +from typing import Annotated, Any + +from fastapi import APIRouter, Depends, HTTPException, status, Security, Request +from fastapi.responses import JSONResponse +import json, jwt + +from app.utils import configParser +import urllib.parse +from starlette.responses import HTMLResponse, RedirectResponse +from authlib.integrations.starlette_client import OAuth, OAuthError + +from app.utils.globalMethods import permissionsCalculation, g + +router = APIRouter( + tags=["authenticate"], + # dependencies=[Depends(get_token_header)], + # responses={404: {"description": "Not found"}}, +) + +def initializeAuthOb(): + config_file = 'config.' + g.tenant + '.' 
+ g.environment + '.py' + oidc_config = configParser.getConfig('oidc_client', config_file) + oauth = OAuth() + + oauth.register( + 'rciam', + client_id=oidc_config['client_id'], + client_secret=oidc_config['client_secret'], + server_metadata_url=oidc_config['issuer'] + "/.well-known/openid-configuration", + client_kwargs={'scope': 'openid profile email voperson_id eduperson_entitlement'} + ) + return oauth + +def getServerConfig(): + config_file = 'config.' + g.tenant + '.' + g.environment + '.py' + return configParser.getConfig('server_config', config_file) + +@router.get('/login', + include_in_schema=False + ) +async def login_endpoint( + request: Request, + oauth_ob= Depends(initializeAuthOb), + server_config= Depends(getServerConfig)): + rciam = oauth_ob.create_client('rciam') + redirect_uri = server_config['protocol'] + "://" + server_config['host'] + server_config['api_path'] + "/auth" + return await rciam.authorize_redirect(request, redirect_uri) + + +@router.get('/auth', + include_in_schema=False, + response_class=RedirectResponse) +async def authorize_rciam( + request: Request, + oauth_ob= Depends(initializeAuthOb), + server_config=Depends(getServerConfig) +): + login_start_url = request.cookies.get("login_start") + # pprint(request.cookies.get("login_start")) + if not login_start_url: + login_start_url = "/" + + # Set cookies when returning a RedirectResponse + # https://github.com/tiangolo/fastapi/issues/2452 + # Creating our own redirect url is what make it possible + # to add the cookie + response = RedirectResponse(url=urllib.parse.unquote(login_start_url)) + response.delete_cookie("login_start") + + rciam = oauth_ob.create_client('rciam') + try: + token = await rciam.authorize_access_token(request) + except OAuthError as error: + return HTMLResponse(f'

{error.error}

') + user = token.get('userinfo') + pprint(token) + + if user: + request.session['user'] = dict(user) + # Fetch the userinfo data + if user.get("email") is None: + metadata = await rciam.load_server_metadata() + if not metadata['userinfo_endpoint']: + raise RuntimeError('Missing "userinfo_endpoint" value') + # Make a request to the userinfo endpoint + user_info = await rciam.get(metadata['userinfo_endpoint'], token=token) + user_info.raise_for_status() + user_info_data = user_info.json() + # Encode the data to jwt + # todo: the key could become configurable and per tenenv + jwt_user = jwt.encode(payload=user_info_data, + key="a custom key", + algorithm="HS256") + # print(jwt_user) + + # XXX The max_age of the cookie is the same as the + # access token max age which we extract from the token + # itself + response.headers["Access-Control-Expose-Headers"] = "X-Permissions, X-Authenticated, X-Redirect" + response.set_cookie(key="userinfo", + value=jwt_user, + secure=None, + max_age=token.get('expires_in'), + domain=server_config['domain']) + + response.set_cookie(key="idtoken", + value=token.get('id_token'), + secure=None, + max_age=token.get('expires_in'), + domain=server_config['domain']) + + response.set_cookie(key="atoken", + value=token.get('access_token'), + secure=None, + max_age=token.get('expires_in'), + domain=server_config['domain']) + response.headers["X-Authenticated"] = "true" + + # Authorization + authorize_file = 'authorize.' + g.tenant + '.' + g.environment + '.py' + permissions = permissionsCalculation(authorize_file, user_info_data) + permissions_json = json.dumps(permissions).replace(" ", "").replace("\n", "") + + # Set the permissions cookie. 
+ jwt_persmissions = jwt.encode(payload=permissions, + key="a custom key", + algorithm="HS256") + response.set_cookie(key="permissions", + value=jwt_persmissions, + secure=None, + max_age=token.get('expires_in'), + domain=server_config['domain']) + # Add the permission to a custom header field + response.headers["X-Permissions"] = permissions_json + + return response + + +@router.get('/logout', + include_in_schema=False, + response_class=RedirectResponse) +async def logout( + request: Request, + oauth_ob= Depends(initializeAuthOb), + server_config=Depends(getServerConfig) +): + rciam = oauth_ob.create_client('rciam') + metadata = await rciam.load_server_metadata() + # todo: Fix this after we complete the multitenacy + redirect_uri = server_config['protocol'] + "://" + server_config['client'] +"/metrics" + logout_endpoint = metadata['end_session_endpoint'] + "?post_logout_redirect_uri=" + urllib.parse.unquote( + redirect_uri) + "&id_token_hint=" + request.cookies.get("idtoken") + + request.session.pop('user', None) + + # Set cookies when returning a RedirectResponse + # https://github.com/tiangolo/fastapi/issues/2452 + response = RedirectResponse(url=logout_endpoint) + response.set_cookie('userinfo', + expires=0, + max_age=0, + domain=server_config['domain']) + + response.set_cookie('idtoken', + expires=0, + max_age=0, + domain=server_config['domain']) + + response.set_cookie(key="atoken", + expires=0, + max_age=0, + domain=server_config['domain']) + + response.set_cookie(key="permissions", + expires=0, + max_age=0, + domain=server_config['domain']) + + return response diff --git a/app/routers/communities.py b/app/routers/communities.py new file mode 100644 index 0000000..83047e9 --- /dev/null +++ b/app/routers/communities.py @@ -0,0 +1,137 @@ +from fastapi import APIRouter, Depends, HTTPException +from sqlmodel import Field, Session, SQLModel, create_engine, select +from typing import Union + +from app.database import get_session +from 
app.models.community_info_model import * +from app.models.community_model import * +from app.models.member_model import MembersReadWithCommunityInfo +from app.utils.globalMethods import AuthNZCheck + + +MembersReadWithCommunityInfo.update_forward_refs( + Community_InfoRead=Community_InfoRead) + +router = APIRouter( + tags=["communities"], + dependencies=[Depends(AuthNZCheck("communities"))] +) + +@router.get("/members/", response_model=List[MembersReadWithCommunityInfo]) +async def read_members( + *, + session: Session = Depends(get_session), + offset: int = 0, + # community_id: Union[None, int] = None +): + # if not community_id: + # members = session.exec(select(Members).offset(offset)).all() + # else: + members = session.exec(select(Members).offset(offset)).all() + return members + +@router.get("/min_date_communities") +async def read_min_date_communities( + *, + session: Session = Depends(get_session), + offset: int = 0, + tenenv_id: int, +): + min_date = session.exec(""" + SELECT min(created) as min_date + FROM community + WHERE tenenv_id={0} + """.format(tenenv_id)).one() + return min_date +@router.get("/members_bystatus/") +async def read_members_bystatus( + *, + session: Session = Depends(get_session), + offset: int = 0, + community_id: Union[None, int] = None, + tenenv_id: int, +): + if not community_id: + members = session.exec(select(Members).offset(offset)).all() + else: + # members = session.exec(select(Members).offset(offset)).all() + members = session.exec(""" + SELECT count(*) as count, community_id, status FROM members + WHERE community_id={0} AND tenenv_id={1} + GROUP BY community_id, status + """.format(community_id, tenenv_id)).all() + # members = session.exec(""" SELECT community_id FROM members """) + return members + + +@router.get("/communities_groupby/{group_by}") +async def read_communities( + *, + session: Session = Depends(get_session), + offset: int = 0, + group_by: str, + tenenv_id: int, + interval: Union[str, None] = None, + 
count_interval: int = None, + startDate: str = None, + endDate: str = None, +): + interval_subquery = "" + if group_by: + if interval and count_interval: + interval_subquery = """WHERE created > + date_trunc('{0}', CURRENT_DATE) - INTERVAL '{1} {2}'""".format(group_by, count_interval, interval) + if startDate and endDate: + interval_subquery = """ + WHERE created BETWEEN '{0}' AND '{1}' + """.format(startDate, endDate) + if interval_subquery == "": + interval_subquery = """ + WHERE community.tenenv_id={0} + """.format(tenenv_id) + else: + interval_subquery += """ AND community.tenenv_id={0} + """.format(tenenv_id) + + communities = session.exec(""" + select count(*) as count, date_trunc( '{0}', created ) as range_date, + min(created) as min_date , string_agg(name,'|| ') as names, + string_agg(to_char(created, 'YYYY-MM-DD'),', ') as created_date, + string_agg(description,'|| ') as description + from community + join community_info on community.community_id=community_info.id + {1} + group by range_date + ORDER BY range_date ASC + """.format(group_by, interval_subquery)).all() + return communities + + +@router.get("/communities/") +async def read_community( + *, + session: Session = Depends(get_session), + community_id: Union[None, int] = None, + tenenv_id: int): + sql_subquery = '' + if community_id: + sql_subquery = 'id={0} and'.format(community_id) + community = session.exec(""" + SELECT * FROM community_info WHERE {0} tenenv_id={1} + """.format(sql_subquery, tenenv_id)).all() + # statement = select(Community).options(selectinload(Community.community_info)) + # result = session.exec(statement) + # community = result.one() + # if not community: + # raise HTTPException(status_code=404, detail="Community not found") + return community + + +@router.get("/communities_info/", response_model=List[Community_InfoRead]) +async def read_communities_info( + *, + session: Session = Depends(get_session), + offset: int = 0 +): + communities = 
session.exec(select(Community_Info).offset(offset)).all() + return communities diff --git a/app/routers/countries.py b/app/routers/countries.py new file mode 100644 index 0000000..c744f11 --- /dev/null +++ b/app/routers/countries.py @@ -0,0 +1,102 @@ +from fastapi import APIRouter, Depends, HTTPException, Query +from app.database import get_session +from sqlmodel import Field, Session, SQLModel, create_engine, select +from typing import Union + +from app.models.country_model import * +from app.models.country_hashed_user_model import * +from app.utils.globalMethods import AuthNZCheck + + +# from ..dependencies import get_token_header + +router = APIRouter( + tags=["countries"], + dependencies=[Depends(AuthNZCheck("countries"))], + # responses={404: {"description": "Not found"}}, +) + + +@router.get("/countries/", response_model=List[Country_CodesRead]) +async def read_countries( + *, + session: Session = Depends(get_session), + offset: int = 0 +): + countries = session.exec(select(Country_Codes).offset(offset)).all() + return countries + + +@router.get("/country_stats/", response_model=List[Statistics_Country_HashedwithInfo]) +async def read_country_stats( + *, + session: Session = Depends(get_session), + offset: int = 0 +): + stats = session.exec( + select(Statistics_Country_Hashed).offset(offset)).all() + return stats + + +@router.get("/country_stats_by_vo/{community_id}") +async def read_country_stats_by_vo( + *, + session: Session = Depends(get_session), + offset: int = 0, + community_id: Union[None, int] = None +): + stats_country = session.exec(""" + WITH users_countries AS ( + SELECT statistics_country_hashed.hasheduserid as userid, status, country, countrycode, count(*) as sum_count + FROM statistics_country_hashed + JOIN members ON members.hasheduserid=statistics_country_hashed.hasheduserid + JOIN country_codes ON countryid=country_codes.id + WHERE community_id={0} AND country!='Unknown' + GROUP BY userid, status, country, countrycode + ), + max_count_users_countries AS 
( + SELECT DISTINCT userid, status, max(sum_count) as max_sum_count,row_number() OVER (ORDER BY userid, status) as row_number + FROM users_countries + GROUP BY userid, status + ) + SELECT country,countrycode,count(*) as sum + FROM users_countries + JOIN ( + SELECT userid, status, max_sum_count, max(row_number) + FROM max_count_users_countries GROUP BY userid, status, max_sum_count + ) max_count_users_countries_no_duplicates + ON users_countries.userid=max_count_users_countries_no_duplicates.userid + AND users_countries.sum_count=max_count_users_countries_no_duplicates.max_sum_count + GROUP BY country,countrycode + ORDER BY country; + """.format(community_id)).all() + status_per_country = session.exec(""" + WITH users_countries AS ( + SELECT statistics_country_hashed.hasheduserid as userid, status, country, countrycode, count(*) as sum_count + FROM statistics_country_hashed + JOIN members ON members.hasheduserid=statistics_country_hashed.hasheduserid + JOIN country_codes ON countryid=country_codes.id + WHERE community_id={0} AND country!='Unknown' + GROUP BY userid, status, country, countrycode + ), + max_count_users_countries AS ( + SELECT DISTINCT userid, status, max(sum_count) as max_sum_count, row_number() OVER (ORDER BY userid, status) as row_number + FROM users_countries + GROUP BY userid, status + ) + SELECT country,countrycode, users_countries.status, count(*) as sum + FROM users_countries + JOIN ( + SELECT userid, status, max_sum_count, max(row_number) + FROM max_count_users_countries GROUP BY userid, status, max_sum_count + ) max_count_users_countries_no_duplicates + ON users_countries.userid=max_count_users_countries_no_duplicates.userid + AND users_countries.sum_count=max_count_users_countries_no_duplicates.max_sum_count + GROUP BY country,countrycode, users_countries.status + ORDER BY country; + """.format(community_id)).all() + + return { + 'stats': stats_country, + 'status': status_per_country + } diff --git a/app/routers/dashboard.py 
from fastapi import APIRouter, Depends, HTTPException, Request
from sqlmodel import Field, Session, SQLModel, create_engine, select
from typing import Union
from xmlrpc.client import boolean

from app.database import get_session
from app.utils.globalMethods import AuthNZCheck

router = APIRouter(
    tags=["dashboard"],
    dependencies=[Depends(AuthNZCheck("dashboard", True))]
)


@router.get("/tenenv/{tenant_name}/{environment_name}")
async def read_tenenv_byname(
    *,
    session: Session = Depends(get_session),
    offset: int = 0,
    tenant_name: str,
    environment_name: str
):
    """Resolve a (tenant, environment) name pair to its tenenv_info rows.

    Names are matched case-insensitively. Embedded single quotes are doubled
    before interpolation so user-supplied path segments cannot break out of
    the SQL string literal (quote-based injection).
    """
    tenenv = None
    if tenant_name and environment_name:
        tenenv = session.exec("""
            SELECT tenenv_info.* FROM tenenv_info
            JOIN tenant_info ON tenant_info.id=tenant_id
            AND LOWER(tenant_info.name)=LOWER('{0}')
            JOIN environment_info ON environment_info.id=env_id
            AND LOWER(environment_info.name)=LOWER('{1}')
        """.format(tenant_name.replace("'", "''"),
                   environment_name.replace("'", "''"))).all()
    return tenenv


@router.get("/environment_byname/{environment_name}")
async def read_environment_byname(
    *,
    session: Session = Depends(get_session),
    offset: int = 0,
    environment_name: str
):
    """Return the first environment_info row matching `environment_name` exactly."""
    environment = None
    if environment_name:
        # Quote-escape the user-supplied name before SQL interpolation.
        environment = session.exec("""
            SELECT * FROM environment_info
            WHERE name='{0}' LIMIT 1
        """.format(environment_name.replace("'", "''"))).all()
    return environment


@router.get("/idps")
async def read_idps(
    *,
    session: Session = Depends(get_session),
    tenenv_id: int,
    idpId: int = None
):
    """List identity-provider mappings for a tenenv, optionally one by id.

    Both parameters are typed int, so FastAPI validation guarantees the
    interpolated values are plain integers.
    """
    idpId_subquery = ""
    if idpId:
        idpId_subquery = """
            AND id = {0}
        """.format(idpId)
    idps = session.exec("""
        SELECT * FROM identityprovidersmap
        WHERE tenenv_id='{0}' {1}
    """.format(tenenv_id, idpId_subquery)).all()
    return idps


@router.get("/sps")
async def read_sps(
    *,
    session: Session = Depends(get_session),
    tenenv_id: int,
    spId: int = None
):
    """List service-provider mappings for a tenenv, optionally one by id."""
    spId_subquery = ""
    if spId:
        spId_subquery = """
            AND id = {0}
        """.format(spId)
    sps = session.exec("""
        SELECT * FROM serviceprovidersmap
        WHERE tenenv_id='{0}' {1}
    """.format(tenenv_id, spId_subquery)).all()
    return sps
from pprint import pprint

from fastapi import APIRouter, Depends, HTTPException, Request
from sqlmodel import Field, Session, SQLModel, create_engine, select
from starlette.responses import JSONResponse
from sqlalchemy.exc import NoResultFound
from typing import Union
from xmlrpc.client import boolean

from app.database import get_session
from app.utils.globalMethods import AuthNZCheck


# LOGINS ROUTES ARE OPEN

router = APIRouter(
    tags=["logins"],
    dependencies=[Depends(AuthNZCheck("logins", True))]
)


@router.get("/min_date_logins")
async def read_min_date_logins(
    *,
    request: Request,
    session: Session = Depends(get_session),
    tenenv_id: int,
    unique_logins: Union[bool, None] = False,
):
    """Return the earliest and latest login dates recorded for a tenenv.

    With unique_logins=True, anonymous rows (hasheduserid 'unknown') are
    excluded from the range.
    """
    unique_logins_subquery = ""
    if unique_logins:
        unique_logins_subquery = "AND hasheduserid != 'unknown'"

    min_date = session.exec("""SELECT min(date) as min_date, max(date) as max_date
                FROM statistics_country_hashed
                WHERE tenenv_id={0} {1}""".format(tenenv_id, unique_logins_subquery)).one()
    return min_date


@router.get("/logins_per_idp")
async def read_logins_per_idp(
    *,
    request: Request,
    session: Session = Depends(get_session),
    offset: int = 0,
    sp: str = None,  # type: ignore
    startDate: str = None,  # type: ignore
    endDate: str = None,  # type: ignore
    tenenv_id: int,
    unique_logins: Union[bool, None] = False,
):
    """Aggregate login counts per identity provider for a tenenv.

    Optional filters: a single service provider (`sp`) and a date range.
    With unique_logins=True, counts distinct users instead of summing raw
    login counts. All user-supplied strings are quote-escaped before SQL
    interpolation.
    """
    interval_subquery = ""
    sp_subquery_join = ""
    if sp:
        # NOTE(review): this only *constructs* the checker; its async
        # __call__ is never awaited, so no authentication actually happens
        # here. Preserved as-is — TODO wire this up as a real dependency.
        AuthNZCheck("logins", False)

        sp_subquery_join = """
            JOIN serviceprovidersmap ON serviceprovidersmap.id=serviceid
            AND serviceprovidersmap.tenenv_id=statistics_country_hashed.tenenv_id
            AND serviceprovidersmap.tenenv_id={1}
            AND serviceid = '{0}'
        """.format(
            sp.replace("'", "''"), tenenv_id
        )
    unique_logins_subquery = ""
    if unique_logins:
        unique_logins_subquery = "AND hasheduserid != 'unknown'"

    if startDate and endDate:
        interval_subquery = """
            AND date BETWEEN '{0}' AND '{1}'
        """.format(startDate.replace("'", "''"), endDate.replace("'", "''"))
    if unique_logins == False:
        sub_select = """
            sum(count) as count
        """
    else:
        sub_select = """
            count(DISTINCT hasheduserid) as count
        """

    logins = session.exec("""
        select identityprovidersmap.id, identityprovidersmap.name, entityid, sourceidpid, {0}
        from statistics_country_hashed
        join identityprovidersmap ON identityprovidersmap.id=sourceidpid
        AND identityprovidersmap.tenenv_id=statistics_country_hashed.tenenv_id
        {1}
        WHERE statistics_country_hashed.tenenv_id = {2}
        {3} {4}
        GROUP BY identityprovidersmap.id, sourceidpid, identityprovidersmap.name, entityid
        ORDER BY count DESC
        """.format(
            sub_select, sp_subquery_join, tenenv_id, interval_subquery, unique_logins_subquery
        )
    ).all()

    return logins
@router.get("/logins_per_sp")
async def read_logins_per_sp(
    *,
    session: Session = Depends(get_session),
    request: Request,
    offset: int = 0,
    idp: str = None,
    startDate: str = None,
    endDate: str = None,
    tenenv_id: int,
    unique_logins: Union[bool, None] = False,
):
    """Aggregate login counts per service provider for a tenenv.

    Optional filters: a single identity provider (`idp`) and a date range.
    With unique_logins=True, counts distinct users instead of summing raw
    login counts. User-supplied strings are quote-escaped before SQL
    interpolation to block quote-based injection.
    """
    unique_logins_subquery = ""
    if unique_logins:
        unique_logins_subquery = "AND hasheduserid != 'unknown'"

    interval_subquery = ""
    idp_subquery_join = ""
    if idp:
        # NOTE(review): constructs the checker but never awaits __call__,
        # so no auth check runs here. Preserved — TODO fix as a dependency.
        AuthNZCheck("logins", False)

        idp_subquery_join = """
            JOIN identityprovidersmap ON identityprovidersmap.id=sourceidpid
            AND identityprovidersmap.tenenv_id=statistics_country_hashed.tenenv_id
            AND identityprovidersmap.tenenv_id={1}
            AND identityprovidersmap.id = '{0}'
        """.format(
            idp.replace("'", "''"), tenenv_id
        )

    if startDate and endDate:
        interval_subquery = """
            AND date BETWEEN '{0}' AND '{1}'
        """.format(startDate.replace("'", "''"), endDate.replace("'", "''"))

    if unique_logins == False:
        sub_select = """
            sum(count) as count
        """
    else:
        sub_select = """
            count(DISTINCT hasheduserid) as count
        """

    logins = session.exec("""
        select serviceprovidersmap.id, serviceprovidersmap.name, identifier, serviceid, {0}
        from statistics_country_hashed
        join serviceprovidersmap ON serviceprovidersmap.id=serviceid
        AND serviceprovidersmap.tenenv_id=statistics_country_hashed.tenenv_id
        {1}
        WHERE statistics_country_hashed.tenenv_id = {2}
        {3} {4}
        GROUP BY serviceprovidersmap.id, serviceid, serviceprovidersmap.name, identifier
        ORDER BY count DESC
        """.format(
            sub_select, idp_subquery_join, tenenv_id, interval_subquery, unique_logins_subquery
        )
    ).all()
    return logins


@router.get("/logins_per_country")
async def read_logins_per_country(
    *,
    session: Session = Depends(get_session),
    offset: int = 0,
    group_by: Union[str, None] = None,
    startDate: str = None,
    endDate: str = None,
    tenenv_id: int,
    unique_logins: Union[bool, None] = False,
    idpId: Union[int, None] = None,
    spId: Union[int, None] = None,
):
    """Aggregate login counts per country, optionally bucketed by time.

    With `group_by` set (day/week/month/quarter/year), returns one row per
    date bucket with a '||'-joined per-country breakdown; otherwise returns
    one row per country. idpId/spId are validated ints; date strings are
    quote-escaped; `group_by` is checked against an allowlist because it is
    interpolated into date_trunc().
    """
    unique_logins_subquery = ""
    if unique_logins:
        unique_logins_subquery = "AND hasheduserid != 'unknown'"

    interval_subquery = ""
    entity_subquery = ""
    sp_subquery = ""
    if idpId:
        entity_subquery = """
            AND sourceidpid = {0}
        """.format(idpId)
    if spId:
        sp_subquery = """
            AND serviceid = {0}
        """.format(spId)
    if group_by:
        # group_by lands inside date_trunc('{0}', ...) verbatim — only known
        # PostgreSQL field names are allowed through.
        if group_by not in ("day", "week", "month", "quarter", "year"):
            raise HTTPException(status_code=400, detail="Invalid group_by value")
        if startDate and endDate:
            interval_subquery = """
                AND date BETWEEN '{0}' AND '{1}'
            """.format(startDate.replace("'", "''"), endDate.replace("'", "''"))

        if unique_logins == False:
            sub_select = """
                sum(count) as count_country
            """
            sum = "sum(count)"
        else:
            sub_select = """
                count(DISTINCT hasheduserid) as count_country
            """
            sum = "count(DISTINCT hasheduserid)"
        logins = session.exec("""
            SELECT range_date, sum(count_country) as count, min(min_login_date) as min_date, STRING_AGG(country, '|| ') as countries
            FROM (
                SELECT date_trunc('{0}', date) as range_date, min(date) as min_login_date, {1}, CONCAT(country,': ',{2}) as country
                from statistics_country_hashed
                JOIN country_codes ON countryid=country_codes.id
                WHERE tenenv_id = {3}
                {4} {5} {6} {7}
                GROUP BY range_date, country
                ORDER BY range_date,country ASC
            ) country_logins
            GROUP BY range_date
            """.format(
                group_by,
                sub_select,
                sum,
                tenenv_id,
                interval_subquery,
                entity_subquery,
                sp_subquery,
                unique_logins_subquery
            )
        ).all()
    else:
        if startDate and endDate:
            interval_subquery = """
                AND date BETWEEN '{0}' AND '{1}'
            """.format(startDate.replace("'", "''"), endDate.replace("'", "''"))

        if unique_logins == False:
            sub_select = """
                sum(count) as sum
            """
        else:
            sub_select = """
                count(DISTINCT hasheduserid) as sum
            """
        logins = session.exec("""
            SELECT country, countrycode, {0}
            FROM statistics_country_hashed
            JOIN country_codes ON countryid=country_codes.id
            WHERE tenenv_id = {1}
            {2} {3} {4}
            GROUP BY country,countrycode
            """.format(
                sub_select, tenenv_id, interval_subquery, entity_subquery, sp_subquery
            )
        ).all()
    return logins
@router.get("/logins_countby")
async def read_logins_countby(
    *,
    session: Session = Depends(get_session),
    offset: int = 0,
    interval: Union[str, None] = None,
    count_interval: int = None,
    tenenv_id: int,
    unique_logins: Union[bool, None] = False,
    idpId: Union[int, None] = None,
    spId: Union[int, None] = None,
):
    """Return a single login count for a tenenv, optionally limited to the
    last `count_interval` `interval`s (e.g. 6 months) and/or one IdP/SP.

    `interval` is interpolated into a PostgreSQL INTERVAL literal, so it is
    checked against an allowlist of unit names; count_interval/idpId/spId
    are ints by type validation.
    """
    interval_subquery = ""
    idp_subquery = ""
    sp_subquery = ""
    if interval and count_interval:
        if interval not in ("minute", "minutes", "hour", "hours", "day", "days",
                            "week", "weeks", "month", "months", "year", "years"):
            raise HTTPException(status_code=400, detail="Invalid interval unit")
        interval_subquery = """AND date >
            CURRENT_DATE - INTERVAL '{0} {1}'""".format(count_interval, interval)
    if idpId:
        idp_subquery = """
            AND sourceidpid = '{0}'
        """.format(idpId)
    if spId:
        sp_subquery = """
            AND serviceid = '{0}'
        """.format(spId)
    if unique_logins == False:
        logins = session.exec("""
            select sum(count) as count
            from statistics_country_hashed WHERE tenenv_id={0}
            {1} {2} {3}
            """.format(
                tenenv_id, interval_subquery, idp_subquery, sp_subquery
            )
        ).all()
    else:
        logins = session.exec("""
            select count(DISTINCT hasheduserid) as count
            from statistics_country_hashed WHERE tenenv_id={0} AND hasheduserid != 'unknown'
            {1} {2} {3}
            """.format(
                tenenv_id, interval_subquery, idp_subquery, sp_subquery
            )
        ).all()
    return logins


@router.get("/logins_groupby/{group_by}")
async def read_logins_groupby(
    *,
    session: Session = Depends(get_session),
    request: Request,
    offset: int = 0,
    group_by: str,
    idp: str = None,
    sp: str = None,
    tenenv_id: int,
    unique_logins: Union[bool, None] = False,
):
    """Return a continuous per-day (or coarser) login time series.

    A generated day series is LEFT JOINed against the bucketed counts so
    days with no logins still appear (with NULL count). `group_by` goes
    into date_trunc() verbatim and is therefore allowlist-checked; idp/sp
    strings are quote-escaped.
    """
    if group_by not in ("day", "week", "month", "quarter", "year"):
        raise HTTPException(status_code=400, detail="Invalid group_by value")

    days_seq_subquery = ""
    if unique_logins:
        days_seq_subquery = " AND hasheduserid != 'unknown'"
    days_seq_table = """
        with days as (select generate_series(
            (select date_trunc('day', min(date))
                from statistics_country_hashed
                where statistics_country_hashed.tenenv_id = {0} {1})::timestamp,
            (select date_trunc('day', max(date))
                from statistics_country_hashed
                where statistics_country_hashed.tenenv_id = {0} {1}),
            '1 day'::interval
        ) as day)
    """.format(tenenv_id, days_seq_subquery)

    interval_subquery = ""
    if idp != None:
        # NOTE(review): constructs the checker but never awaits __call__ —
        # no auth check actually runs here. Preserved; TODO fix upstream.
        AuthNZCheck("logins", False)

        interval_subquery = """
            JOIN identityprovidersmap ON sourceidpid=identityprovidersmap.id
            AND identityprovidersmap.tenenv_id=statistics_country_hashed.tenenv_id
            WHERE identityprovidersmap.id = '{0}'
        """.format(idp.replace("'", "''"))
    elif sp != None:
        # NOTE(review): same no-op auth call as above.
        AuthNZCheck("logins", False)

        interval_subquery = """
            JOIN serviceprovidersmap ON serviceid=serviceprovidersmap.id
            AND serviceprovidersmap.tenenv_id=statistics_country_hashed.tenenv_id
            WHERE serviceprovidersmap.id = '{0}'
        """.format(sp.replace("'", "''"))
    if interval_subquery == "":
        interval_subquery = (
            """WHERE statistics_country_hashed.tenenv_id = {0}""".format(tenenv_id)
        )
    else:
        interval_subquery += (
            """ AND statistics_country_hashed.tenenv_id = {0} """.format(tenenv_id)
        )
    logins_count_raw = """
        select sum(count) as count, date_trunc('{0}', date) as date
        from statistics_country_hashed
        {1}
        GROUP BY date_trunc('{0}', date)
        ORDER BY date_trunc('{0}', date) ASC
    """.format(group_by, interval_subquery)
    if unique_logins is True:
        logins_count_raw = """
            select count(DISTINCT hasheduserid) as count, date_trunc('{0}', date) as date
            from statistics_country_hashed
            {1} {2}
            GROUP BY date_trunc('{0}', date)
            ORDER BY date_trunc('{0}', date) ASC
        """.format(group_by, interval_subquery, days_seq_subquery)

    logins = session.exec("""
        {0},
        logins_count as ({1})
        select days.day as date, logins_count.count as count
        from days left join logins_count on logins_count.date = days.day
        ORDER BY date ASC;
    """.format(days_seq_table, logins_count_raw)).all()
    return logins
from fastapi import APIRouter, Depends, HTTPException
from sqlmodel import Field, Session, SQLModel, create_engine, select
from typing import Union

from app.database import get_session
from app.utils.globalMethods import AuthNZCheck


# from ..dependencies import get_token_header

router = APIRouter(
    tags=["users"],
    dependencies=[Depends(AuthNZCheck("registered_users"))],
    # responses={404: {"description": "Not found"}},
)


@router.get("/min_date_registered_users")
async def read_min_date_registered_users(
    *,
    session: Session = Depends(get_session),
    tenenv_id: int
):
    """Return the earliest registration date for this tenenv.

    Fix: the original query ignored `tenenv_id` entirely and scanned the
    whole users table across all tenants; the WHERE clause now scopes the
    minimum to the requested tenenv (tenenv_id is an int by validation).
    """
    min_date = session.exec("""
        SELECT min(created) as min_date
        FROM users
        WHERE tenenv_id={0}
    """.format(tenenv_id)).one()
    return min_date


@router.get("/registered_users_country")
async def read_users_country(
    *,
    session: Session = Depends(get_session),
    offset: int = 0,
    startDate: str = None,
    endDate: str = None,
    tenenv_id: int
):
    """Count active ('A') registered users per country for a tenenv.

    Each user is attributed to their most frequent login country (ties
    broken via row_number). Date strings are quote-escaped before SQL
    interpolation to block quote-based injection.
    """
    interval_subquery = ""
    if startDate and endDate:
        interval_subquery = """
            WHERE users.created BETWEEN '{0}' AND '{1}'
        """.format(startDate.replace("'", "''"), endDate.replace("'", "''"))
    users_countries = session.exec(
        """WITH users_countries AS (
            SELECT statistics_country_hashed.hasheduserid as userid, country, countrycode, count(*) as sum_count
            FROM statistics_country_hashed
            JOIN country_codes ON countryid=country_codes.id
            WHERE tenenv_id = {1}
            GROUP BY userid, country, countrycode
        ),
        max_count_users_countries AS (
            SELECT DISTINCT userid, max(sum_count) as max_sum_count,row_number() OVER (ORDER BY userid) as row_number
            FROM users_countries
            GROUP BY userid
        )
        SELECT country,countrycode, count(*) as sum
        FROM users_countries
        JOIN (
            SELECT userid, max_sum_count, max(row_number)
            FROM max_count_users_countries GROUP BY userid, max_sum_count
        ) max_count_users_countries_no_duplicates
        ON users_countries.userid=max_count_users_countries_no_duplicates.userid
        AND users_countries.sum_count=max_count_users_countries_no_duplicates.max_sum_count
        JOIN users ON users.hasheduserid=users_countries.userid AND status='A'
        {0}
        GROUP BY country,countrycode
        ORDER BY country,countrycode
        """.format(interval_subquery, tenenv_id)).all()
    return users_countries
@router.get("/registered_users_country_group_by/{group_by}")
async def read_users_country_groupby(
    *,
    session: Session = Depends(get_session),
    offset: int = 0,
    group_by: str,
    startDate: str = None,
    endDate: str = None,
    tenenv_id: int
):
    """Count active registered users per country, bucketed by time period.

    `group_by` is interpolated into date_trunc() verbatim, so only known
    PostgreSQL field names are allowed; date strings are quote-escaped.
    """
    if group_by:
        if group_by not in ("day", "week", "month", "quarter", "year"):
            raise HTTPException(status_code=400, detail="Invalid group_by value")
        interval_subquery = ""
        if startDate and endDate:
            interval_subquery = """
                WHERE users.created BETWEEN '{0}' AND '{1}'
            """.format(startDate.replace("'", "''"), endDate.replace("'", "''"))
        users = session.exec(
            """WITH users_countries AS (
                SELECT statistics_country_hashed.hasheduserid as userid, country, countrycode, count(*) as sum_count
                FROM statistics_country_hashed
                JOIN country_codes ON countryid=country_codes.id
                WHERE tenenv_id = {2}
                GROUP BY userid, country, countrycode
            ),
            max_count_users_countries AS (
                SELECT DISTINCT userid, max(sum_count) as max_sum_count,row_number() OVER (ORDER BY userid) as row_number
                FROM users_countries
                GROUP BY userid
            )
            SELECT range_date, min(created_min_date) as min_date, STRING_AGG(country, '|| ') as countries, sum(sum) as count
            FROM
            (
                SELECT date_trunc('{0}', users.created) as range_date, CONCAT(country,': ',count(*)) as country, min(users.created) as created_min_date, count(*) as sum
                FROM users_countries
                JOIN (
                    SELECT userid, max_sum_count, max(row_number)
                    FROM max_count_users_countries GROUP BY userid, max_sum_count
                ) max_count_users_countries_no_duplicates
                ON users_countries.userid=max_count_users_countries_no_duplicates.userid
                AND users_countries.sum_count=max_count_users_countries_no_duplicates.max_sum_count
                JOIN users ON users.hasheduserid=users_countries.userid AND status='A'
                {1}
                GROUP BY range_date, country,countrycode
                ORDER BY range_date, country
            ) user_country_group_by
            GROUP BY range_date""".format(group_by, interval_subquery, tenenv_id)).all()
        return users


@router.get("/registered_users_groupby/{group_by}")
async def read_users_groupby(
    *,
    session: Session = Depends(get_session),
    offset: int = 0,
    group_by: str,
    interval: Union[str, None] = None,
    count_interval: int = None,
    startDate: str = None,
    endDate: str = None,
    tenenv_id: int
):
    """Count active registrations per time bucket.

    Note: when both a rolling window (interval/count_interval) and an
    explicit date range are supplied, the date range wins — it overwrites
    the window clause (original behavior, preserved). Allowlists guard the
    values interpolated into date_trunc()/INTERVAL; dates are quote-escaped.
    """
    interval_subquery = ""
    if group_by:
        if group_by not in ("day", "week", "month", "quarter", "year"):
            raise HTTPException(status_code=400, detail="Invalid group_by value")
        if interval and count_interval:
            if interval not in ("day", "days", "week", "weeks",
                                "month", "months", "year", "years"):
                raise HTTPException(status_code=400, detail="Invalid interval unit")
            interval_subquery = """AND created >
                date_trunc('{0}', CURRENT_DATE) - INTERVAL '{1} {2}'""".format(group_by, count_interval, interval)
        if startDate and endDate:
            interval_subquery = """
                AND created BETWEEN '{0}' AND '{1}'
            """.format(startDate.replace("'", "''"), endDate.replace("'", "''"))
        users = session.exec("""
            select count(*) as count, date_trunc( '{0}', created ) as range_date,
            min(created) as min_date
            from users
            WHERE status = 'A' AND tenenv_id = {1}
            {2}
            group by range_date
            ORDER BY range_date ASC
            """.format(group_by, tenenv_id, interval_subquery)).all()
        return users


@router.get("/registered_users_countby")
async def read_users_countby(
    *,
    session: Session = Depends(get_session),
    offset: int = 0,
    interval: Union[str, None] = None,
    count_interval: int = None,
    tenenv_id: int
):
    """Return the total count of active users, optionally within a rolling
    window of the last `count_interval` `interval`s (unit allowlisted)."""
    interval_subquery = ""
    if interval and count_interval:
        if interval not in ("day", "days", "week", "weeks",
                            "month", "months", "year", "years"):
            raise HTTPException(status_code=400, detail="Invalid interval unit")
        interval_subquery = """AND created >
            CURRENT_DATE - INTERVAL '{0} {1}'""".format(count_interval, interval)

    users = session.exec("""
        select count(*) as count
        from users
        WHERE status = 'A' AND tenenv_id = {1}
        {0}""".format(interval_subquery, tenenv_id)).all()
    return users
import os
from configparser import RawConfigParser
from app.utils.fastapiGlobals import g


# TODO: We need to cache the content of the file.
# NOTE(review): a naive functools cache would share one mutable dict between
# callers — confirm callers never mutate the result before adding caching.
def getConfig(section='source_database', config_file='config.py'):
    """Read one section of an INI-style config file into a plain dict.

    Raises Exception if `section` is missing from the file. For authorize
    files, ';' is used as the key/value delimiter (and '%%' as the comment
    prefix) because entitlement values contain both ':' and '='.
    """
    # create a parser
    parser = RawConfigParser()

    if "authorize" in config_file:
        parser = RawConfigParser(delimiters=';', comment_prefixes='%%', )

    # Resolve the file relative to the current working directory.
    file_dir = os.path.dirname(os.path.realpath('__file__'))
    parser.read(os.path.join(file_dir, config_file))

    if parser.has_section(section):
        # Flatten the section's (key, value) pairs into a dict.
        config = dict(parser.items(section))
    else:
        raise Exception(
            'Section {0} not found in the {1} file'.format(section, config_file))

    return config
"""
This allows to use global variables inside the FastAPI application using async mode.

# Usage

Just import `g` and then access (set/get) attributes of it:
```python
from your_project.globals import g


g.foo = "foo"

# In some other code
assert g.foo == "foo"
```

Best way to utilize the global `g` in your code is to set the desired
value in a FastAPI dependency, like so:
```python
async def set_global_foo() -> None:
    g.foo = "foo"


@app.get("/test/", dependencies=[Depends(set_global_foo)])
async def test():
    assert g.foo == "foo"
```

# Setup

Add the `GlobalsMiddleware` to your app:
```python
app = fastapi.FastAPI(
    title="Your app API",
)
app.add_middleware(GlobalsMiddleware)  # <-- This line is necessary
```

Then just use it. ;-)

# Default values

You may use `g.set_default("name", some_value)` to set a default value
for a global variable. This default value will then be used instead of `None`
when the variable is accessed before it was set.

Note that default values should only be set at startup time, never
inside dependencies or similar. Otherwise you may run into the issue that
the value was already used and thus have a value of `None` set already, which
would result in the default value not being used.
"""
from collections.abc import Awaitable, Callable
from contextvars import ContextVar, copy_context
from typing import Any

from fastapi import Request, Response
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.types import ASGIApp


class Globals:
    """Per-request global attribute store backed by ContextVars.

    Attribute access is intercepted (__getattr__/__setattr__), so
    `g.name = value` stores into a request-scoped ContextVar and
    `g.name` reads it back (or the registered default, else None).
    """

    __slots__ = ("_vars", "_defaults")

    _vars: dict[str, ContextVar]
    _defaults: dict[str, Any]

    def __init__(self) -> None:
        # object.__setattr__ is required: our own __setattr__ would try to
        # route these through ContextVars.
        object.__setattr__(self, '_vars', {})
        object.__setattr__(self, '_defaults', {})

    def set_default(self, name: str, default: Any) -> None:
        """Set a default value for a variable."""

        # Ignore if default is already set and is the same value
        if (
            name in self._defaults
            and default is self._defaults[name]
        ):
            return

        # Ensure we don't have a value set already - the default will have
        # no effect then
        if name in self._vars:
            raise RuntimeError(
                f"Cannot set default as variable {name} was already set",
            )

        # Set the default already!
        self._defaults[name] = default

    def _get_default_value(self, name: str) -> Any:
        """Get the default value for a variable."""

        default = self._defaults.get(name, None)

        # Callable defaults act as factories (fresh value per ContextVar).
        return default() if callable(default) else default

    def _ensure_var(self, name: str) -> None:
        """Ensure a ContextVar exists for a variable."""

        if name not in self._vars:
            default = self._get_default_value(name)
            self._vars[name] = ContextVar(f"globals:{name}", default=default)

    def __getattr__(self, name: str) -> Any:
        """Get the value of a variable."""

        self._ensure_var(name)
        return self._vars[name].get()

    def __setattr__(self, name: str, value: Any) -> None:
        """Set the value of a variable."""

        self._ensure_var(name)
        self._vars[name].set(value)


async def globals_middleware_dispatch(
    request: Request,
    call_next: Callable,
) -> Response:
    """Dispatch the request in a new context to allow globals to be used."""

    # copy_context() gives this request an isolated snapshot, so ContextVar
    # writes inside the request do not leak to other requests.
    ctx = copy_context()

    def _call_next() -> Awaitable[Response]:
        return call_next(request)

    return await ctx.run(_call_next)


class GlobalsMiddleware(BaseHTTPMiddleware):  # noqa
    """Middleware to setup the globals context using globals_middleware_dispatch()."""

    def __init__(self, app: ASGIApp) -> None:
        super().__init__(app, globals_middleware_dispatch)


# Module-level singleton imported throughout the app.
g = Globals()
bool = False): + self.skip = skip + self.tag = tag + self.oauth = OAuth() + + async def __call__(self, request: Request, response: Response): + # config + authorize_file = 'authorize.' + g.tenant + '.' + g.environment + '.py' + config_file = 'config.' + g.tenant + '.' + g.environment + '.py' + oidc_config = configParser.getConfig('oidc_client', config_file) + + self.oauth.register( + 'rciam', + client_id=oidc_config['client_id'], + client_secret=oidc_config['client_secret'], + server_metadata_url=oidc_config['issuer'] + "/.well-known/openid-configuration", + client_kwargs={'scope': 'openid profile email voperson_id eduperson_entitlement'} + ) + + response.headers["Access-Control-Expose-Headers"] = "X-Permissions, X-Authenticated, X-Redirect" + + # permissions calculation + access_token = request.headers.get('x-access-token') + rciam = self.oauth.create_client('rciam') + metadata = await rciam.load_server_metadata() + + headers = {'Authorization': f'Bearer {access_token}'} + resp = reqs.get(metadata['userinfo_endpoint'], headers=headers) + + # Authentication + if resp.status_code == 401: + # For now we skip logins and dashboard routes + if (self.tag == 'logins' or self.tag == 'dashboard') and self.skip: + permissions = permissionsCalculation(authorize_file) + permissions_json = json.dumps(permissions).replace(" ", "").replace("\n", "") + # pprint(permissions_json) + response.headers["X-Permissions"] = permissions_json + response.headers["X-Authenticated"] = "false" + response.headers["X-Redirect"] = "false" + return + + raise HTTPException( + status_code=401, + detail="Authentication failed", + headers={ + "X-Authenticated": "false", + "X-Redirect": "true", + "Access-Control-Expose-Headers": "X-Permissions, X-Authenticated, X-Redirect" + } + ) + else: + try: + resp.raise_for_status() + data = resp.json() + except Exception as er: + # TODO: Log here + raise HTTPException(status_code=500) + + # Authorization + permissions = permissionsCalculation(authorize_file, data) 
+ permissions_json = json.dumps(permissions).replace(" ", "").replace("\n", "") + + # Add the permission to a custom header field + response.headers["X-Permissions"] = permissions_json + response.headers["X-Authenticated"] = "true" + + if bool(self.tag): + # Currently we only care about view + if permissions['actions'][self.tag]['view'] == False: + HTTPException(status_code=403) + + +def permissionsCalculation(authorize_file, user_info=None): + entitlements_config = configParser.getConfig('entitlements', authorize_file) + user_entitlements = {} + if user_info is not None: + user_entitlements = user_info.get('eduperson_entitlement') + + roles = { + 'anonymous': True, + 'authenticated': False, + 'administrator': False + } + + for ent, role in entitlements_config.items(): + if user_entitlements is not None and ent in user_entitlements: + # Reset the default anonymous role + roles['anonymous'] = False + # The role might be a csv list. So we need to + # explode and act accordingly + for item_role in role.split(","): + roles[item_role] = True + + # pprint(roles) + + actions = { + 'dashboard': { + 'view': False, + 'write': False + }, + 'identity_providers': { + 'view': False, + 'write': False + }, + 'service_providers': { + 'view': False, + 'write': False + }, + 'logins': { + 'view': True, + 'write': True + }, + 'registered_users': { + 'view': False, + 'write': False + }, + 'communities': { + 'view': False, + 'write': False + }, + 'statistics_raw': { + 'views': False, + 'write': False, + } + } + + for role in roles.keys(): + if roles[role]: + role_actions = configParser.getConfig(role, authorize_file) + for view, config_actions in role_actions.items(): + for item in config_actions.split(","): + actions[view][item] = True + return { + 'roles': roles, + 'actions': actions + } + + +def hasAction(user_actions, category, action): + if (user_actions[category][action] is True): + return True + return False diff --git a/app/utils/ipDatabase.py b/app/utils/ipDatabase.py new file 
mode 100644 index 0000000..19a1be7 --- /dev/null +++ b/app/utils/ipDatabase.py @@ -0,0 +1,20 @@ +from abc import ABC, abstractmethod +from app.utils import configParser +import geoip2.database + + +class ipDatabase(ABC): + DBFILENAME = configParser.getConfig('ip_database_file', 'config.global.py')['db_filename'] + + @abstractmethod + def getCountryFromIp(self): + pass + + +class geoip2Database(ipDatabase): + @classmethod + def getCountryFromIp(self, ip): + gi = geoip2.database.Reader("""./app/ip_databases/{0}""" + .format(ipDatabase.DBFILENAME)) + return [gi.country(ip).country.iso_code, gi.country(ip).country.name] + diff --git a/docker-compose.yml b/docker-compose.yml index 15a2c64..a67e721 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -26,8 +26,8 @@ services: image: metricsrciam:latest container_name: metrcis.rciam.fastapi environment: - - PATH=$PATH:.local/bin - DATABASE_URL=postgresql+psycopg2://rciam:secret@db/metrics_dev + - API_ENVIRONMENT=dev command: uvicorn app.main:app --reload --workers 1 --host 0.0.0.0 --port 8000 ports: - "8004:8000" @@ -47,6 +47,7 @@ services: - "3300:3000" volumes: - ./javascript:/app + # - ./javascript/node_modules tty: true volumes: diff --git a/javascript/.gitignore b/javascript/.gitignore index e22e040..e80f931 100644 --- a/javascript/.gitignore +++ b/javascript/.gitignore @@ -23,3 +23,4 @@ yarn-debug.log* yarn-error.log* .npm/** package-lock.json +/src/*.json diff --git a/javascript/CHANGELOG.md b/javascript/CHANGELOG.md new file mode 100644 index 0000000..7463db7 --- /dev/null +++ b/javascript/CHANGELOG.md @@ -0,0 +1,4 @@ +### Changelog + +## Prerelease logging +- Development RC phase \ No newline at end of file diff --git a/javascript/package.json b/javascript/package.json index c9958ed..224a9bc 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -6,9 +6,9 @@ "@date-io/moment": "^2.14.0", "@emotion/react": "^11.9.0", "@emotion/styled": "^11.8.1", - "@fortawesome/fontawesome-svg-core": 
"^6.1.1", - "@fortawesome/free-solid-svg-icons": "^6.1.1", - "@fortawesome/react-fontawesome": "^0.1.18", + "@fortawesome/fontawesome-svg-core": "^6.3.0", + "@fortawesome/free-solid-svg-icons": "^6.3.0", + "@fortawesome/react-fontawesome": "^0.2.0", "@hookform/error-message": "^2.0.0", "@hookform/resolvers": "^2.8.10", "@mui/icons-material": "^5.8.0", @@ -18,19 +18,49 @@ "@testing-library/react": "^13.2.0", "@testing-library/user-event": "^13.5.0", "axios": "^0.27.2", + "bootstrap": "^5.2.2", + "datatable": "^2.0.2", + "datatables.net": "^1.13.1", + "datatables.net-buttons": "^2.3.2", + "datatables.net-buttons-dt": "^2.3.3", + "datatables.net-dt": "^1.13.1", + "dateformat": "^5.0.3", + "html-react-parser": "^3.0.8", + "i18next": "^22.4.9", + "i18next-browser-languagedetector": "^7.0.1", + "i18next-http-backend": "^2.1.1", + "jquery": "^3.6.1", + "jquery-mapael": "^2.2.0", + "json-loader": "^0.5.7", + "jwt-decode": "^3.1.2", "moment": "^2.29.3", + "pdfmake": "^0.2.6", "react": "^18.1.0", + "react-bootstrap": "^2.7.2", + "react-bootstrap-sidebar-menu": "^2.0.3", + "react-cookie": "^4.1.1", "react-date-picker": "^8.4.0", "react-datepicker": "^4.8.0", "react-dom": "^18.1.0", + "react-dropdown": "^1.11.0", + "react-google-charts": "^4.0.0", "react-hook-form": "^7.31.1", + "react-i18next": "^12.1.5", "react-query": "^3.39.0", "react-router-dom": "^6.3.0", "react-scripts": "5.0.1", - "react-toastify": "^9.0.1", + "react-select": "^5.6.1", + "react-tabs": "^6.0.0", + "react-toastify": "^9.1.1", + "react-tooltip": "^4.5.0", + "sass": "^1.58.0", "web-vitals": "^2.1.4", "yup": "^0.32.11" }, + "devDependencies": { + "@babel/plugin-proposal-private-property-in-object": "*" + }, + "homepage": ".", "scripts": { "start": "react-scripts start", "build": "react-scripts build", diff --git a/javascript/public/index.html b/javascript/public/index.html new file mode 100644 index 0000000..10b7a22 --- /dev/null +++ b/javascript/public/index.html @@ -0,0 +1,23 @@ + + + + + + + + + + + 
Rciam Metrics + + + +
+ + diff --git a/javascript/src/App.jsx b/javascript/src/App.jsx index f71f02d..1ff758c 100644 --- a/javascript/src/App.jsx +++ b/javascript/src/App.jsx @@ -1,25 +1,95 @@ +import React, {useState, useEffect} from "react"; +import {Route, Routes} from 'react-router-dom' +import Communities from "./Pages/Communities"; +import Users from "./Pages/Users"; +import Dashboard from "./Pages/Dashboard"; +import Idps from "./Pages/Idps"; +import Sps from "./Pages/Sps"; +import Sp from "./Pages/Sps/sp"; +import Idp from "./Pages/Idps/idp"; +import Login from "./Pages/Authentication/Login"; import "./app.css"; -import {BrowserRouter as Router, Routes, Route} from "react-router-dom"; -import Login from "./Pages/Login"; -import Register from "./Pages/Register"; +import "./style.scss"; +import 'react-toastify/dist/ReactToastify.css'; +import jwt_decode from "jwt-decode"; +import { + languageContext, + userinfoContext +} from "./Context/context"; +import Layout from "./components/Common/layout"; +import SideNav from "./components/Common/sideNav"; +import Main from "./components/Common/main"; +import {ToastContainer} from "react-toastify"; import ErrorPage from "./Pages/Error"; -import {QueryClient, QueryClientProvider} from 'react-query' +import {useCookies} from 'react-cookie'; +import {toast} from 'react-toastify'; +import Middleware from "./components/Common/middleware" + function App() { - const queryClient = new QueryClient() - - return ( - - - - }/> - }/> - }/> - }/> - - - - ); + const [language, setLanguage] = useState('en') + const [userInfo, setUserInfo] = useState(null) + const [permissions, setPermissions] = useState(null) + const [cookies, setCookie] = useCookies(); + + useEffect(() => { + if (cookies.userinfo != undefined) { + setUserInfo(jwt_decode(cookies.userinfo)) + } + if (cookies.permissions != undefined) { + // The backend will send an encoded permissions while + // the frontend adds a simple json value + try { + setPermissions(jwt_decode(cookies.permissions)) 
+ } catch (error) { + setPermissions(cookies.permissions) + } + } + }, [cookies.userinfo, cookies.permissions]) + + useEffect(() => { + if (userInfo != undefined) { + toast.info(`Welcome ${userInfo.name}`) + } + }, [userInfo]) + + return ( + + + + +
+ +
+ +
+
+
+ ); } +function AppRoutes() { + return ( + + }/> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + + ) +} + + export default App; \ No newline at end of file diff --git a/javascript/src/Context/UserContext.js b/javascript/src/Context/UserContext.js index 4111568..6c1e7cb 100644 --- a/javascript/src/Context/UserContext.js +++ b/javascript/src/Context/UserContext.js @@ -1,3 +1,3 @@ -import { createContext } from "react"; +import React, { createContext } from "react"; export const UserContext = createContext(null); diff --git a/javascript/src/Context/UserProvider.js b/javascript/src/Context/UserProvider.js index 0eabaca..91074de 100644 --- a/javascript/src/Context/UserProvider.js +++ b/javascript/src/Context/UserProvider.js @@ -1,5 +1,4 @@ -import * as React from 'react' -import {useState} from "react"; +import React, {useState} from 'react' import {UserContext} from "./UserContext"; diff --git a/javascript/src/Context/context.js b/javascript/src/Context/context.js new file mode 100644 index 0000000..42416e6 --- /dev/null +++ b/javascript/src/Context/context.js @@ -0,0 +1,5 @@ +import React from 'react'; + +export const userContext = React.createContext(); +export const languageContext = React.createContext(); +export const userinfoContext = React.createContext(); \ No newline at end of file diff --git a/javascript/src/Context/provider.js b/javascript/src/Context/provider.js new file mode 100644 index 0000000..d09c763 --- /dev/null +++ b/javascript/src/Context/provider.js @@ -0,0 +1,23 @@ +import * as React from 'react' +import {BrowserRouter as Router} from 'react-router-dom' +import {QueryClient, QueryClientProvider} from 'react-query' +import {CookiesProvider} from 'react-cookie'; + +function AppProviders({children}) { + + const queryClient = new QueryClient() + + return ( + + + + {children} + + + + ) +} + +export { + AppProviders +} \ No newline at end of file diff --git a/javascript/src/Pages/Authentication/Login.js b/javascript/src/Pages/Authentication/Login.js 
new file mode 100644 index 0000000..0975665 --- /dev/null +++ b/javascript/src/Pages/Authentication/Login.js @@ -0,0 +1,28 @@ +import React from "react"; +import Button from "react-bootstrap/Button"; +import {useTranslation} from "react-i18next"; +import {useCookies} from 'react-cookie'; +import config from '../../config.json' + +function Login() { + const {t, i18n} = useTranslation(); + const [cookies, setCookie] = useCookies(['login_start']); + + const handleLoginClick = () => { + // Set a cookie with the current location so the backend knows where to go + setCookie('login_start', window.location.href, {path: '/'}); + // This is not a request but a redirect. So i will include the x-keys here + setCookie('x-tenant', config.tenant, {path: '/'}); + setCookie('x-environment', config.environment, {path: '/'}); + // Redirect to the login endpoint + window.location.href = config?.login_url + } + + return ( + + ) +} + +export default Login \ No newline at end of file diff --git a/javascript/src/Pages/Communities/index.js b/javascript/src/Pages/Communities/index.js new file mode 100644 index 0000000..e9e43f7 --- /dev/null +++ b/javascript/src/Pages/Communities/index.js @@ -0,0 +1,62 @@ +import React, {useState, useEffect} from "react"; +import {useQuery} from 'react-query'; +import Container from "react-bootstrap/Container"; +import CommunitiesChart from "../../components/Communities/communitiesChart"; +import CommunitiesDataTable from "../../components/Communities/communitiesDataTable"; +import CommunitiesMap from "../../components/Communities/communitiesMap"; +import Header from "../../components/Common/header"; +import Footer from "../../components/Common/footer"; +import Spinner from "../../components/Common/spinner" +import Col from 'react-bootstrap/Col'; +import Row from 'react-bootstrap/Row'; +import {tenenvKey} from '../../utils/queryKeys' +import {getTenenv} from '../../utils/queries' +import {useCookies} from "react-cookie"; + +const Communities = () => { + 
const [tenenvId, setTenenvId] = useState(0); + const [cookies, setCookie] = useCookies(); + + const tenant = cookies['x-tenant'] + const environment = cookies['x-environment'] + + + const tenenv = useQuery( + [tenenvKey, {tenantId: tenant, environment: environment}], + getTenenv, { + retry: 0, + }) + + useEffect(() => { + setTenenvId(tenenv?.data?.[0]?.id) + }, [!tenenv.isLoading + && tenenv.isSuccess + && !tenenv.isFetching]) + + if(tenenv.isLoading + || tenenv.isFetching) { + return () + } + + if (tenenvId == undefined + || tenenvId == 0 + || tenenvId == "") { + return null + } + + return ( + +
+ + +

Communities

+ +
+ + + +