diff --git a/.github/workflows/release_prod.yml b/.github/workflows/release_prod.yml
new file mode 100644
index 0000000..00b1e7d
--- /dev/null
+++ b/.github/workflows/release_prod.yml
@@ -0,0 +1,304 @@
+name: CI at Main Branch
+run-name: ${{ github.actor }} is deploying at main branch 🚀
+on:
+ push:
+ branches:
+ - main
+env:
+ RELEASE_ID: rc-${{ github.ref_name }}-${{ github.run_id }}
+ NODE_VERSION: 16.x
+jobs:
+ configure:
+ runs-on: ubuntu-latest
+ outputs:
+ matrix: ${{ steps.set-matrix.outputs.matrix }}
+ steps:
+ - name: Checkout to repository
+ uses: actions/checkout@v3
+ with:
+ repository: 'grnet/rciam-deploy-inv'
+ ref: 'master'
+ ssh-key: ${{ secrets.DEPLOY_READ_SECRET }}
+ path: 'inventory'
+ - name: Set matrix data
+ id: set-matrix
+ working-directory: inventory/rciam-metrics/files/all/tenants
+ # https://www.jitsejan.com/use-github-actions-with-json-file-as-matrix
+ run: |
+ ls
+ cat ./config.json
+ echo "matrix=$(jq -c . < ./config.json)" >> $GITHUB_OUTPUT
+ checkout:
+ runs-on: ubuntu-latest
+ needs: configure
+ strategy:
+ matrix: ${{ fromJson(needs.configure.outputs.matrix) }}
+ steps:
+ - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
+ - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub!"
+ - run: echo "🔎 The name of your branch is ${{ github.ref_name }} and your repository is ${{ github.repository }}."
+ - name: Check out repository code
+ uses: actions/checkout@v3
+ with:
+ path: 'metrics-app-${{matrix.tenant}}'
+ ref: 'main'
+ - run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner."
+ - run: echo "💡 The ${{ github.sha }} commit processing started."
+ - run: echo "🖥️ The workflow is now ready to test your code on the runner."
+ - name: List files in the repository
+ run: |
+ ls ${{ github.workspace }}
+ - name: Download playbook
+ uses: actions/checkout@v3
+ with:
+ # Repository name with owner. For example, actions/checkout
+ # Default: ${{ github.repository }}
+ repository: 'rciam/rciam-deploy'
+ ref: 'master'
+ path: 'roles'
+ - name: Download inventory
+ uses: actions/checkout@v3
+ with:
+ repository: 'grnet/rciam-deploy-inv'
+ ref: 'master'
+ ssh-key: ${{ secrets.DEPLOY_READ_SECRET }}
+ path: 'inventory'
+ - name: Run playbook (create react_config file)
+ uses: dawidd6/action-ansible-playbook@v2
+ with:
+ # Required, playbook filepath
+ playbook: metricsservers.yml
+ # Optional, directory where playbooks live
+ directory: ./roles
+ key: ${{ secrets.DEPLOY_READ_SECRET }}
+ # Optional, encrypted vault password
+ vault_password: ${{secrets.VAULT_PASSWORD}}
+ options: |
+ --inventory ${{ github.workspace }}/inventory/rciam-metrics/hosts.ini
+ --tags rciam-metrics:config-local
+ -u debian
+ - name: List files in the repository
+ # The tenant specific config file, i.e. config.tenant.environment.json, becomes plain config.json
+ # because the frontend lives under its own directory/path
+ run: |
+ ls -la ${{ github.workspace }}/inventory/rciam-metrics/files
+ - name: Move tenant config file to config.json
+ # The tenant specific config file, i.e. config.tenant.environment.json, becomes plain config.json
+ run: |
+ mv ${{ github.workspace }}/inventory/rciam-metrics/files/config.${{ matrix.tenant }}.json ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript/src/config.json
+ - name: List files in metrics-app-${{matrix.tenant}} javascript
+ # The tenant specific config file, i.e. config.tenant.environment.json, becomes plain config.json
+ run: |
+ ls -la ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript/src
+ - name: Share artifact inside workflow (frontend)
+ uses: actions/upload-artifact@v3
+ with:
+ name: react-application
+ path: |
+ ${{ github.workspace }}/metrics-app-${{matrix.tenant}}
+ - name: Use Node.js ${{ env.NODE_VERSION }}
+ uses: actions/setup-node@v3
+ with:
+ node-version: ${{ env.NODE_VERSION }}
+ - name: Install dependencies
+ run: |
+ cd ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript; npm install
+ - name: Build React application
+ run: |
+ cd ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript; CI=false npm run build
+ # Share artifact inside workflow
+ - name: List files in the repository
+ run: |
+ ls ${{ github.workspace }}/metrics-app-${{matrix.tenant}}
+ - name: Create release branch
+ run: cd ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/; git checkout -b ${{ env.RELEASE_ID }}
+ - name: Initialize mandatory git config
+ working-directory: ./metrics-app-${{matrix.tenant}}
+ run: |
+ git config user.name "GitHub Actions"
+ git config user.email noreply@github.com
+ - name: Push changes
+ uses: ad-m/github-push-action@master
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ branch: ${{ env.RELEASE_ID }}
+ directory: ./metrics-app-${{matrix.tenant}}
+ - name: Share artifact inside workflow - ${{matrix.tenant}}
+ uses: actions/upload-artifact@v3
+ with:
+ name: react-github-actions-build-${{matrix.tenant}}
+ path: |
+ ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript/build
+ ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript/CHANGELOG.md
+ ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/CHANGELOG.md
+ - name: Share artifact inside workflow - backend
+ uses: actions/upload-artifact@v3
+ with:
+ name: react-github-actions-build-backend
+ path: |
+ ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/app
+ ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/requirements.txt
+ ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/CHANGELOG.md
+ - run: echo "🍏 This job's status is ${{ job.status }}."
+ frontend_release:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix: ${{ fromJson(needs.configure.outputs.matrix) }}
+ # We specify that deploys needs to
+ # finish before we create a release
+ # Job outputs are available to all downstream jobs that depend on this job.
+ needs: [checkout, configure]
+ steps:
+ # Download previously shared build
+ - name: Get artifact
+ uses: actions/download-artifact@v3
+ with:
+ path: ./metrics-app-${{matrix.tenant}}
+ name: react-github-actions-build-${{matrix.tenant}}
+ - name: List files
+ run: |
+ pwd
+ ls -la ./
+ ls -la ./metrics-app-${{matrix.tenant}}/
+ ls -la ./metrics-app-${{matrix.tenant}}/javascript
+ ls -la ./metrics-app-${{matrix.tenant}}/javascript/build
+ - name: Compress action step
+ uses: a7ul/tar-action@v1.1.0
+ id: compress
+ # The frontend release is only the javascript/build
+ # We need to include all the files and directories since
+ # the action is not smart enough to understand the asterisk (*)
+ with:
+ command: c
+ cwd: ./metrics-app-${{matrix.tenant}}/javascript/build
+ files: |
+ asset-manifest.json
+ index.html
+ static
+ outPath: frontend-${{matrix.tenant}}-release-build.tar.gz
+ # Upload as an artifact of the current workflow
+ - name: Upload frontend build zip artifact
+ uses: actions/upload-artifact@v3
+ with:
+ name: frontend-${{matrix.tenant}}-release-build.tar.gz
+ path: frontend-${{matrix.tenant}}-release-build.tar.gz
+ - name: Create Release
+ uses: ncipollo/release-action@v1
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
+ artifacts: "frontend-${{matrix.tenant}}-release-build.tar.gz"
+ bodyFile: "./metrics-app-${{matrix.tenant}}/javascript/CHANGELOG.md"
+ tag: "frontend-${{matrix.tenant}}-${{ github.ref_name }}-${{ env.RELEASE_ID }}"
+ makeLatest: true
+ generateReleaseNotes: true
+ backend_release:
+ runs-on: ubuntu-latest
+ # We specify that deploys needs to
+ # finish before we create a release
+ # Job outputs are available to all downstream jobs that depend on this job.
+ needs: [checkout, configure]
+ steps:
+ # Download previously shared build
+ - name: Get artifact
+ uses: actions/download-artifact@v3
+ with:
+ name: react-github-actions-build-backend
+ - name: Compress action step
+ uses: a7ul/tar-action@v1.1.0
+ id: compress
+ with:
+ command: c
+ cwd: ./
+ files: |
+ app/
+ requirements.txt
+ outPath: backend-release-build.tar.gz
+ - name: Upload backend build zip artifact
+ uses: actions/upload-artifact@v3
+ with:
+ name: backend-release-build.tar.gz
+ path: backend-release-build.tar.gz
+ - name: Create Release
+ uses: ncipollo/release-action@v1
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
+ artifacts: "backend-release-build.tar.gz"
+ bodyFile: "CHANGELOG.md"
+ tag: "metrics-api-${{ github.ref_name }}-${{ env.RELEASE_ID }}"
+ makeLatest: true
+ generateReleaseNotes: true
+ deploy_backend:
+ runs-on: ubuntu-latest
+ # We specify that deploys needs to
+ # finish before we create a release
+ needs: backend_release
+ steps:
+ - name: Download playbook
+ uses: actions/checkout@v3
+ with:
+ # Repository name with owner. For example, actions/checkout
+ # Default: ${{ github.repository }}
+ repository: 'rciam/rciam-deploy'
+ ref: 'master'
+ path: 'roles'
+ - name: Download inventory
+ uses: actions/checkout@v3
+ with:
+ repository: 'grnet/rciam-deploy-inv'
+ ref: 'master'
+ ssh-key: ${{ secrets.DEPLOY_READ_SECRET }}
+ path: 'inventory'
+ - name: Run playbook (deploy rciam-metrics) with release metrics-api-${{ github.ref_name }}-${{ env.RELEASE_ID }}
+ uses: dawidd6/action-ansible-playbook@v2
+ with:
+ # Required, playbook filepath
+ playbook: metricsservers.yml
+ # Optional, directory where playbooks live
+ directory: ./roles
+ key: ${{ secrets.DEPLOY_READ_SECRET }}
+ # Optional, encrypted vault password
+ vault_password: ${{secrets.VAULT_PASSWORD}}
+ options: |
+ --inventory ${{ github.workspace }}/inventory/rciam-metrics/hosts.ini
+ --tags rciam-metrics:deploy-backend
+ -u debian
+ --extra-vars "metrics_release=metrics-api-${{ github.ref_name }}-${{ env.RELEASE_ID }}"
+ deploy_frontend:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix: ${{ fromJson(needs.configure.outputs.matrix) }}
+ # We specify that deploys needs to
+ # finish before we create a release
+ # Job outputs are available to all downstream jobs that depend on this job.
+ needs: [frontend_release, configure]
+ steps:
+ - name: Download playbook
+ uses: actions/checkout@v3
+ with:
+ # Repository name with owner. For example, actions/checkout
+ # Default: ${{ github.repository }}
+ repository: 'rciam/rciam-deploy'
+ ref: 'master'
+ path: 'roles'
+ - name: Download inventory
+ uses: actions/checkout@v3
+ with:
+ repository: 'grnet/rciam-deploy-inv'
+ ref: 'master'
+ ssh-key: ${{ secrets.DEPLOY_READ_SECRET }}
+ path: 'inventory'
+ - name: Run playbook (deploy rciam-metrics) with release frontend-${{matrix.tenant}}-${{ github.ref_name }}-${{ env.RELEASE_ID }}
+ uses: dawidd6/action-ansible-playbook@v2
+ with:
+ # Required, playbook filepath
+ playbook: metricsservers.yml
+ # Optional, directory where playbooks live
+ directory: ./roles
+ key: ${{ secrets.DEPLOY_READ_SECRET }}
+ # Optional, encrypted vault password
+ vault_password: ${{secrets.VAULT_PASSWORD}}
+ options: |
+ --inventory ${{ github.workspace }}/inventory/rciam-metrics/hosts.ini
+ --tags rciam-metrics:deploy-frontend
+ -u debian
+ --extra-vars "metrics_release=frontend-${{matrix.tenant}}-${{ github.ref_name }}-${{ env.RELEASE_ID }} tenant_environment=${{matrix.tenant}}"
\ No newline at end of file
diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml
new file mode 100644
index 0000000..539d0f1
--- /dev/null
+++ b/.github/workflows/releases.yml
@@ -0,0 +1,304 @@
+name: CI at Devel Branch
+run-name: ${{ github.actor }} is deploying at devel branch 🚀
+on:
+ push:
+ branches:
+ - develop
+env:
+ RELEASE_ID: rc-${{ github.ref_name }}-${{ github.run_id }}
+ NODE_VERSION: 16.x
+jobs:
+ configure:
+ runs-on: ubuntu-latest
+ outputs:
+ matrix: ${{ steps.set-matrix.outputs.matrix }}
+ steps:
+ - name: Checkout to repository
+ uses: actions/checkout@v3
+ with:
+ repository: 'grnet/rciam-deploy-inv'
+ ref: 'master'
+ ssh-key: ${{ secrets.DEPLOY_READ_SECRET }}
+ path: 'inventory'
+ - name: Set matrix data
+ id: set-matrix
+ working-directory: inventory/rciam-metrics-dev/files/all/tenants
+ # https://www.jitsejan.com/use-github-actions-with-json-file-as-matrix
+ run: |
+ ls
+ cat ./config.json
+ echo "matrix=$(jq -c . < ./config.json)" >> $GITHUB_OUTPUT
+ checkout:
+ runs-on: ubuntu-latest
+ needs: configure
+ strategy:
+ matrix: ${{ fromJson(needs.configure.outputs.matrix) }}
+ steps:
+ - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
+ - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub!"
+ - run: echo "🔎 The name of your branch is ${{ github.ref_name }} and your repository is ${{ github.repository }}."
+ - name: Check out repository code
+ uses: actions/checkout@v3
+ with:
+ path: 'metrics-app-${{matrix.tenant}}'
+ ref: 'develop'
+ - run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner."
+ - run: echo "💡 The ${{ github.sha }} commit processing started."
+ - run: echo "🖥️ The workflow is now ready to test your code on the runner."
+ - name: List files in the repository
+ run: |
+ ls ${{ github.workspace }}
+ - name: Download playbook
+ uses: actions/checkout@v3
+ with:
+ # Repository name with owner. For example, actions/checkout
+ # Default: ${{ github.repository }}
+ repository: 'rciam/rciam-deploy'
+ ref: 'devel'
+ path: 'roles'
+ - name: Download inventory
+ uses: actions/checkout@v3
+ with:
+ repository: 'grnet/rciam-deploy-inv'
+ ref: 'master'
+ ssh-key: ${{ secrets.DEPLOY_READ_SECRET }}
+ path: 'inventory'
+ - name: Run playbook (create react_config file)
+ uses: dawidd6/action-ansible-playbook@v2
+ with:
+ # Required, playbook filepath
+ playbook: metricsservers.yml
+ # Optional, directory where playbooks live
+ directory: ./roles
+ key: ${{ secrets.DEPLOY_READ_SECRET }}
+ # Optional, encrypted vault password
+ vault_password: ${{secrets.VAULT_PASSWORD_DEVEL}}
+ options: |
+ --inventory ${{ github.workspace }}/inventory/rciam-metrics-dev/hosts.ini
+ --tags rciam-metrics:config-local
+ -u debian
+ - name: List files in the repository
+ # The tenant specific config file, i.e. config.tenant.environment.json, becomes plain config.json
+ # because the frontend lives under its own directory/path
+ run: |
+ ls -la ${{ github.workspace }}/inventory/rciam-metrics-dev/files
+ - name: Move tenant config file to config.json
+ # The tenant specific config file, i.e. config.tenant.environment.json, becomes plain config.json
+ run: |
+ mv ${{ github.workspace }}/inventory/rciam-metrics-dev/files/config.${{ matrix.tenant }}.json ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript/src/config.json
+ - name: List files in metrics-app-${{matrix.tenant}} javascript
+ # The tenant specific config file, i.e. config.tenant.environment.json, becomes plain config.json
+ run: |
+ ls -la ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript/src
+ - name: Share artifact inside workflow (frontend)
+ uses: actions/upload-artifact@v3
+ with:
+ name: react-application
+ path: |
+ ${{ github.workspace }}/metrics-app-${{matrix.tenant}}
+ - name: Use Node.js ${{ env.NODE_VERSION }}
+ uses: actions/setup-node@v3
+ with:
+ node-version: ${{ env.NODE_VERSION }}
+ - name: Install dependencies
+ run: |
+ cd ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript; npm install
+ - name: Build React application
+ run: |
+ cd ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript; CI=false npm run build
+ # Share artifact inside workflow
+ - name: List files in the repository
+ run: |
+ ls ${{ github.workspace }}/metrics-app-${{matrix.tenant}}
+ - name: Create release branch
+ run: cd ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/; git checkout -b ${{ env.RELEASE_ID }}
+ - name: Initialize mandatory git config
+ working-directory: ./metrics-app-${{matrix.tenant}}
+ run: |
+ git config user.name "GitHub Actions"
+ git config user.email noreply@github.com
+ - name: Push changes
+ uses: ad-m/github-push-action@master
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ branch: ${{ env.RELEASE_ID }}
+ directory: ./metrics-app-${{matrix.tenant}}
+ - name: Share artifact inside workflow - ${{matrix.tenant}}
+ uses: actions/upload-artifact@v3
+ with:
+ name: react-github-actions-build-${{matrix.tenant}}
+ path: |
+ ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript/build
+ ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/javascript/CHANGELOG.md
+ ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/CHANGELOG.md
+ - name: Share artifact inside workflow - backend
+ uses: actions/upload-artifact@v3
+ with:
+ name: react-github-actions-build-backend
+ path: |
+ ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/app
+ ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/requirements.txt
+ ${{ github.workspace }}/metrics-app-${{matrix.tenant}}/CHANGELOG.md
+ - run: echo "🍏 This job's status is ${{ job.status }}."
+ frontend_release:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix: ${{ fromJson(needs.configure.outputs.matrix) }}
+ # We specify that deploys needs to
+ # finish before we create a release
+ # Job outputs are available to all downstream jobs that depend on this job.
+ needs: [checkout, configure]
+ steps:
+ # Download previously shared build
+ - name: Get artifact
+ uses: actions/download-artifact@v3
+ with:
+ path: ./metrics-app-${{matrix.tenant}}
+ name: react-github-actions-build-${{matrix.tenant}}
+ - name: List files
+ run: |
+ pwd
+ ls -la ./
+ ls -la ./metrics-app-${{matrix.tenant}}/
+ ls -la ./metrics-app-${{matrix.tenant}}/javascript
+ ls -la ./metrics-app-${{matrix.tenant}}/javascript/build
+ - name: Compress action step
+ uses: a7ul/tar-action@v1.1.0
+ id: compress
+ # The frontend release is only the javascript/build
+ # We need to include all the files and directories since
+ # the action is not smart enough to understand the asterisk (*)
+ with:
+ command: c
+ cwd: ./metrics-app-${{matrix.tenant}}/javascript/build
+ files: |
+ asset-manifest.json
+ index.html
+ static
+ outPath: frontend-${{matrix.tenant}}-release-build.tar.gz
+ # Upload as an artifact of the current workflow
+ - name: Upload frontend build zip artifact
+ uses: actions/upload-artifact@v3
+ with:
+ name: frontend-${{matrix.tenant}}-release-build.tar.gz
+ path: frontend-${{matrix.tenant}}-release-build.tar.gz
+ - name: Create Release
+ uses: ncipollo/release-action@v1
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
+ artifacts: "frontend-${{matrix.tenant}}-release-build.tar.gz"
+ bodyFile: "./metrics-app-${{matrix.tenant}}/javascript/CHANGELOG.md"
+ tag: "frontend-${{matrix.tenant}}-${{ github.ref_name }}-${{ env.RELEASE_ID }}"
+ makeLatest: true
+ generateReleaseNotes: true
+ backend_release:
+ runs-on: ubuntu-latest
+ # We specify that deploys needs to
+ # finish before we create a release
+ # Job outputs are available to all downstream jobs that depend on this job.
+ needs: [checkout, configure]
+ steps:
+ # Download previously shared build
+ - name: Get artifact
+ uses: actions/download-artifact@v3
+ with:
+ name: react-github-actions-build-backend
+ - name: Compress action step
+ uses: a7ul/tar-action@v1.1.0
+ id: compress
+ with:
+ command: c
+ cwd: ./
+ files: |
+ app/
+ requirements.txt
+ outPath: backend-release-build.tar.gz
+ - name: Upload backend build zip artifact
+ uses: actions/upload-artifact@v3
+ with:
+ name: backend-release-build.tar.gz
+ path: backend-release-build.tar.gz
+ - name: Create Release
+ uses: ncipollo/release-action@v1
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
+ artifacts: "backend-release-build.tar.gz"
+ bodyFile: "CHANGELOG.md"
+ tag: "metrics-api-${{ github.ref_name }}-${{ env.RELEASE_ID }}"
+ makeLatest: true
+ generateReleaseNotes: true
+ deploy_backend:
+ runs-on: ubuntu-latest
+ # We specify that deploys needs to
+ # finish before we create a release
+ needs: backend_release
+ steps:
+ - name: Download playbook
+ uses: actions/checkout@v3
+ with:
+ # Repository name with owner. For example, actions/checkout
+ # Default: ${{ github.repository }}
+ repository: 'rciam/rciam-deploy'
+ ref: 'devel'
+ path: 'roles'
+ - name: Download inventory
+ uses: actions/checkout@v3
+ with:
+ repository: 'grnet/rciam-deploy-inv'
+ ref: 'master'
+ ssh-key: ${{ secrets.DEPLOY_READ_SECRET }}
+ path: 'inventory'
+ - name: Run playbook (deploy rciam-metrics) with release metrics-api-${{ github.ref_name }}-${{ env.RELEASE_ID }}
+ uses: dawidd6/action-ansible-playbook@v2
+ with:
+ # Required, playbook filepath
+ playbook: metricsservers.yml
+ # Optional, directory where playbooks live
+ directory: ./roles
+ key: ${{ secrets.DEPLOY_READ_SECRET }}
+ # Optional, encrypted vault password
+ vault_password: ${{secrets.VAULT_PASSWORD_DEVEL}}
+ options: |
+ --inventory ${{ github.workspace }}/inventory/rciam-metrics-dev/hosts.ini
+ --tags rciam-metrics:deploy-backend
+ -u debian
+ --extra-vars "metrics_release=metrics-api-${{ github.ref_name }}-${{ env.RELEASE_ID }}"
+ deploy_frontend:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix: ${{ fromJson(needs.configure.outputs.matrix) }}
+ # We specify that deploys needs to
+ # finish before we create a release
+ # Job outputs are available to all downstream jobs that depend on this job.
+ needs: [frontend_release, configure]
+ steps:
+ - name: Download playbook
+ uses: actions/checkout@v3
+ with:
+ # Repository name with owner. For example, actions/checkout
+ # Default: ${{ github.repository }}
+ repository: 'rciam/rciam-deploy'
+ ref: 'devel'
+ path: 'roles'
+ - name: Download inventory
+ uses: actions/checkout@v3
+ with:
+ repository: 'grnet/rciam-deploy-inv'
+ ref: 'master'
+ ssh-key: ${{ secrets.DEPLOY_READ_SECRET }}
+ path: 'inventory'
+ - name: Run playbook (deploy rciam-metrics) with release frontend-${{matrix.tenant}}-${{ github.ref_name }}-${{ env.RELEASE_ID }}
+ uses: dawidd6/action-ansible-playbook@v2
+ with:
+ # Required, playbook filepath
+ playbook: metricsservers.yml
+ # Optional, directory where playbooks live
+ directory: ./roles
+ key: ${{ secrets.DEPLOY_READ_SECRET }}
+ # Optional, encrypted vault password
+ vault_password: ${{secrets.VAULT_PASSWORD_DEVEL}}
+ options: |
+ --inventory ${{ github.workspace }}/inventory/rciam-metrics-dev/hosts.ini
+ --tags rciam-metrics:deploy-frontend
+ -u debian
+ --extra-vars "metrics_release=frontend-${{matrix.tenant}}-${{ github.ref_name }}-${{ env.RELEASE_ID }} tenant_environment=${{matrix.tenant}}"
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index bc2c358..7ec08dd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -134,6 +134,7 @@ dmypy.json
/node_modules
.npm/**
javascript/.npm/**
+javascript/.bash_history
.gnupg/**
.idea/**
.yarn/**
@@ -141,3 +142,4 @@ javascript/.npm/**
.vscode/
.DS_Store
yarn-error.log
+
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..7463db7
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,4 @@
+### Changelog
+
+## Prerelease logging
+- Development RC phase
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index 4831740..00256e5 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,5 @@
ARG PYTHON_IMAGE_REPO=python
-FROM ${PYTHON_IMAGE_REPO}:3.8.15-bullseye
+FROM ${PYTHON_IMAGE_REPO}:3.11.5-bookworm
RUN curl -sL https://deb.nodesource.com/setup_18.x | sed "s/exec_cmd 'apt-get update'/exec_cmd 'apt-get --allow-releaseinfo-change update'/" | bash -
RUN echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list
RUN curl https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add -
@@ -18,6 +18,8 @@ RUN apt-get -qq --allow-releaseinfo-change update \
# Create working directory
ENV APP_HOME /app
+ENV API_ENVIRONMENT dev
+
RUN mkdir -p $APP_HOME
WORKDIR $APP_HOME
@@ -26,14 +28,17 @@ ARG APP_GID=1000
RUN groupadd -g ${APP_GID} app
RUN useradd -u ${APP_UID} -g ${APP_GID} -d $APP_HOME app
+RUN echo $(python3 -m site --user-base)
# set environment variables
+ENV PATH $APP_HOME/.local/bin:${PATH}
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV ENVIRONMENT dev
ENV TESTING 0
#COPY requirements* $APP_HOME
+RUN echo "fs.inotify.max_user_watches=524288" >> /etc/sysctl.conf
USER app:app
diff --git a/README.md b/README.md
index 0354330..40e538f 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,4 @@
-# rciam metrics
-RCIAM METRICS service
+
RCIAM Metrics v0.1.0
## Install
@@ -11,10 +10,10 @@ docker-compose pull
### Install python dependencies
docker-compose run --rm --no-deps web pip install --upgrade pip
-docker-compose run --rm --no-deps web pip install -r requirements.txt
+docker-compose run --rm --no-deps web pip3 install --no-cache-dir -r requirements.txt
### Install nodejs dependencies
-docker-compose run --rm --no-deps api npm install
+docker-compose run --rm --no-deps api npm install --prefer-online
### Run Database deployment
[//]: # (docker-compose run --rm web alembic revision --autogenerate -m 'Initial Migration')
@@ -25,4 +24,4481 @@ docker-compose run --rm web alembic upgrade head
[//]: # (docker-compose run --rm web python app/seed.py)
### Start the Service
-docker-compose up api
\ No newline at end of file
+docker-compose up api
+
+## API Guide
+
+
+users
+
+## read_users_country_registered_users_country_get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /registered_users_country?tenenv_id=0 \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /registered_users_country?tenenv_id=0 HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/registered_users_country?tenenv_id=0',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/registered_users_country',
+ params: {
+ 'tenenv_id' => 'integer'
+}, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/registered_users_country', params={
+ 'tenenv_id': '0'
+}, headers = headers)
+
+print(r.json())
+
+```
+
+```php
+$headers = array('Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/registered_users_country', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/registered_users_country?tenenv_id=0");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte{jsonReq})
+ req, err := http.NewRequest("GET", "/registered_users_country", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /registered_users_country`
+
+*Read Users Country*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|offset|query|integer|false|none|
+|startDate|query|string|false|none|
+|endDate|query|string|false|none|
+|tenenv_id|query|integer|true|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+## read_users_country_groupby_registered_users_country_group_by__group_by__get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /registered_users_country_group_by/{group_by}?tenenv_id=0 \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /registered_users_country_group_by/{group_by}?tenenv_id=0 HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/registered_users_country_group_by/{group_by}?tenenv_id=0',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/registered_users_country_group_by/{group_by}',
+ params: {
+ 'tenenv_id' => 'integer'
+}, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/registered_users_country_group_by/{group_by}', params={
+ 'tenenv_id': '0'
+}, headers = headers)
+
+print(r.json())
+
+```
+
+```php
+$headers = array('Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/registered_users_country_group_by/{group_by}', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/registered_users_country_group_by/{group_by}?tenenv_id=0");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte{jsonReq})
+ req, err := http.NewRequest("GET", "/registered_users_country_group_by/{group_by}", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /registered_users_country_group_by/{group_by}`
+
+*Read Users Country Groupby*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|group_by|path|string|true|none|
+|offset|query|integer|false|none|
+|startDate|query|string|false|none|
+|endDate|query|string|false|none|
+|tenenv_id|query|integer|true|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+## read_users_groupby_registered_users_groupby__group_by__get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /registered_users_groupby/{group_by}?tenenv_id=0 \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /registered_users_groupby/{group_by}?tenenv_id=0 HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/registered_users_groupby/{group_by}?tenenv_id=0',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/registered_users_groupby/{group_by}',
+ params: {
+ 'tenenv_id' => 'integer'
+}, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/registered_users_groupby/{group_by}', params={
+ 'tenenv_id': '0'
+}, headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/registered_users_groupby/{group_by}', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/registered_users_groupby/{group_by}?tenenv_id=0");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte(jsonReq))
+ req, err := http.NewRequest("GET", "/registered_users_groupby/{group_by}", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /registered_users_groupby/{group_by}`
+
+*Read Users Groupby*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|group_by|path|string|true|none|
+|offset|query|integer|false|none|
+|interval|query|string|false|none|
+|count_interval|query|integer|false|none|
+|startDate|query|string|false|none|
+|endDate|query|string|false|none|
+|tenenv_id|query|integer|true|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+## read_users_countby_registered_users_countby_get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /registered_users_countby?tenenv_id=0 \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /registered_users_countby?tenenv_id=0 HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/registered_users_countby?tenenv_id=0',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/registered_users_countby',
+ params: {
+ 'tenenv_id' => 'integer'
+}, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/registered_users_countby', params={
+ 'tenenv_id': '0'
+}, headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/registered_users_countby', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/registered_users_countby?tenenv_id=0");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte(jsonReq))
+ req, err := http.NewRequest("GET", "/registered_users_countby", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /registered_users_countby`
+
+*Read Users Countby*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|offset|query|integer|false|none|
+|interval|query|string|false|none|
+|count_interval|query|integer|false|none|
+|tenenv_id|query|integer|true|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+# communities
+
+## read_members_members__get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /members/ \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /members/ HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/members/',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/members/',
+ params: {
+ }, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/members/', headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/members/', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/members/");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte(jsonReq))
+ req, err := http.NewRequest("GET", "/members/", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /members/`
+
+*Read Members*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|offset|query|integer|false|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+[
+ {
+ "community_id": 0,
+ "hasheduserid": "string",
+ "status": "string",
+ "community_info": {
+ "name": "string",
+ "description": "string",
+ "source": "string",
+ "id": 0
+ }
+ }
+]
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+Status Code **200**
+
+*Response Read Members Members Get*
+
+|Name|Type|Required|Restrictions|Description|
+|---|---|---|---|---|
+|Response Read Members Members Get|[[MembersReadWithCommunityInfo](#schemamembersreadwithcommunityinfo)]|false|none|none|
+|» MembersReadWithCommunityInfo|[MembersReadWithCommunityInfo](#schemamembersreadwithcommunityinfo)|false|none|none|
+|»» community_id|integer|true|none|none|
+|»» hasheduserid|string|true|none|none|
+|»» status|string|true|none|none|
+|»» community_info|[Community_InfoRead](#schemacommunity_inforead)|true|none|none|
+|»»» name|string|true|none|none|
+|»»» description|string|true|none|none|
+|»»» source|string|true|none|none|
+|»»» id|integer|true|none|none|
+
+
+This operation does not require authentication
+
+
+## read_members_bystatus_members_bystatus__get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /members_bystatus/?tenenv_id=0 \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /members_bystatus/?tenenv_id=0 HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/members_bystatus/?tenenv_id=0',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/members_bystatus/',
+ params: {
+ 'tenenv_id' => 'integer'
+}, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/members_bystatus/', params={
+ 'tenenv_id': '0'
+}, headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/members_bystatus/', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/members_bystatus/?tenenv_id=0");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte(jsonReq))
+ req, err := http.NewRequest("GET", "/members_bystatus/", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /members_bystatus/`
+
+*Read Members Bystatus*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|offset|query|integer|false|none|
+|community_id|query|integer|false|none|
+|tenenv_id|query|integer|true|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+## read_communities_communities_groupby__group_by__get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /communities_groupby/{group_by}?tenenv_id=0 \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /communities_groupby/{group_by}?tenenv_id=0 HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/communities_groupby/{group_by}?tenenv_id=0',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/communities_groupby/{group_by}',
+ params: {
+ 'tenenv_id' => 'integer'
+}, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/communities_groupby/{group_by}', params={
+ 'tenenv_id': '0'
+}, headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/communities_groupby/{group_by}', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/communities_groupby/{group_by}?tenenv_id=0");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte(jsonReq))
+ req, err := http.NewRequest("GET", "/communities_groupby/{group_by}", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /communities_groupby/{group_by}`
+
+*Read Communities*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|group_by|path|string|true|none|
+|offset|query|integer|false|none|
+|tenenv_id|query|integer|true|none|
+|interval|query|string|false|none|
+|count_interval|query|integer|false|none|
+|startDate|query|string|false|none|
+|endDate|query|string|false|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+## read_community_communities__get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /communities/?tenenv_id=0 \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /communities/?tenenv_id=0 HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/communities/?tenenv_id=0',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/communities/',
+ params: {
+ 'tenenv_id' => 'integer'
+}, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/communities/', params={
+ 'tenenv_id': '0'
+}, headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/communities/', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/communities/?tenenv_id=0");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte(jsonReq))
+ req, err := http.NewRequest("GET", "/communities/", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /communities/`
+
+*Read Community*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|community_id|query|integer|false|none|
+|tenenv_id|query|integer|true|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+## read_communities_info_communities_info__get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /communities_info/ \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /communities_info/ HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/communities_info/',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/communities_info/',
+ params: {
+ }, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/communities_info/', headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/communities_info/', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/communities_info/");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte(jsonReq))
+ req, err := http.NewRequest("GET", "/communities_info/", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /communities_info/`
+
+*Read Communities Info*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|offset|query|integer|false|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+[
+ {
+ "name": "string",
+ "description": "string",
+ "source": "string",
+ "id": 0
+ }
+]
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+Status Code **200**
+
+*Response Read Communities Info Communities Info Get*
+
+|Name|Type|Required|Restrictions|Description|
+|---|---|---|---|---|
+|Response Read Communities Info Communities Info Get|[[Community_InfoRead](#schemacommunity_inforead)]|false|none|none|
+|» Community_InfoRead|[Community_InfoRead](#schemacommunity_inforead)|false|none|none|
+|»» name|string|true|none|none|
+|»» description|string|true|none|none|
+|»» source|string|true|none|none|
+|»» id|integer|true|none|none|
+
+
+This operation does not require authentication
+
+
+# countries
+
+## read_countries_countries__get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /countries/ \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /countries/ HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/countries/',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/countries/',
+ params: {
+ }, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/countries/', headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/countries/', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/countries/");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte(jsonReq))
+ req, err := http.NewRequest("GET", "/countries/", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /countries/`
+
+*Read Countries*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|offset|query|integer|false|none|
+|tag|query|string|false|none|
+|skip|query|boolean|false|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+[
+ {
+ "countrycode": "string",
+ "country": "string",
+ "id": 0
+ }
+]
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+Status Code **200**
+
+*Response Read Countries Countries Get*
+
+|Name|Type|Required|Restrictions|Description|
+|---|---|---|---|---|
+|Response Read Countries Countries Get|[[Country_CodesRead](#schemacountry_codesread)]|false|none|none|
+|» Country_CodesRead|[Country_CodesRead](#schemacountry_codesread)|false|none|none|
+|»» countrycode|string|true|none|none|
+|»» country|string|true|none|none|
+|»» id|integer|true|none|none|
+
+
+This operation does not require authentication
+
+
+## read_country_stats_country_stats__get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /country_stats/ \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /country_stats/ HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/country_stats/',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/country_stats/',
+ params: {
+ }, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/country_stats/', headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/country_stats/', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/country_stats/");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte(jsonReq))
+ req, err := http.NewRequest("GET", "/country_stats/", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /country_stats/`
+
+*Read Country Stats*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|offset|query|integer|false|none|
+|tag|query|string|false|none|
+|skip|query|boolean|false|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+[
+ {
+ "date": "2019-08-24",
+ "hasheduserid": "string",
+ "sourceidpid": 0,
+ "serviceid": 0,
+ "countryid": 0,
+ "count": 0,
+ "identityprovider_info": {
+ "entityid": "string",
+ "name": "string",
+ "id": 0
+ },
+ "serviceprovider_info": {
+ "identifier": "string",
+ "name": "string",
+ "id": 0
+ },
+ "country_info": {
+ "countrycode": "string",
+ "country": "string",
+ "id": 0
+ }
+ }
+]
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+Status Code **200**
+
+*Response Read Country Stats Country Stats Get*
+
+|Name|Type|Required|Restrictions|Description|
+|---|---|---|---|---|
+|Response Read Country Stats Country Stats Get|[[Statistics_Country_HashedwithInfo](#schemastatistics_country_hashedwithinfo)]|false|none|none|
+|» Statistics_Country_HashedwithInfo|[Statistics_Country_HashedwithInfo](#schemastatistics_country_hashedwithinfo)|false|none|none|
+|»» date|string(date)|true|none|none|
+|»» hasheduserid|string|true|none|none|
+|»» sourceidpid|integer|true|none|none|
+|»» serviceid|integer|true|none|none|
+|»» countryid|integer|true|none|none|
+|»» count|integer|true|none|none|
+|»» identityprovider_info|[IdentityprovidersmapRead](#schemaidentityprovidersmapread)|false|none|none|
+|»»» entityid|string|true|none|none|
+|»»» name|string|true|none|none|
+|»»» id|integer|true|none|none|
+|»» serviceprovider_info|[ServiceprovidersmapRead](#schemaserviceprovidersmapread)|false|none|none|
+|»»» identifier|string|true|none|none|
+|»»» name|string|true|none|none|
+|»»» id|integer|true|none|none|
+|»» country_info|[Country_CodesRead](#schemacountry_codesread)|false|none|none|
+|»»» countrycode|string|true|none|none|
+|»»» country|string|true|none|none|
+|»»» id|integer|true|none|none|
+
+
+This operation does not require authentication
+
+
+## read_country_stats_by_vo_country_stats_by_vo__community_id__get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /country_stats_by_vo/{community_id} \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /country_stats_by_vo/{community_id} HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/country_stats_by_vo/{community_id}',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/country_stats_by_vo/{community_id}',
+ params: {
+ }, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/country_stats_by_vo/{community_id}', headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/country_stats_by_vo/{community_id}', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/country_stats_by_vo/{community_id}");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte{jsonReq})
+ req, err := http.NewRequest("GET", "/country_stats_by_vo/{community_id}", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /country_stats_by_vo/{community_id}`
+
+*Read Country Stats By Vo*
+
+
+Parameters
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|community_id|path|integer|true|none|
+|offset|query|integer|false|none|
+|tag|query|string|false|none|
+|skip|query|boolean|false|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+
+Responses
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+
+Response Schema
+
+This operation does not require authentication
+
+
+# logins
+
+## read_logins_per_idp_logins_per_idp_get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /logins_per_idp?tenenv_id=0 \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /logins_per_idp?tenenv_id=0 HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/logins_per_idp?tenenv_id=0',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/logins_per_idp',
+ params: {
+ 'tenenv_id' => 'integer'
+}, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/logins_per_idp', params={
+ 'tenenv_id': '0'
+}, headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/logins_per_idp', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/logins_per_idp?tenenv_id=0");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte{jsonReq})
+ req, err := http.NewRequest("GET", "/logins_per_idp", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /logins_per_idp`
+
+*Read Logins Per Idp*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|offset|query|integer|false|none|
+|sp|query|string|false|none|
+|startDate|query|string|false|none|
+|endDate|query|string|false|none|
+|tenenv_id|query|integer|true|none|
+|unique_logins|query|boolean|false|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+## read_logins_per_sp_logins_per_sp_get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /logins_per_sp?tenenv_id=0 \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /logins_per_sp?tenenv_id=0 HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/logins_per_sp?tenenv_id=0',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/logins_per_sp',
+ params: {
+ 'tenenv_id' => 'integer'
+}, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/logins_per_sp', params={
+ 'tenenv_id': '0'
+}, headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/logins_per_sp', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/logins_per_sp?tenenv_id=0");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte{jsonReq})
+ req, err := http.NewRequest("GET", "/logins_per_sp", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /logins_per_sp`
+
+*Read Logins Per Sp*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|offset|query|integer|false|none|
+|idp|query|string|false|none|
+|startDate|query|string|false|none|
+|endDate|query|string|false|none|
+|tenenv_id|query|integer|true|none|
+|unique_logins|query|boolean|false|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+## read_logins_per_country_logins_per_country_get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /logins_per_country?tenenv_id=0 \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /logins_per_country?tenenv_id=0 HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/logins_per_country?tenenv_id=0',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/logins_per_country',
+ params: {
+ 'tenenv_id' => 'integer'
+}, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/logins_per_country', params={
+ 'tenenv_id': '0'
+}, headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/logins_per_country', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/logins_per_country?tenenv_id=0");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte{jsonReq})
+ req, err := http.NewRequest("GET", "/logins_per_country", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /logins_per_country`
+
+*Read Logins Per Country*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|offset|query|integer|false|none|
+|group_by|query|string|false|none|
+|startDate|query|string|false|none|
+|endDate|query|string|false|none|
+|tenenv_id|query|integer|true|none|
+|unique_logins|query|boolean|false|none|
+|idpId|query|integer|false|none|
+|spId|query|integer|false|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+## read_logins_countby_logins_countby_get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /logins_countby?tenenv_id=0 \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /logins_countby?tenenv_id=0 HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/logins_countby?tenenv_id=0',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/logins_countby',
+ params: {
+ 'tenenv_id' => 'integer'
+}, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/logins_countby', params={
+ 'tenenv_id': '0'
+}, headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/logins_countby', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/logins_countby?tenenv_id=0");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte{jsonReq})
+ req, err := http.NewRequest("GET", "/logins_countby", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /logins_countby`
+
+*Read Logins Countby*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|offset|query|integer|false|none|
+|interval|query|string|false|none|
+|count_interval|query|integer|false|none|
+|tenenv_id|query|integer|true|none|
+|unique_logins|query|boolean|false|none|
+|idpId|query|integer|false|none|
+|spId|query|integer|false|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+## read_logins_groupby_logins_groupby__group_by__get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /logins_groupby/{group_by}?tenenv_id=0 \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /logins_groupby/{group_by}?tenenv_id=0 HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/logins_groupby/{group_by}?tenenv_id=0',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/logins_groupby/{group_by}',
+ params: {
+ 'tenenv_id' => 'integer'
+}, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/logins_groupby/{group_by}', params={
+ 'tenenv_id': '0'
+}, headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/logins_groupby/{group_by}', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/logins_groupby/{group_by}?tenenv_id=0");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte{jsonReq})
+ req, err := http.NewRequest("GET", "/logins_groupby/{group_by}", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /logins_groupby/{group_by}`
+
+*Read Logins Groupby*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|group_by|path|string|true|none|
+|offset|query|integer|false|none|
+|idp|query|string|false|none|
+|sp|query|string|false|none|
+|tenenv_id|query|integer|true|none|
+|unique_logins|query|boolean|false|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+# dashboard
+
+## read_tenenv_byname_tenenv__tenant_name___environment_name__get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /tenenv/{tenant_name}/{environment_name} \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /tenenv/{tenant_name}/{environment_name} HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/tenenv/{tenant_name}/{environment_name}',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/tenenv/{tenant_name}/{environment_name}',
+ params: {
+ }, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/tenenv/{tenant_name}/{environment_name}', headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/tenenv/{tenant_name}/{environment_name}', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/tenenv/{tenant_name}/{environment_name}");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte{jsonReq})
+ req, err := http.NewRequest("GET", "/tenenv/{tenant_name}/{environment_name}", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /tenenv/{tenant_name}/{environment_name}`
+
+*Read Tenenv Byname*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|tenant_name|path|string|true|none|
+|environment_name|path|string|true|none|
+|offset|query|integer|false|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+## read_environment_byname_environment_byname__environment_name__get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /environment_byname/{environment_name} \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /environment_byname/{environment_name} HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/environment_byname/{environment_name}',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/environment_byname/{environment_name}',
+ params: {
+ }, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/environment_byname/{environment_name}', headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/environment_byname/{environment_name}', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/environment_byname/{environment_name}");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte{jsonReq})
+ req, err := http.NewRequest("GET", "/environment_byname/{environment_name}", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /environment_byname/{environment_name}`
+
+*Read Environment Byname*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|environment_name|path|string|true|none|
+|offset|query|integer|false|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+## read_idps_idps_get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /idps?tenenv_id=0 \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /idps?tenenv_id=0 HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/idps?tenenv_id=0',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/idps',
+ params: {
+ 'tenenv_id' => 'integer'
+}, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/idps', params={
+ 'tenenv_id': '0'
+}, headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/idps', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/idps?tenenv_id=0");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte{jsonReq})
+ req, err := http.NewRequest("GET", "/idps", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /idps`
+
+*Read Idps*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|tenenv_id|query|integer|true|none|
+|idpId|query|integer|false|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+## read_sps_sps_get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /sps?tenenv_id=0 \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /sps?tenenv_id=0 HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/sps?tenenv_id=0',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/sps',
+ params: {
+ 'tenenv_id' => 'integer'
+}, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/sps', params={
+ 'tenenv_id': '0'
+}, headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/sps', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/sps?tenenv_id=0");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte{jsonReq})
+ req, err := http.NewRequest("GET", "/sps", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /sps`
+
+*Read Sps*
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|tenenv_id|query|integer|true|none|
+|spId|query|integer|false|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+# ams
+
+## get_verification_ams_stats_ams_verification_hash_get
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X GET /ams_stats/ams_verification_hash \
+ -H 'Accept: application/json'
+
+```
+
+```http
+GET /ams_stats/ams_verification_hash HTTP/1.1
+
+Accept: application/json
+
+```
+
+```javascript
+
+const headers = {
+ 'Accept':'application/json'
+};
+
+fetch('/ams_stats/ams_verification_hash',
+{
+ method: 'GET',
+
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Accept' => 'application/json'
+}
+
+result = RestClient.get '/ams_stats/ams_verification_hash',
+ params: {
+ }, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Accept': 'application/json'
+}
+
+r = requests.get('/ams_stats/ams_verification_hash', headers = headers)
+
+print(r.json())
+
+```
+
+```php
+<?php
+
+require 'vendor/autoload.php';
+
+$headers = array(
+    'Accept' => 'application/json',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('GET','/ams_stats/ams_verification_hash', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/ams_stats/ams_verification_hash");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("GET");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Accept": []string{"application/json"},
+ }
+
+ data := bytes.NewBuffer([]byte{jsonReq})
+ req, err := http.NewRequest("GET", "/ams_stats/ams_verification_hash", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`GET /ams_stats/ams_verification_hash`
+
+*Get Verification*
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+## get_ams_stats_ams_stats_post
+
+
+
+> Code samples
+
+```shell
+# You can also use wget
+curl -X POST /ams_stats \
+ -H 'Content-Type: application/json' \
+ -H 'Accept: application/json' \
+ -H 'Authorization: string'
+
+```
+
+```http
+POST /ams_stats HTTP/1.1
+
+Content-Type: application/json
+Accept: application/json
+Authorization: string
+
+```
+
+```javascript
+const inputBody = 'null';
+const headers = {
+ 'Content-Type':'application/json',
+ 'Accept':'application/json',
+ 'Authorization':'string'
+};
+
+fetch('/ams_stats',
+{
+ method: 'POST',
+ body: inputBody,
+ headers: headers
+})
+.then(function(res) {
+ return res.json();
+}).then(function(body) {
+ console.log(body);
+});
+
+```
+
+```ruby
+require 'rest-client'
+require 'json'
+
+headers = {
+ 'Content-Type' => 'application/json',
+ 'Accept' => 'application/json',
+ 'Authorization' => 'string'
+}
+
+result = RestClient.post '/ams_stats',
+ params: {
+ }, headers: headers
+
+p JSON.parse(result)
+
+```
+
+```python
+import requests
+headers = {
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json',
+ 'Authorization': 'string'
+}
+
+r = requests.post('/ams_stats', headers = headers)
+
+print(r.json())
+
+```
+
+```php
+$headers = array('Content-Type' => 'application/json',
+ 'Accept' => 'application/json',
+ 'Authorization' => 'string',
+);
+
+$client = new \GuzzleHttp\Client();
+
+// Define array of request body.
+$request_body = array();
+
+try {
+ $response = $client->request('POST','/ams_stats', array(
+ 'headers' => $headers,
+ 'json' => $request_body,
+ )
+ );
+ print_r($response->getBody()->getContents());
+ }
+ catch (\GuzzleHttp\Exception\BadResponseException $e) {
+ // handle exception or api errors.
+ print_r($e->getMessage());
+ }
+
+ // ...
+
+```
+
+```java
+URL obj = new URL("/ams_stats");
+HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+con.setRequestMethod("POST");
+int responseCode = con.getResponseCode();
+BufferedReader in = new BufferedReader(
+ new InputStreamReader(con.getInputStream()));
+String inputLine;
+StringBuffer response = new StringBuffer();
+while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+}
+in.close();
+System.out.println(response.toString());
+
+```
+
+```go
+package main
+
+import (
+ "bytes"
+ "net/http"
+)
+
+func main() {
+
+ headers := map[string][]string{
+ "Content-Type": []string{"application/json"},
+ "Accept": []string{"application/json"},
+ "Authorization": []string{"string"},
+ }
+
+ data := bytes.NewBuffer([]byte(jsonReq))
+ req, err := http.NewRequest("POST", "/ams_stats", data)
+ req.Header = headers
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ // ...
+}
+
+```
+
+`POST /ams_stats`
+
+*Get Ams Stats*
+
+> Body parameter
+
+```json
+null
+```
+
+Parameters
+
+|Name|In|Type|Required|Description|
+|---|---|---|---|---|
+|Authorization|header|string|false|none|
+|body|body|any|true|none|
+
+> Example responses
+
+> 200 Response
+
+```json
+null
+```
+
+Responses
+
+|Status|Meaning|Description|Schema|
+|---|---|---|---|
+|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Successful Response|Inline|
+|422|[Unprocessable Entity](https://tools.ietf.org/html/rfc2518#section-10.3)|Validation Error|[HTTPValidationError](#schemahttpvalidationerror)|
+
+Response Schema
+
+
+This operation does not require authentication
+
+
+# Schemas
+
+
+
+
+
+
+
+
+```json
+{
+ "name": "string",
+ "description": "string",
+ "source": "string",
+ "id": 0
+}
+
+```
+
+Community_InfoRead
+
+### Properties
+
+|Name|Type|Required|Restrictions|Description|
+|---|---|---|---|---|
+|name|string|true|none|none|
+|description|string|true|none|none|
+|source|string|true|none|none|
+|id|integer|true|none|none|
+
+Country_CodesRead
+
+
+
+
+
+
+```json
+{
+ "countrycode": "string",
+ "country": "string",
+ "id": 0
+}
+
+```
+
+Country_CodesRead
+
+### Properties
+
+|Name|Type|Required|Restrictions|Description|
+|---|---|---|---|---|
+|countrycode|string|true|none|none|
+|country|string|true|none|none|
+|id|integer|true|none|none|
+
+HTTPValidationError
+
+
+
+
+
+
+```json
+{
+ "detail": [
+ {
+ "loc": [
+ "string"
+ ],
+ "msg": "string",
+ "type": "string"
+ }
+ ]
+}
+
+```
+
+HTTPValidationError
+
+### Properties
+
+|Name|Type|Required|Restrictions|Description|
+|---|---|---|---|---|
+|detail|[[ValidationError](#schemavalidationerror)]|false|none|none|
+
+IdentityprovidersmapRead
+
+
+
+
+
+
+```json
+{
+ "entityid": "string",
+ "name": "string",
+ "id": 0
+}
+
+```
+
+IdentityprovidersmapRead
+
+### Properties
+
+|Name|Type|Required|Restrictions|Description|
+|---|---|---|---|---|
+|entityid|string|true|none|none|
+|name|string|true|none|none|
+|id|integer|true|none|none|
+
+
+
+
+
+
+
+
+```json
+{
+ "community_id": 0,
+ "hasheduserid": "string",
+ "status": "string",
+ "community_info": {
+ "name": "string",
+ "description": "string",
+ "source": "string",
+ "id": 0
+ }
+}
+
+```
+
+MembersReadWithCommunityInfo
+
+### Properties
+
+|Name|Type|Required|Restrictions|Description|
+|---|---|---|---|---|
+|community_id|integer|true|none|none|
+|hasheduserid|string|true|none|none|
+|status|string|true|none|none|
+|community_info|[Community_InfoRead](#schemacommunity_inforead)|true|none|none|
+
+ServiceprovidersmapRead
+
+
+
+
+
+
+```json
+{
+ "identifier": "string",
+ "name": "string",
+ "id": 0
+}
+
+```
+
+ServiceprovidersmapRead
+
+### Properties
+
+|Name|Type|Required|Restrictions|Description|
+|---|---|---|---|---|
+|identifier|string|true|none|none|
+|name|string|true|none|none|
+|id|integer|true|none|none|
+
+Statistics_Country_HashedwithInfo
+
+
+
+
+
+
+```json
+{
+ "date": "2019-08-24",
+ "hasheduserid": "string",
+ "sourceidpid": 0,
+ "serviceid": 0,
+ "countryid": 0,
+ "count": 0,
+ "identityprovider_info": {
+ "entityid": "string",
+ "name": "string",
+ "id": 0
+ },
+ "serviceprovider_info": {
+ "identifier": "string",
+ "name": "string",
+ "id": 0
+ },
+ "country_info": {
+ "countrycode": "string",
+ "country": "string",
+ "id": 0
+ }
+}
+
+```
+
+Statistics_Country_HashedwithInfo
+
+### Properties
+
+|Name|Type|Required|Restrictions|Description|
+|---|---|---|---|---|
+|date|string(date)|true|none|none|
+|hasheduserid|string|true|none|none|
+|sourceidpid|integer|true|none|none|
+|serviceid|integer|true|none|none|
+|countryid|integer|true|none|none|
+|count|integer|true|none|none|
+|identityprovider_info|[IdentityprovidersmapRead](#schemaidentityprovidersmapread)|false|none|none|
+|serviceprovider_info|[ServiceprovidersmapRead](#schemaserviceprovidersmapread)|false|none|none|
+|country_info|[Country_CodesRead](#schemacountry_codesread)|false|none|none|
+
+ValidationError
+
+
+
+
+
+
+```json
+{
+ "loc": [
+ "string"
+ ],
+ "msg": "string",
+ "type": "string"
+}
+
+```
+
+ValidationError
+
+### Properties
+
+|Name|Type|Required|Restrictions|Description|
+|---|---|---|---|---|
+|loc|[anyOf]|true|none|none|
+
+anyOf
+
+|Name|Type|Required|Restrictions|Description|
+|---|---|---|---|---|
+|» *anonymous*|string|false|none|none|
+
+or
+
+|Name|Type|Required|Restrictions|Description|
+|---|---|---|---|---|
+|» *anonymous*|integer|false|none|none|
+
+continued
+
+|Name|Type|Required|Restrictions|Description|
+|---|---|---|---|---|
+|msg|string|true|none|none|
+|type|string|true|none|none|
+
diff --git a/app/auth/__init__.py b/app/auth/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/auth/auth.py b/app/auth/auth.py
new file mode 100644
index 0000000..ed8759d
--- /dev/null
+++ b/app/auth/auth.py
@@ -0,0 +1,260 @@
+"""
+Module for validating Open ID Connect tokens.
+Usage
+=====
+.. code-block:: python3
+ # This assumes you've already configured Auth in your_app/auth.py
+ from your_app.auth import auth
+ @app.get("/auth")
+ def test_auth(authenticated_user: IDToken = Security(auth.required)):
+ return f"Hello {authenticated_user.preferred_username}"
+"""
+
+from typing import List
+from typing import Optional
+from typing import Type
+
+from fastapi import Depends
+from fastapi import HTTPException
+from fastapi import Request
+from fastapi import status
+from fastapi.openapi.models import OAuthFlowAuthorizationCode
+from fastapi.openapi.models import OAuthFlowClientCredentials
+from fastapi.openapi.models import OAuthFlowImplicit
+from fastapi.openapi.models import OAuthFlowPassword
+from fastapi.openapi.models import OAuthFlows
+from fastapi.security import HTTPAuthorizationCredentials
+from fastapi.security import HTTPBearer
+from fastapi.security import OAuth2
+from fastapi.security import SecurityScopes
+from jose import ExpiredSignatureError
+from jose import JWTError
+from jose import jwt
+from jose.exceptions import JWTClaimsError
+
+from app.auth import discovery
+from app.auth.grant_types import GrantType
+from app.auth.idtoken_types import IDToken
+
+
+class Auth(OAuth2):
+ def __init__(
+ self,
+ openid_connect_url: str,
+ issuer: Optional[str] = None,
+ client_id: Optional[str] = None,
+ redirect_uri: Optional[str] = None,
+ scopes: List[str] = list(),
+ grant_types: List[GrantType] = [GrantType.IMPLICIT],
+ signature_cache_ttl: int = 3600,
+ idtoken_model: Type[IDToken] = IDToken,
+ ):
+ """Configure authentication :func:`auth = Auth(...) ` and then:
+ 1. Show authentication in the interactive docs with :func:`Depends(auth) `
+ when setting up FastAPI.
+ 2. Use :func:`Security(auth.required) ` or
+ :func:`Security(auth.optional) ` in your endpoints to
+ check user credentials.
+ Args:
+ openid_connect_url (URL): URL to the "well known" openid connect config
+ e.g. https://dev-123456.okta.com/.well-known/openid-configuration
+ issuer (URL): (Optional) The issuer URL from your auth server.
+ client_id (str): (Optional) The client_id configured by your auth server.
+ scopes (Dict[str, str]): (Optional) A dictionary of scopes and their descriptions.
+ grant_types (List[GrantType]): (Optional) Grant types shown in docs.
+ signature_cache_ttl (int): (Optional) How many seconds your app should
+ cache the authorization server's public signatures.
+ idtoken_model (Type): (Optional) The model to use for validating the ID Token.
+ Raises:
+ Nothing intentional
+ """
+
+ self.openid_connect_url = openid_connect_url
+ self.issuer = issuer
+ self.client_id = client_id
+ self.idtoken_model = idtoken_model
+ self.scopes = scopes
+ self.redirect_uri = redirect_uri
+
+ self.discover = discovery.configure(cache_ttl=signature_cache_ttl)
+ oidc_discoveries = self.discover.auth_server(
+ openid_connect_url=self.openid_connect_url
+ )
+ scopes_dict = {
+ scope: "" for scope in self.discover.supported_scopes(oidc_discoveries)
+ }
+
+ flows = OAuthFlows()
+ if GrantType.AUTHORIZATION_CODE in grant_types:
+ flows.authorizationCode = OAuthFlowAuthorizationCode(
+ authorizationUrl=self.discover.authorization_url(oidc_discoveries),
+ tokenUrl=self.discover.token_url(oidc_discoveries),
+ scopes=scopes_dict,
+ )
+
+ if GrantType.CLIENT_CREDENTIALS in grant_types:
+ flows.clientCredentials = OAuthFlowClientCredentials(
+ tokenUrl=self.discover.token_url(oidc_discoveries),
+ scopes=scopes_dict,
+ )
+
+ if GrantType.PASSWORD in grant_types:
+ flows.password = OAuthFlowPassword(
+ tokenUrl=self.discover.token_url(oidc_discoveries),
+ scopes=scopes_dict,
+ )
+
+ if GrantType.IMPLICIT in grant_types:
+ flows.implicit = OAuthFlowImplicit(
+ authorizationUrl=self.discover.authorization_url(oidc_discoveries),
+ scopes=scopes_dict,
+ )
+
+ super().__init__(
+ scheme_name="OIDC",
+ flows=flows,
+ auto_error=False,
+ )
+
+ async def __call__(self, request: Request) -> None:
+ return None
+
+ def required(
+ self,
+ security_scopes: SecurityScopes,
+ authorization_credentials: Optional[HTTPAuthorizationCredentials] = Depends(
+ HTTPBearer()
+ ),
+ ) -> IDToken:
+ """Validate and parse OIDC ID token against configuration.
+ Note this function caches the signatures and algorithms of the issuing
+ server for signature_cache_ttl seconds.
+ Args:
+ security_scopes (SecurityScopes): Security scopes
+ authorization_credentials (HTTPAuthorizationCredentials): Bearer
+ token credentials. This is invoked behind the scenes by Depends.
+ Return:
+ IDToken (self.idtoken_model): User information
+ raises:
+ HTTPException(status_code=401, detail=f"Unauthorized: {err}")
+ IDToken validation errors
+ """
+
+ id_token = self.authenticate_user(
+ security_scopes,
+ authorization_credentials,
+ auto_error=True,
+ )
+ if id_token is None:
+ raise HTTPException(status.HTTP_401_UNAUTHORIZED)
+ else:
+ return id_token
+
+ def optional(
+ self,
+ security_scopes: SecurityScopes,
+ authorization_credentials: Optional[HTTPAuthorizationCredentials] = Depends(
+ HTTPBearer(auto_error=False)
+ ),
+ ) -> Optional[IDToken]:
+ """Optionally validate and parse OIDC ID token against configuration.
+ Will not raise if the user is not authenticated. Note this function
+ caches the signatures and algorithms of the issuing server for
+ signature_cache_ttl seconds.
+ Args:
+ security_scopes (SecurityScopes): Security scopes
+ authorization_credentials (HTTPAuthorizationCredentials): Bearer
+ token credentials. This is invoked behind the scenes by Depends.
+ Return:
+ IDToken (self.idtoken_model): User information
+ raises:
+ IDToken validation errors
+ """
+
+ return self.authenticate_user(
+ security_scopes,
+ authorization_credentials,
+ auto_error=False,
+ )
+
+ def authenticate_user(
+ self,
+ security_scopes: SecurityScopes,
+ authorization_credentials: Optional[HTTPAuthorizationCredentials],
+ auto_error: bool,
+ ) -> Optional[IDToken]:
+ """Validate and parse OIDC ID token against configuration.
+ Note this function caches the signatures and algorithms of the issuing server
+ for signature_cache_ttl seconds.
+ Args:
+ security_scopes (SecurityScopes): Security scopes
+ authorization_credentials (HTTPAuthorizationCredentials): Bearer token credentials
+ auto_error (bool): If True, will raise an HTTPException if the user
+ is not authenticated.
+ Return:
+ IDToken (self.idtoken_model): User information
+ raises:
+ HTTPException(status_code=401, detail=f"Unauthorized: {err}")
+ """
+
+ if (
+ authorization_credentials is None
+ or authorization_credentials.scheme.lower() != "bearer"
+ ):
+ if auto_error:
+ raise HTTPException(
+ status.HTTP_401_UNAUTHORIZED, detail="Missing bearer token"
+ )
+ else:
+ return None
+
+ oidc_discoveries = self.discover.auth_server(
+ openid_connect_url=self.openid_connect_url
+ )
+ key = self.discover.public_keys(oidc_discoveries)
+ algorithms = self.discover.signing_algos(oidc_discoveries)
+
+ try:
+ id_token = jwt.decode(
+ authorization_credentials.credentials,
+ key,
+ algorithms,
+ issuer=self.issuer,
+ audience=self.client_id,
+ options={
+ # Disabled at_hash check since we aren't using the access token
+ "verify_at_hash": False,
+ "verify_iss": self.issuer is not None,
+ "verify_aud": self.client_id is not None,
+ },
+ )
+
+ print(id_token)
+
+ # XXX The aud should always be present?
+ if (
+ "aud" in id_token
+ and type(id_token["aud"]) == list
+ and len(id_token["aud"]) >= 1
+ and "azp" not in id_token
+ ):
+ raise JWTError(
+ 'Missing authorized party "azp" in IDToken when there '
+ "are multiple audiences"
+ )
+
+ except (ExpiredSignatureError, JWTError, JWTClaimsError) as error:
+ raise HTTPException(status_code=401, detail=f"Unauthorized: {error}")
+
+ expected_scopes = set(self.scopes + security_scopes.scopes)
+ token_scopes = id_token.get("scope", "").split(" ")
+ if not expected_scopes.issubset(token_scopes):
+ raise HTTPException(
+ status.HTTP_401_UNAUTHORIZED,
+ detail=(
+ f"Missing scope token, expected {expected_scopes} to be a "
+ f"subset of received {token_scopes}",
+ ),
+ )
+
+ return self.idtoken_model(**id_token)
\ No newline at end of file
diff --git a/app/auth/discovery.py b/app/auth/discovery.py
new file mode 100644
index 0000000..af5aefe
--- /dev/null
+++ b/app/auth/discovery.py
@@ -0,0 +1,49 @@
+from typing import Dict
+import requests
+from cachetools import TTLCache
+from cachetools import cached
+from threading import Lock
+
+
+def configure(*_, cache_ttl: int):
+ @cached(TTLCache(1, cache_ttl), key=lambda d: d["jwks_uri"], lock=Lock())
+ def get_authentication_server_public_keys(OIDC_spec: Dict):
+ """
+ Retrieve the public keys used by the authentication server
+ for signing OIDC ID tokens.
+ """
+ keys_uri = OIDC_spec["jwks_uri"]
+ r = requests.get(keys_uri)
+ keys = r.json()
+ return keys
+
+ def get_signing_algos(OIDC_spec: Dict):
+ algos = OIDC_spec["id_token_signing_alg_values_supported"]
+ return algos
+
+ @cached(TTLCache(1, cache_ttl), lock=Lock())
+ def discover_auth_server(*_, openid_connect_url: str) -> Dict:
+ r = requests.get(openid_connect_url)
+ # Raise if the auth server is failing since we can't verify tokens
+ r.raise_for_status()
+ configuration = r.json()
+ return configuration
+
+ def get_authorization_url(OIDC_spec: Dict) -> str:
+ return OIDC_spec["authorization_endpoint"]
+
+ def get_token_url(OIDC_spec: Dict) -> str:
+ return OIDC_spec["token_endpoint"]
+
+ def get_supported_scopes(OIDC_spec: Dict) -> str:
+ return OIDC_spec["scopes_supported"]
+
+ class functions:
+ auth_server = discover_auth_server
+ public_keys = get_authentication_server_public_keys
+ signing_algos = get_signing_algos
+ authorization_url = get_authorization_url
+ token_url = get_token_url
+ supported_scopes = get_supported_scopes
+
+ return functions
diff --git a/app/auth/grant_types.py b/app/auth/grant_types.py
new file mode 100644
index 0000000..7db2df7
--- /dev/null
+++ b/app/auth/grant_types.py
@@ -0,0 +1,10 @@
+from enum import Enum
+
+
+class GrantType(str, Enum):
+ """Grant types that can be used in the interactive documentation."""
+
+ AUTHORIZATION_CODE = "authorization_code"
+ CLIENT_CREDENTIALS = "client_credentials"
+ IMPLICIT = "implicit"
+ PASSWORD = "password" # nosec
\ No newline at end of file
diff --git a/app/auth/idtoken_types.py b/app/auth/idtoken_types.py
new file mode 100644
index 0000000..313bd5e
--- /dev/null
+++ b/app/auth/idtoken_types.py
@@ -0,0 +1,54 @@
+from typing import List
+from typing import Union
+
+from pydantic import BaseModel
+from pydantic import Extra
+
+
+class IDToken(BaseModel):
+ """Pydantic model representing an OIDC ID Token.
+ ID Tokens are polymorphic and may have many attributes not defined in the spec thus this model accepts
+ all additional fields. Only required fields are listed in the attributes section of this docstring or
+ enforced by pydantic.
+ See the specifications here. https://openid.net/specs/openid-connect-core-1_0.html#IDToken
+ Parameters:
+ iss (str): Issuer Identifier for the Issuer of the response.
+ sub (str): Subject Identifier.
+ aud (Union[str, List[str]]): Audience(s) that this ID Token is intended for.
+ exp (int): Expiration time on or after which the ID Token MUST NOT be accepted for processing.
+ iat (int): Time at which the JWT was issued.
+ """
+
+ iss: str
+ sub: str
+ aud: Union[str, List[str]]
+ exp: int
+ iat: int
+
+ class Config:
+ extra = Extra.allow
+
+
+class OktaIDToken(IDToken):
+ """Pydantic Model for the IDToken returned by Okta's OIDC implementation."""
+
+ auth_time: int
+ ver: int
+ jti: str
+ amr: List[str]
+ idp: str
+ nonce: str
+ at_hash: str
+ name: str
+ email: str
+ preferred_username: str
+
+
+class KeycloakIDToken(IDToken):
+ """Pydantic Model for the IDToken returned by Keycloak's OIDC implementation."""
+
+ jti: str
+ name: str
+ email: str
+ email_verified: bool
+ preferred_username: str
diff --git a/app/database.py b/app/database.py
index d9b5955..b67e276 100644
--- a/app/database.py
+++ b/app/database.py
@@ -1,12 +1,13 @@
-import os
+from app.utils import configParser
+from sqlmodel import create_engine, Session
-from sqlmodel import create_engine, SQLModel, Session
-
-# Initialize
-url = os.getenv('DATABASE_URL')
-engine = create_engine(url)
def get_session():
+ # Initialize
+ config_file = 'config.global.py'
+
+ url = configParser.getConfig('database_parameters', config_file)['database_url']
+ engine = create_engine(url)
+
with Session(engine) as session:
yield session
-
diff --git a/app/ingester/__init__.py b/app/ingester/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/ingester/communityIngester.py b/app/ingester/communityIngester.py
new file mode 100644
index 0000000..915590f
--- /dev/null
+++ b/app/ingester/communityIngester.py
@@ -0,0 +1,88 @@
+from app.logger import log
+from ..database import get_session
+from sqlalchemy.exc import NoResultFound
+from .utilsIngester import utilsIngester
+
+
+class CommunityDataIngester:
+ logger = log.get_logger("CommunityDataIngester")
+
+ @classmethod
+ def getCommunityId(cls, communityName, tenenvId, session):
+ # Check if community exists
+ try:
+ communityId = session.exec(
+ """
+ SELECT id FROM community_info
+ WHERE name = '{0}' AND tenenv_id={1}
+ """.format(
+ communityName, tenenvId
+ )
+ ).one()
+ except NoResultFound:
+ cls.logger.error("""Community with name {0}
+ not found for
+ tenenvId {1}""".format(communityName,
+ tenenvId))
+ communityId = None
+ return communityId
+
+ @classmethod
+ def ingestCommunityDataPerTenenv(cls, tenenvId, session):
+ # get dates not mapped for communities data
+ datesNotMapped = utilsIngester.getDatesNotMapped(
+ "community",
+ "updated",
+ tenenvId,
+ session)
+ between = ""
+ if datesNotMapped[0] is not None:
+ between = " AND (date BETWEEN '{0}' AND '{1}')".format(
+ datesNotMapped[0], datesNotMapped[1])
+ elif datesNotMapped[1] is not None:
+ between = " AND date <= '{0}'".format(
+ datesNotMapped[1]
+ )
+ communitiesNotMapped = session.exec("""
+ SELECT jsondata FROM statistics_raw WHERE (type='vo')
+ AND tenenv_id={0} {1}
+ """.format(tenenvId, between)).all()
+ communityMappedItems = 0
+ for community in communitiesNotMapped:
+ print(community[0])
+ communityId = session.exec("""INSERT INTO community_info(
+ name, description, source, tenenv_id)
+ VALUES ('{0}','{1}','{2}', {3})
+ ON CONFLICT(name, tenenv_id)
+ DO UPDATE
+ set description='{1}'
+ RETURNING id;""".format(community[0]['voName'],
+ community[0]['voDescription'],
+ community[0]['source'],
+ tenenvId)).one()
+ session.commit()
+ print(communityId)
+ if (communityId[0] is not None):
+ session.exec("""INSERT INTO community(community_id, created, updated, status,
+ tenenv_id)
+ VALUES ({0},'{1}','{1}','{2}',{3})
+ ON CONFLICT(community_id, tenenv_id)
+ DO UPDATE
+ set status='{2}', updated='{1}'
+ """.format(communityId[0],
+ community[0]['date'], community[0]['status'], tenenvId))
+ session.commit()
+ communityMappedItems += 1
+ cls.logger.info("""{0} communities ingested or updated""".
+ format(communityMappedItems))
+
+ @classmethod
+ def ingestCommunityData(cls):
+ session_generator = get_session()
+ session = next(session_generator)
+ tenenvIds = session.exec("""SELECT id FROM tenenv_info""").all()
+ # for each tenenv on database try to ingest CommunityData
+ # from statistics_raw table
+ for tenenvId in tenenvIds:
+ CommunityDataIngester.ingestCommunityDataPerTenenv(
+ tenenvId[0], session)
diff --git a/app/ingester/ingestData.py b/app/ingester/ingestData.py
new file mode 100644
index 0000000..a6a6bd5
--- /dev/null
+++ b/app/ingester/ingestData.py
@@ -0,0 +1,13 @@
+from .communityIngester import CommunityDataIngester
+from .usersIngester import UserDataIngester
+from .membeshipIngester import MembershipDataIngester
+from .loginsIngester import LoginDataIngester
+
+# Ingest Communities
+CommunityDataIngester.ingestCommunityData()
+# Ingest Users
+UserDataIngester.ingestUserData()
+# Ingest Memberships
+MembershipDataIngester.ingestMembershipData()
+# Ingest Logins
+LoginDataIngester.ingestLoginData()
diff --git a/app/ingester/loginsIngester.py b/app/ingester/loginsIngester.py
new file mode 100644
index 0000000..98dddc7
--- /dev/null
+++ b/app/ingester/loginsIngester.py
@@ -0,0 +1,234 @@
+from app.logger import log
+from ..database import get_session
+from app.utils.ipDatabase import geoip2Database
+from sqlalchemy.exc import NoResultFound
+from .utilsIngester import utilsIngester
+import hashlib
+
+class LoginDataIngester:
+ logger = log.get_logger("LoginDataIngester")
+
+ @classmethod
+ def getIdpId(cls, entityid, idpName, tenenvId, session):
+ # Check if IdP exists
+ try:
+ idpId = session.exec(
+ """
+ SELECT id, name FROM identityprovidersmap
+ WHERE entityid = '{0}' AND tenenv_id={1}
+ """.format(
+ entityid, tenenvId
+ )
+ ).one()
+ # Update idpName with the latest
+ if (idpId[0] is not None and idpName is not None and idpName != ''
+ and idpId[1] != idpName):
+ session.exec(
+ """
+ UPDATE identityprovidersmap SET name = '{0}'
+ WHERE id = {1}
+ """.format(idpName, idpId[0])
+ )
+ except NoResultFound:
+ cls.logger.info("""Idp with name {0} and entityid {1}
+ will be created for
+ tenenvId {2}""".format(idpName,
+ entityid,
+ tenenvId))
+ idpId = session.exec(
+ """
+ INSERT INTO identityprovidersmap (entityid, name, tenenv_id)
+ VALUES ('{0}', '{1}', {2})
+ RETURNING id;
+ """.format(
+ entityid, idpName, tenenvId
+ )
+ ).one()
+ return idpId
+
+ @classmethod
+ def getSpId(cls, identifier, spName, tenenvId, session):
+ # Check if Sp exists
+ try:
+ spId = session.exec(
+ """
+ SELECT id, name FROM serviceprovidersmap
+ WHERE identifier = '{0}' AND tenenv_id={1}
+ """.format(
+ identifier, tenenvId
+ )
+ ).one()
+ # Update spName with the latest
+ if (spId[0] is not None and spName is not None and spName != ''
+ and spId[1] != spName):
+ session.exec(
+ """
+ UPDATE serviceprovidersmap SET name = '{0}'
+ WHERE id = {1}
+ """.format(spName, spId[0])
+ )
+ except NoResultFound:
+ # If Sp not exists then add it to database
+ cls.logger.info("""Sp with name {0} and identifier {1}
+ will be created for
+ tenenvId {2}""".format(spName,
+ identifier,
+ tenenvId))
+ spId = session.exec(
+ """
+ INSERT INTO serviceprovidersmap (identifier, name, tenenv_id)
+ SELECT '{0}', '{1}', {2}
+ WHERE NOT EXISTS (
+ SELECT 1 FROM serviceprovidersmap
+ WHERE identifier = '{0}'
+ )
+ RETURNING id;
+ """.format(identifier, spName, tenenvId)
+ ).one()
+ return spId
+
+ @classmethod
+ def getCountryFromCountryCode(cls, countryData, session):
+ try:
+ countryId = session.exec(
+ """
+ SELECT id FROM country_codes
+ WHERE countrycode = '{0}'
+ """.format(
+ countryData[0]
+ )
+ ).one()
+ except NoResultFound:
+ cls.logger.info("""Country with name {0}
+ will be created""".format(countryData[1]))
+ countryId = session.exec(
+ """
+ INSERT INTO country_codes (countrycode, country)
+ SELECT '{0}', '{1}'
+ WHERE NOT EXISTS (
+ SELECT 1 FROM country_codes
+ WHERE countrycode = '{0}'
+ )
+ RETURNING id;
+ """.format(countryData[0], countryData[1])
+ ).one()
+ return countryId
+
+ @classmethod
+ def getCountryFromIP(cls, ipAddress, session):
+ # handler for ip databases
+ ipDatabaseHandler = geoip2Database()
+ # get country code/ name
+ try:
+ countryData = ipDatabaseHandler.getCountryFromIp(ipAddress)
+ if (countryData[0] is None):
+ countryData[0] = 'UN'
+ countryData[1] = 'Unknown'
+ cls.logger.warning("""
+ ip {0} not found at database""".format(ipAddress))
+ except (Exception):
+ countryData = ['UN', 'Unknown']
+ cls.logger.warning("""
+ ip {0} not found at database""".format(ipAddress))
+ # Save country if not exists
+ try:
+ countryId = session.exec(
+ """
+ SELECT id FROM country_codes
+ WHERE countrycode = '{0}'
+ """.format(
+ countryData[0]
+ )
+ ).one()
+ except NoResultFound:
+ cls.logger.info("""Country with name {0}
+ will be created""".format(countryData[1]))
+ countryId = session.exec(
+ """
+ INSERT INTO country_codes (countrycode, country)
+ SELECT '{0}', '{1}'
+ WHERE NOT EXISTS (
+ SELECT 1 FROM country_codes
+ WHERE countrycode = '{0}'
+ )
+ RETURNING id;
+ """.format(countryData[0], countryData[1])
+ ).one()
+ return countryId
+
+ @classmethod
+ def ingestLoginDataPerTenenv(cls, tenenvId, session):
+
+ # get dates not mapped for logins data
+ datesNotMapped = utilsIngester.getDatesNotMapped(
+ "statistics_country_hashed",
+ "date",
+ tenenvId,
+ session)
+ between = ""
+ if datesNotMapped[0] is not None:
+ between = " AND (date BETWEEN '{0}' AND '{1}')".format(
+ datesNotMapped[0], datesNotMapped[1])
+ elif datesNotMapped[1] is not None:
+ between = " AND date <= '{0}'".format(
+ datesNotMapped[1]
+ )
+ cls.logger.info("""
+ {0} logins """.format(between))
+ loginsNotMapped = session.exec("""
+ SELECT jsondata FROM statistics_raw WHERE type='login'
+ AND tenenv_id={0} {1}
+ """.format(tenenvId, between)).all()
+ loginMappedItems = 0
+ for login in loginsNotMapped:
+ if (not login[0]['failedLogin']
+ and utilsIngester.validateTenenv(login[0]['tenenvId'], session)
+ and 'voPersonId' in login[0]
+ and utilsIngester.validateHashedUser(login[0]['voPersonId'],
+ login[0]['tenenvId'],
+ session)):
+
+ # Set idpName/spName to '' if they don't have a value
+ login[0]['idpName'] = '' if not login[0].get('idpName') else login[0]['idpName']
+ login[0]['spName'] = '' if not login[0].get('spName') else login[0]['spName']
+
+ # check if idp exists in our database otherwise create it
+ idpId = LoginDataIngester.getIdpId(login[0]['entityId'],
+ login[0]['idpName'],
+ login[0]['tenenvId'],
+ session)
+ # check if sp exists in our database otherwise create it
+ spId = LoginDataIngester.getSpId(login[0]['identifier'],
+ login[0]['spName'],
+ login[0]['tenenvId'],
+ session)
+
+ if ('countryCode' in login[0] and 'countryName' in login[0]):
+ # find countryId
+ countryId = LoginDataIngester.getCountryFromCountryCode([login[0]['countryCode'], login[0]['countryName']], session)
+ # store information at statistics_country_hashed
+ session.exec(
+ """
+ INSERT INTO statistics_country_hashed(date, hasheduserid, sourceidpid, serviceid, countryid, count, tenenv_id)
+ VALUES ('{0}', '{1}', {2}, {3}, {4}, {5}, {6})
+ ON CONFLICT (date, hasheduserid, sourceidpid, serviceid, countryid, tenenv_id)
+ DO UPDATE SET count = statistics_country_hashed.count + 1
+ """.format(
+ login[0]["date"], login[0]['voPersonId'], idpId[0], spId[0], countryId[0], 1, login[0]['tenenvId']
+ )
+ )
+ session.commit()
+ loginMappedItems += 1
+ else:
+ cls.logger.warning("The record {0} was not imported due to validation errors".format(repr(login[0])))
+
+ cls.logger.info("""
+ {0} new logins ingested""".format(loginMappedItems))
+
+ @classmethod
+ def ingestLoginData(cls):
+ session_generator = get_session()
+ session = next(session_generator)
+ tenenvIds = session.exec("""SELECT id FROM tenenv_info""").all()
+ for tenenvId in tenenvIds:
+ LoginDataIngester.ingestLoginDataPerTenenv(tenenvId[0], session)
diff --git a/app/ingester/membeshipIngester.py b/app/ingester/membeshipIngester.py
new file mode 100644
index 0000000..5e6fa34
--- /dev/null
+++ b/app/ingester/membeshipIngester.py
@@ -0,0 +1,80 @@
+from app.logger import log
+from ..database import get_session
+from sqlalchemy.exc import NoResultFound
+from .utilsIngester import utilsIngester
+
+
+class MembershipDataIngester:
+ logger = log.get_logger("MembershipDataIngester")
+
+ @classmethod
+ def getCommunityId(cls, communityName, tenenvId, session):
+ # Check if community exists
+ try:
+ communityId = session.exec(
+ """
+ SELECT id FROM community_info
+ WHERE name = '{0}' AND tenenv_id={1}
+ """.format(
+ communityName, tenenvId
+ )
+ ).one()
+ except NoResultFound:
+ cls.logger.error("""Community with name {0}
+ not found for
+ tenenvId {1}""".format(communityName,
+ tenenvId))
+ communityId = None
+ return communityId
+
+ @classmethod
+ def ingestMembershipDataPerTenenv(cls, tenenvId, session):
+ # get dates not mapped for users data
+ datesNotMapped = utilsIngester.getDatesNotMapped(
+ "members",
+ "updated",
+ tenenvId,
+ session)
+ between = ""
+ if datesNotMapped[0] is not None:
+ between = " AND (date BETWEEN '{0}' AND '{1}')".format(
+ datesNotMapped[0], datesNotMapped[1])
+ elif datesNotMapped[1] is not None:
+ between = " AND date <= '{0}'".format(
+ datesNotMapped[1]
+ )
+ membershipsNotMapped = session.exec("""
+ SELECT jsondata FROM statistics_raw WHERE (type='membership')
+ AND tenenv_id={0} {1}
+ """.format(tenenvId, between)).all()
+ membershipMappedItems = 0
+ for membership in membershipsNotMapped:
+ communityId = MembershipDataIngester.getCommunityId(
+ membership[0]['voName'], tenenvId, session)
+ if (communityId is None):
+ cls.logger.error("""
+ VO name '{0}' not found """.format(membership[0]['voName']))
+ continue
+ session.exec("""INSERT INTO members(community_id,
+ hasheduserid, status, tenenv_id, created, updated)
+ VALUES ('{0}','{1}','{2}', {3}, '{4}', '{4}')
+ ON CONFLICT(community_id, hasheduserid, tenenv_id)
+ DO UPDATE
+ set status='{2}', updated='{4}'""". format(
+ communityId[0], membership[0]['voPersonId'], membership[0]['status'],
+ tenenvId, membership[0]['date']))
+ session.commit()
+ membershipMappedItems += 1
+ cls.logger.info("""{0} memberships ingested or updated""".
+ format(membershipMappedItems))
+
+ @classmethod
+ def ingestMembershipData(cls):
+ session_generator = get_session()
+ session = next(session_generator)
+ tenenvIds = session.exec("""SELECT id FROM tenenv_info""").all()
+ # for each tenenv on database try to ingest MembershipData
+ # from statistics_raw table
+ for tenenvId in tenenvIds:
+ MembershipDataIngester.ingestMembershipDataPerTenenv(
+ tenenvId[0], session)
diff --git a/app/ingester/usersIngester.py b/app/ingester/usersIngester.py
new file mode 100644
index 0000000..4ea9281
--- /dev/null
+++ b/app/ingester/usersIngester.py
@@ -0,0 +1,58 @@
+from app.logger import log
+from ..database import get_session
+from .utilsIngester import utilsIngester
+
+
+class UserDataIngester:
+ logger = log.get_logger("UserDataIngester")
+
+ @classmethod
+ def ingestUserDataPerTenenv(cls, tenenvId, session):
+ # get dates not mapped for users data
+ datesNotMapped = utilsIngester.getDatesNotMapped(
+ "users",
+ "updated",
+ tenenvId,
+ session)
+ between = ""
+ if datesNotMapped[0] is not None:
+ between = " AND (date BETWEEN '{0}' AND '{1}')".format(
+ datesNotMapped[0], datesNotMapped[1])
+ elif datesNotMapped[1] is not None:
+ between = " AND date <= '{0}'".format(
+ datesNotMapped[1]
+ )
+ cls.logger.info("""between {0}""".format(between))
+ usersNotMapped = session.exec("""
+ SELECT jsondata FROM statistics_raw WHERE (type='registration' OR type='user_status') AND tenenv_id={0} {1}
+ """.format(tenenvId, between)).all()
+ userMappedItems = 0
+ for user in usersNotMapped:
+ cls.logger.info("""hasheduserid {0}""".format(user[0]))
+ if (user[0]['type'] == 'registration' and 'status' not in user[0]):
+ user[0]['status'] = 'A'
+ if (user[0]['status'] not in ['A', 'S', 'D']):
+ cls.logger.error("""
+ user status '{0}' is not valid """.format(user[0]['status']))
+ continue
+ session.exec("""INSERT INTO users(hasheduserid, created, updated, status, tenenv_id)
+ VALUES ('{0}','{1}','{1}', '{2}', {3})
+ ON CONFLICT(hasheduserid, tenenv_id)
+ DO UPDATE SET status='{2}', updated='{1}'""". format(
+ user[0]['voPersonId'], user[0]['date'], user[0]['status'],
+ user[0]['tenenvId']))
+ session.commit()
+ userMappedItems += 1
+
+ cls.logger.info("""
+ {0} users ingested or updated""".format(userMappedItems))
+
+ @classmethod
+ def ingestUserData(cls):
+ session_generator = get_session()
+ session = next(session_generator)
+ tenenvIds = session.exec("""SELECT id FROM tenenv_info""").all()
+ # for each tenenv on database try to ingest UserData
+ # from statistics_raw table
+ for tenenvId in tenenvIds:
+ UserDataIngester.ingestUserDataPerTenenv(tenenvId[0], session)
\ No newline at end of file
diff --git a/app/ingester/utilsIngester.py b/app/ingester/utilsIngester.py
new file mode 100644
index 0000000..9b27666
--- /dev/null
+++ b/app/ingester/utilsIngester.py
@@ -0,0 +1,63 @@
+from app.logger import log
+from datetime import timedelta, date
+from sqlalchemy.exc import NoResultFound
+
+
+class utilsIngester:
+    """Shared helpers for the ingestion jobs (date windows, validation)."""
+
+    logger = log.get_logger("utilsIngester")
+
+    @classmethod
+    def getDatesNotMapped(cls, table: str, column, tenenvId, session):
+        """Return [dayFrom, dayTo] bounding rows not yet ingested.
+
+        dayFrom is the day after the newest `column` date already stored in
+        `table` for this tenenv (None when the table is empty); dayTo is
+        always the end of yesterday, so only complete days are ingested.
+        NOTE(review): table/column are interpolated into the SQL text —
+        callers must pass literals only.
+        """
+        maxDate = session.exec("""
+            SELECT max({0}::date) FROM {1} WHERE tenenv_id={2}
+        """.format(column, table, tenenvId)).one()
+
+        dayFrom = None
+        dayTo = None
+
+        if maxDate[0] is not None:
+            dayAfter = maxDate[0] + timedelta(days=1)
+            dayFrom = dayAfter.strftime('%Y-%m-%d 00:00:00')
+
+        yesterday = date.today() - timedelta(days=1)
+        dayTo = yesterday.strftime('%Y-%m-%d 23:59:59')
+        return [dayFrom, dayTo]
+
+    @classmethod
+    def validateTenenv(cls, tenenvId, session):
+        """Return True when a tenenv_info row with this id exists."""
+        try:
+            tenenvId = session.exec(
+                """
+                SELECT tenenv_info.id FROM tenenv_info
+                WHERE id={0}
+                """.format(
+                    tenenvId
+                )
+            ).one()
+        except NoResultFound:
+            # if tenenv_id doesn't exist return a relevant message
+            cls.logger.info("Tenenv with id {0} not found".format(tenenvId))
+            print("Tenenv not found")
+            return False
+        return True
+
+    @classmethod
+    def validateHashedUser(cls, hashedUser, tenenvId, session):
+        """Return True when the hashed user id exists for this tenenv."""
+        # Check if userid exists
+        try:
+            session.exec(
+                """
+                SELECT hasheduserid FROM users WHERE hasheduserid='{0}'
+                AND tenenv_id={1}
+                """.format(
+                    # hashlib.md5(data["userid"]).hexdigest() #TypeError:
+                    # Strings must be encoded before hashing
+                    hashedUser, tenenvId
+                )
+            ).one()
+        except NoResultFound:
+            cls.logger.info("User {0} not found".format(hashedUser))
+            return False
+        return True
diff --git a/app/ip_databases/empty b/app/ip_databases/empty
new file mode 100644
index 0000000..e69de29
diff --git a/app/logger/__init__.py b/app/logger/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/logger/log.py b/app/logger/log.py
new file mode 100644
index 0000000..d38b485
--- /dev/null
+++ b/app/logger/log.py
@@ -0,0 +1,33 @@
+import logging
+import sys
+from app.utils import configParser
+from logging.handlers import TimedRotatingFileHandler
+
+# Single formatter shared by the console and file handlers.
+FORMATTER = logging.Formatter("""%(asctime)s - %(name)s - %(levelname)s -
+    %(message)s""")
+# Log destination and level come from the global configuration file.
+LOG_FILE = "{0}/{1}".format(configParser.getConfig('logging', 'config.global.py')['folder'],
+                            configParser.getConfig('logging', 'config.global.py')['file'])
+LEVEL = configParser.getConfig('logging', 'config.global.py')['level']
+
+
+def get_console_handler():
+ console_handler = logging.StreamHandler(sys.stdout)
+ console_handler.setFormatter(FORMATTER)
+ return console_handler
+
+
+def get_file_handler():
+ file_handler = TimedRotatingFileHandler(LOG_FILE, when='midnight')
+ file_handler.setFormatter(FORMATTER)
+ return file_handler
+
+
+def get_logger(logger_name):
+ logger = logging.getLogger(logger_name)
+ logger.setLevel(LEVEL)
+ logger.addHandler(get_console_handler())
+ logger.addHandler(get_file_handler())
+ # with this pattern, it's rarely necessary
+ # to propagate the error up to parent
+ logger.propagate = False
+ return logger
diff --git a/app/main.py b/app/main.py
index a0ee93d..84a65c9 100644
--- a/app/main.py
+++ b/app/main.py
@@ -1,24 +1,80 @@
-from typing import List, Optional
+import os
+import sys
+from pprint import pprint
+
+from xmlrpc.client import boolean
+from fastapi import Depends, FastAPI, HTTPException, Query, Request, HTTPException, status
+from starlette.middleware.cors import CORSMiddleware
+from starlette.middleware.sessions import SessionMiddleware
-from fastapi import Depends, FastAPI, HTTPException, Query
-from fastapi.middleware.cors import CORSMiddleware
from sqlmodel import Field, Session, SQLModel, create_engine, select
+from sqlalchemy import func
+from sqlalchemy.orm import selectinload
from app.database import get_session
-from app.models import *
+from app.models.community_info_model import *
+from app.models.community_model import *
+from app.models.member_model import *
+from app.models.service_model import *
+from app.models.country_model import *
+from app.models.idp_model import *
+from app.models.country_hashed_user_model import *
+
+from .routers import authenticate, communities, countries, logins, users, dashboard, ams
+from app.utils import configParser
+from app.utils.fastapiGlobals import GlobalsMiddleware, g
+
+sys.path.insert(0, os.path.realpath('__file__'))
+# Development Environment: dev
+environment = os.getenv('API_ENVIRONMENT')
+
+# Instantiate app according to the environment configuration
+app = FastAPI() if environment == "dev" else FastAPI(root_path="/api/v1",
+ root_path_in_servers=False,
+ servers=[{"url": "/api/v1"}])
+
+if environment == "dev":
+ app.add_middleware(
+ CORSMiddleware,
+ allow_origins=["*"],
+ allow_credentials=True,
+ allow_methods=["*"],
+ allow_headers=["*"],
+ )
+
+app.add_middleware(SessionMiddleware,
+ secret_key="some-random-string")
+
+# Globals
+app.add_middleware(GlobalsMiddleware)
+
+# Get the tenant and environment from the request
+@app.middleware("http")
+async def get_tenacy(request: Request, call_next):
+    # Resolve the tenant: an explicit header wins over the cookie fallback.
+    if 'x-tenant' in request.headers:
+        g.tenant = request.headers['x-tenant']
+    elif 'x-tenant' in request.cookies:
+        g.tenant = request.cookies['x-tenant']
-app = FastAPI()
+    # Resolve the environment the same way (header first, then cookie).
+    if 'x-environment' in request.headers:
+        g.environment = request.headers['x-environment']
+    elif 'x-environment' in request.cookies:
+        g.environment = request.cookies['x-environment']
-origins = ["*"]
-app.add_middleware(
-    CORSMiddleware,
-    allow_origins=origins,
-    allow_credentials=True,
-    allow_methods=["*"],
-    allow_headers=["*"],
-)
+    response = await call_next(request)
+    return response
+CommunityReadwithInfo.update_forward_refs(
+ Community_InfoRead=Community_InfoRead)
+Statistics_Country_HashedwithInfo.update_forward_refs(
+ IdentityprovidersmapRead=IdentityprovidersmapRead,
+ ServiceprovidersmapRead=ServiceprovidersmapRead,
+ Country_CodesRead=Country_CodesRead)
-@app.get("/ping")
-def pong():
- return {"ping": "pong!"}
+app.include_router(authenticate.router)
+app.include_router(users.router)
+app.include_router(communities.router)
+app.include_router(countries.router)
+app.include_router(logins.router)
+app.include_router(dashboard.router)
+app.include_router(ams.router)
diff --git a/app/models.py b/app/models.py
deleted file mode 100644
index 14d9973..0000000
--- a/app/models.py
+++ /dev/null
@@ -1,74 +0,0 @@
-from typing import List, Optional
-from sqlmodel import Field, Relationship, Session, SQLModel
-from sqlalchemy import UniqueConstraint
-from datetime import datetime
-
-
-# User
-class UserBase(SQLModel):
- first_name: str
- last_name: str
- email: str = Field(index=True)
- password: str
-
-
-class User(UserBase, table=True):
- __table_args__ = (UniqueConstraint("email"),)
- id: Optional[int] = Field(default=None, primary_key=True)
-
-
-class UserCreate(UserBase):
- pass
-
-
-class UserRead(UserBase):
- id: int
-
-
-class UserUpdate(SQLModel):
- first_name: Optional[str] = None
- last_name: Optional[str] = None
- email: Optional[str] = None
- password: Optional[str] = None
-
-
-class UserLogin(SQLModel):
- email: str
- password: str
-
-
-class UserLoginResponse(SQLModel):
- id: int
- email: str
- first_name: str
- last_name: str
-
-
-# Communities
-class CommunityBase(SQLModel):
- name: str
- description: str
- created_at: datetime = Field(default_factory=datetime.utcnow, nullable=False)
- modified_at: Optional[datetime] = None
-
-
-class Community(CommunityBase, table=True):
- id: Optional[int] = Field(default=None, primary_key=True)
-
-
-class CommunityRead(CommunityBase):
- id: int
-
-
-class UserCommunitysRead(CommunityBase):
- id: int
-
-
-class CommunityCreate(CommunityBase):
- pass
-
-
-class CommunitiesUpdate(SQLModel):
- name: Optional[str] = None
- description: Optional[str] = None
- modified_at: datetime = Field(default_factory=datetime.utcnow, nullable=False)
\ No newline at end of file
diff --git a/app/models/__init__.py b/app/models/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/models/community_info_model.py b/app/models/community_info_model.py
new file mode 100644
index 0000000..76f977a
--- /dev/null
+++ b/app/models/community_info_model.py
@@ -0,0 +1,26 @@
+from typing import List, Optional,TYPE_CHECKING
+from sqlmodel import Field, Relationship, Session, SQLModel
+from sqlalchemy import UniqueConstraint
+from datetime import date, datetime
+
+if TYPE_CHECKING:
+ from .community_model import Community
+ from .member_model import Members
+
+# Communities
+class CommunityInfoBase(SQLModel):
+    # Shared fields of the community_info table and its read schema.
+    name: str
+    description: str
+    source: str
+    #created_at: datetime = Field(default_factory=datetime.utcnow, nullable=False)
+    #modified_at: Optional[datetime] = None
+
+class Community_Info(CommunityInfoBase, table=True):
+    # Database table; linked to community rows and membership rows.
+    id: Optional[int] = Field(default=None, primary_key=True)
+
+    communities: List["Community"] = Relationship(back_populates="community_info")
+    members: List["Members"] = Relationship(back_populates="community_info")
+
+class Community_InfoRead(CommunityInfoBase):
+    # API read model (adds the generated primary key).
+    id: int
+
diff --git a/app/models/community_model.py b/app/models/community_model.py
new file mode 100644
index 0000000..5f38bc6
--- /dev/null
+++ b/app/models/community_model.py
@@ -0,0 +1,24 @@
+from typing import List, Optional,TYPE_CHECKING
+from sqlmodel import Field, Relationship, Session, SQLModel
+from sqlalchemy import UniqueConstraint
+from datetime import date, datetime
+
+if TYPE_CHECKING:
+ from .community_info_model import Community_Info, Community_InfoRead
+
+
+class CommunityBase(SQLModel):
+    created: date = Field(nullable=False)
+    # Primary key doubles as the FK to community_info.id.
+    community_id: int = Field(primary_key=True, foreign_key="community_info.id")
+
+
+class Community(CommunityBase, table=True):
+    #community_id: Optional[int] = Field(default=None, primary_key=True)
+    #id: Optional[int] = Field(default=None, primary_key=True)
+    # One-to-one link to the descriptive community_info record.
+    community_info: "Community_Info" = Relationship(sa_relationship_kwargs={'uselist': False},back_populates="communities")
+
+class CommunityRead(CommunityBase):
+    pass
+
+class CommunityReadwithInfo(CommunityRead):
+    # Read model that embeds the related community_info payload.
+    community_info: Optional["Community_InfoRead"]
diff --git a/app/models/country_hashed_user_model.py b/app/models/country_hashed_user_model.py
new file mode 100644
index 0000000..3375d57
--- /dev/null
+++ b/app/models/country_hashed_user_model.py
@@ -0,0 +1,33 @@
+from typing import List, Optional,TYPE_CHECKING
+from sqlmodel import Field, Relationship, Session, SQLModel
+from sqlalchemy import UniqueConstraint
+from datetime import date, datetime
+
+if TYPE_CHECKING:
+ from .idp_model import *
+ from .service_model import *
+ from .country_model import *
+
+
+class Statistics_Country_HashedBase(SQLModel):
+    # One aggregated login record: user x IdP x SP x country on a date.
+    date: date
+    hasheduserid: str
+    sourceidpid: int = Field(foreign_key="identityprovidersmap.id")
+    serviceid : int = Field(foreign_key="serviceprovidersmap.id")
+    countryid: int = Field(foreign_key="country_codes.id")
+    count: int
+
+
+class Statistics_Country_Hashed(Statistics_Country_HashedBase, table=True):
+    id: Optional[int] = Field(default=None, primary_key=True)
+    # One-to-one lookups of the related IdP / SP / country rows.
+    identityprovider_info: "Identityprovidersmap" = Relationship(sa_relationship_kwargs={'uselist': False},back_populates="idps")
+    serviceprovider_info: "Serviceprovidersmap" = Relationship(sa_relationship_kwargs={'uselist': False},back_populates="services")
+    country_info: "Country_Codes" = Relationship(sa_relationship_kwargs={'uselist': False},back_populates="countries")
+
+class Statistics_Country_HashedRead(Statistics_Country_HashedBase):
+    pass
+
+class Statistics_Country_HashedwithInfo(Statistics_Country_HashedRead):
+    # Read model embedding the resolved IdP / SP / country payloads.
+    identityprovider_info: Optional["IdentityprovidersmapRead"]
+    serviceprovider_info: Optional["ServiceprovidersmapRead"]
+    country_info: Optional["Country_CodesRead"]
diff --git a/app/models/country_model.py b/app/models/country_model.py
new file mode 100644
index 0000000..6ab7812
--- /dev/null
+++ b/app/models/country_model.py
@@ -0,0 +1,22 @@
+from typing import List, Optional,TYPE_CHECKING
+from sqlmodel import Field, Relationship, Session, SQLModel
+from sqlalchemy import UniqueConstraint
+from datetime import date, datetime
+
+if TYPE_CHECKING:
+ from .country_hashed_user_model import Statistics_Country_Hashed
+ from .member_model import Members
+
+# Country codes
+class Country_CodesBase(SQLModel):
+    countrycode: str
+    country: str
+
+
+class Country_Codes(Country_CodesBase, table=True):
+    id: Optional[int] = Field(default=None, primary_key=True)
+    countries: List["Statistics_Country_Hashed"] = Relationship(back_populates="country_info")
+
+class Country_CodesRead(Country_CodesBase):
+    # API read model (adds the generated primary key).
+    id: int
+
diff --git a/app/models/environment_model.py b/app/models/environment_model.py
new file mode 100644
index 0000000..71c0489
--- /dev/null
+++ b/app/models/environment_model.py
@@ -0,0 +1,20 @@
+from typing import List, Optional,TYPE_CHECKING
+from sqlmodel import Field, Relationship, Session, SQLModel
+from sqlalchemy import UniqueConstraint
+from datetime import date, datetime
+
+if TYPE_CHECKING:
+ from .country_hashed_user_model import Statistics_Country_Hashed
+
+# EnvironmentInfo
+class EnvironmentInfoBase(SQLModel):
+    # Lookup table describing a deployment environment.
+    name: str
+    description: str
+
+class EnvironmentInfo(EnvironmentInfoBase, table=True):
+    id: Optional[int] = Field(default=None, primary_key=True)
+    #idps: List["Statistics_Country_Hashed"] = Relationship(back_populates="identityprovider_info")
+
+class EnvironmentInfoRead(EnvironmentInfoBase):
+    # API read model (adds the generated primary key).
+    id: int
+
diff --git a/app/models/idp_model.py b/app/models/idp_model.py
new file mode 100644
index 0000000..53a85df
--- /dev/null
+++ b/app/models/idp_model.py
@@ -0,0 +1,20 @@
+from typing import List, Optional,TYPE_CHECKING
+from sqlmodel import Field, Relationship, Session, SQLModel
+from sqlalchemy import UniqueConstraint
+from datetime import date, datetime
+
+if TYPE_CHECKING:
+ from .country_hashed_user_model import Statistics_Country_Hashed
+
+# IdPs
+class IdentityprovidersmapBase(SQLModel):
+    # Identity provider lookup: SAML/OIDC entity id plus display name.
+    entityid: str
+    name: str
+
+class Identityprovidersmap(IdentityprovidersmapBase, table=True):
+    id: Optional[int] = Field(default=None, primary_key=True)
+    idps: List["Statistics_Country_Hashed"] = Relationship(back_populates="identityprovider_info")
+
+class IdentityprovidersmapRead(IdentityprovidersmapBase):
+    # API read model (adds the generated primary key).
+    id: int
+
diff --git a/app/models/member_model.py b/app/models/member_model.py
new file mode 100644
index 0000000..820b47d
--- /dev/null
+++ b/app/models/member_model.py
@@ -0,0 +1,26 @@
+from typing import List, Optional,TYPE_CHECKING
+from sqlmodel import Field, Relationship, Session, SQLModel
+from sqlalchemy import UniqueConstraint
+from datetime import date, datetime
+
+if TYPE_CHECKING:
+ from .community_model import Community_Info,Community_InfoRead
+
+class MemberBase(SQLModel):
+    # Composite key: one membership per (community, hashed user).
+    community_id: int = Field(primary_key=True, foreign_key="community_info.id")
+    hasheduserid: str = Field(primary_key=True)
+    status: str
+
+class Members(MemberBase, table=True):
+    # One-to-one link to the community's descriptive record.
+    community_info: "Community_Info" = Relationship(sa_relationship_kwargs={'uselist': False},back_populates="members")
+
+class MembersRead(MemberBase):
+    pass
+
+class MembersReadWithCommunityInfo(MembersRead):
+    # Read model embedding the related community_info payload.
+    community_info: "Community_InfoRead"
+
+
+
+
+
\ No newline at end of file
diff --git a/app/models/service_model.py b/app/models/service_model.py
new file mode 100644
index 0000000..8daaf8b
--- /dev/null
+++ b/app/models/service_model.py
@@ -0,0 +1,20 @@
+from typing import List, Optional,TYPE_CHECKING
+from sqlmodel import Field, Relationship, Session, SQLModel
+from sqlalchemy import UniqueConstraint
+from datetime import date, datetime
+
+if TYPE_CHECKING:
+ from .country_hashed_user_model import Statistics_Country_Hashed
+
+# Service providers
+class ServiceprovidersmapBase(SQLModel):
+    # Service provider lookup: identifier plus display name.
+    identifier: str
+    name: str
+
+class Serviceprovidersmap(ServiceprovidersmapBase, table=True):
+    id: Optional[int] = Field(default=None, primary_key=True)
+    services: List["Statistics_Country_Hashed"] = Relationship(back_populates="serviceprovider_info")
+
+class ServiceprovidersmapRead(ServiceprovidersmapBase):
+    # API read model (adds the generated primary key).
+    id: int
+
diff --git a/app/models/tenant_model.py b/app/models/tenant_model.py
new file mode 100644
index 0000000..ced41fc
--- /dev/null
+++ b/app/models/tenant_model.py
@@ -0,0 +1,20 @@
+from typing import List, Optional,TYPE_CHECKING
+from sqlmodel import Field, Relationship, Session, SQLModel
+from sqlalchemy import UniqueConstraint
+from datetime import date, datetime
+
+if TYPE_CHECKING:
+ from .country_hashed_user_model import Statistics_Country_Hashed
+
+# TenenvInfo
+class TenenvInfoBase(SQLModel):
+    # Lookup table describing a tenant+environment ("tenenv") pair.
+    name: str
+    description: str
+
+class TenenvInfo(TenenvInfoBase, table=True):
+    id: Optional[int] = Field(default=None, primary_key=True)
+    #idps: List["Statistics_Country_Hashed"] = Relationship(back_populates="identityprovider_info")
+
+class TenenvInfoRead(TenenvInfoBase):
+    # API read model (adds the generated primary key).
+    id: int
+
diff --git a/app/routers/__init__.py b/app/routers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/routers/ams.py b/app/routers/ams.py
new file mode 100644
index 0000000..30c716a
--- /dev/null
+++ b/app/routers/ams.py
@@ -0,0 +1,136 @@
+import base64
+import json
+import hashlib
+from fastapi import APIRouter, Depends, HTTPException, Response, Request, Body, Header, Security
+from sqlmodel import Field, Session, SQLModel, create_engine, select
+from typing import Union
+from app.utils import configParser, globalMethods
+from app.database import get_session
+from app.utils.globalMethods import AuthNZCheck
+from fastapi.responses import PlainTextResponse
+from app.logger import log
+from fastapi.security import HTTPBearer
+from starlette.responses import JSONResponse
+from sqlalchemy.exc import NoResultFound
+from app.utils.ipDatabase import geoip2Database
+from typing import Optional
+# from ..dependencies import get_token_header
+
+router = APIRouter(
+ tags=["ams"]
+)
+
+logger = log.get_logger("ams")
+
+
+@router.get("/ams_stats/ams_verification_hash")
+async def get_verification(response: Response):
+
+ verification_hash = configParser.getConfig('ams', 'config.global.py')['verification_hash']
+ response.status_code = 200
+ response.headers["Content-Type"] = "plain/text"
+ return PlainTextResponse(verification_hash)
+
+
+async def verify_authorization_header(Authorization: Optional[str] = Header(None)):
+ authkey = configParser.getConfig('ams', 'config.global.py')['auth_key']
+ # check authorization
+ if (Authorization != authkey):
+ HTTPException(status_code=401)
+ # response.status_code = 401
+ # return PlainTextResponse('Client Certificate Authentication Failure')
+ return Authorization
+
+
+@router.post("/ams_stats")
+async def get_ams_stats(*,
+                        session: Session = Depends(get_session),
+                        request: Request,
+                        response: Response,
+                        body = Body(..., example={"name": "Item Name"}),
+                        Authorization: str = Depends(verify_authorization_header)):
+    """Receive pushed AMS statistics (single message or a batch) and store
+    each decoded event via process_data.
+    """
+    response.status_code = 200
+    # Access the request data
+    data = await request.json()
+    logger.debug(data)
+    messages = data.get("messages", [])  # Retrieve the list of messages
+    if not messages:  # if only one message exists
+        try:
+            data_dict = process_message(data.get("message").get("data"))
+            process_data(data_dict, session)
+        except Exception as e:
+            # Best effort: a malformed message is logged and skipped.
+            logger.error(f"Error: {e}")
+    else:
+        for item in data.get("messages", []):
+            try:
+                data_dict = process_message(item.get("message").get("data"))
+                process_data(data_dict, session)
+            except Exception as e:
+                logger.error(f"Error: {e}")
+
+    return JSONResponse({"message": "Endpoint called successfully"})
+
+
+def process_message(message):
+ decoded_data = base64.b64decode(message).decode()
+ logger.debug(decoded_data)
+ # Process the data
+ print(decoded_data)
+ # Convert the JSON-formatted string to a Python dictionary
+ data_dict = json.loads(decoded_data)
+ return data_dict
+
+
+def process_data(data, session):
+ print(data["date"])
+ if ("tenenvId" not in data
+ or "type" not in data
+ or "eventIdentifier" not in data
+ or "source" not in data
+ or "tenenvId" not in data):
+
+ raise MissingDataException("One or more required attributes are missing.")
+
+ if "ipAddress" in data:
+ # handler for ip databases
+ ipDatabaseHandler = geoip2Database()
+ countryData = ["", ""]
+ # get country code/ name
+ countryData[0] = 'UN'
+ countryData[1] = 'Unknown'
+ try:
+ countryData = ipDatabaseHandler.getCountryFromIp(data["ipAddress"])
+ except Exception:
+ print("Unknown ip Address")
+
+ data["countryCode"] = countryData[0]
+ data["countryName"] = countryData[1]
+ del data["ipAddress"]
+ if "voPersonId" in data:
+ # hash voPersonId
+ data["voPersonId"] = hashlib.md5(data['voPersonId'].encode()).hexdigest()
+ print(data)
+ session.exec(
+ """
+ INSERT INTO statistics_raw(date, type, event_identifier, source,
+ tenenv_id, jsondata)
+ VALUES ('{0}', '{1}', '{2}', '{3}', '{4}','{5}')
+ ON CONFLICT (event_identifier, source, tenenv_id)
+ DO NOTHING
+ """.format(
+ data["date"],
+ data["type"],
+ data['eventIdentifier'],
+ data['source'],
+ data['tenenvId'],
+ json.dumps(data)
+ )
+ )
+ session.commit()
+
+ return JSONResponse({"message": "Endpoint called successfully"})
+
+
+class MissingDataException(Exception):
+    """Raised when a pushed AMS event lacks a required attribute."""
+    pass
\ No newline at end of file
diff --git a/app/routers/authenticate.py b/app/routers/authenticate.py
new file mode 100644
index 0000000..bc4474b
--- /dev/null
+++ b/app/routers/authenticate.py
@@ -0,0 +1,180 @@
+from pprint import pprint
+from typing import Annotated, Any
+
+from fastapi import APIRouter, Depends, HTTPException, status, Security, Request
+from fastapi.responses import JSONResponse
+import json, jwt
+
+from app.utils import configParser
+import urllib.parse
+from starlette.responses import HTMLResponse, RedirectResponse
+from authlib.integrations.starlette_client import OAuth, OAuthError
+
+from app.utils.globalMethods import permissionsCalculation, g
+
+router = APIRouter(
+ tags=["authenticate"],
+ # dependencies=[Depends(get_token_header)],
+ # responses={404: {"description": "Not found"}},
+)
+
+def initializeAuthOb():
+    """Create an OAuth registry holding the RCIAM OIDC client for the
+    tenant/environment of the current request.
+    """
+    # Config file name is derived from the tenant + environment resolved by
+    # the get_tenacy middleware (fastapi globals `g`).
+    config_file = 'config.' + g.tenant + '.' + g.environment + '.py'
+    oidc_config = configParser.getConfig('oidc_client', config_file)
+    oauth = OAuth()
+
+    oauth.register(
+        'rciam',
+        client_id=oidc_config['client_id'],
+        client_secret=oidc_config['client_secret'],
+        server_metadata_url=oidc_config['issuer'] + "/.well-known/openid-configuration",
+        client_kwargs={'scope': 'openid profile email voperson_id eduperson_entitlement'}
+    )
+    return oauth
+
+def getServerConfig():
+ config_file = 'config.' + g.tenant + '.' + g.environment + '.py'
+ return configParser.getConfig('server_config', config_file)
+
+@router.get('/login',
+            include_in_schema=False
+            )
+async def login_endpoint(
+        request: Request,
+        oauth_ob= Depends(initializeAuthOb),
+        server_config= Depends(getServerConfig)):
+    """Kick off the OIDC authorization-code flow against RCIAM."""
+    rciam = oauth_ob.create_client('rciam')
+    # The callback URL must match a registered redirect URI of the client.
+    redirect_uri = server_config['protocol'] + "://" + server_config['host'] + server_config['api_path'] + "/auth"
+    return await rciam.authorize_redirect(request, redirect_uri)
+
+
+@router.get('/auth',
+            include_in_schema=False,
+            response_class=RedirectResponse)
+async def authorize_rciam(
+        request: Request,
+        oauth_ob= Depends(initializeAuthOb),
+        server_config=Depends(getServerConfig)
+):
+    """OIDC callback: exchange the code for tokens, set auth/permission
+    cookies and redirect back to where the login started.
+
+    NOTE(review): user_info_data / jwt_user appear to be bound only when the
+    id-token lacked an email (userinfo fallback path) — confirm the intended
+    nesting; a token that already carries an email looks like it would hit a
+    NameError below. TODO confirm against the original file.
+    """
+    login_start_url = request.cookies.get("login_start")
+    # pprint(request.cookies.get("login_start"))
+    if not login_start_url:
+        login_start_url = "/"
+
+    # Set cookies when returning a RedirectResponse
+    # https://github.com/tiangolo/fastapi/issues/2452
+    # Creating our own redirect url is what make it possible
+    # to add the cookie
+    response = RedirectResponse(url=urllib.parse.unquote(login_start_url))
+    response.delete_cookie("login_start")
+
+    rciam = oauth_ob.create_client('rciam')
+    try:
+        token = await rciam.authorize_access_token(request)
+    except OAuthError as error:
+        return HTMLResponse(f'{error.error} ')
+    user = token.get('userinfo')
+    pprint(token)
+
+    if user:
+        request.session['user'] = dict(user)
+    # Fetch the userinfo data
+    if user.get("email") is None:
+        metadata = await rciam.load_server_metadata()
+        if not metadata['userinfo_endpoint']:
+            raise RuntimeError('Missing "userinfo_endpoint" value')
+        # Make a request to the userinfo endpoint
+        user_info = await rciam.get(metadata['userinfo_endpoint'], token=token)
+        user_info.raise_for_status()
+        user_info_data = user_info.json()
+        # Encode the data to jwt
+        # todo: the key could become configurable and per tenenv
+        jwt_user = jwt.encode(payload=user_info_data,
+                              key="a custom key",
+                              algorithm="HS256")
+        # print(jwt_user)
+
+    # XXX The max_age of the cookie is the same as the
+    # access token max age which we extract from the token
+    # itself
+    response.headers["Access-Control-Expose-Headers"] = "X-Permissions, X-Authenticated, X-Redirect"
+    response.set_cookie(key="userinfo",
+                        value=jwt_user,
+                        secure=None,
+                        max_age=token.get('expires_in'),
+                        domain=server_config['domain'])
+
+    response.set_cookie(key="idtoken",
+                        value=token.get('id_token'),
+                        secure=None,
+                        max_age=token.get('expires_in'),
+                        domain=server_config['domain'])
+
+    response.set_cookie(key="atoken",
+                        value=token.get('access_token'),
+                        secure=None,
+                        max_age=token.get('expires_in'),
+                        domain=server_config['domain'])
+    response.headers["X-Authenticated"] = "true"
+
+    # Authorization
+    authorize_file = 'authorize.' + g.tenant + '.' + g.environment + '.py'
+    permissions = permissionsCalculation(authorize_file, user_info_data)
+    permissions_json = json.dumps(permissions).replace(" ", "").replace("\n", "")
+
+    # Set the permissions cookie.
+    jwt_persmissions = jwt.encode(payload=permissions,
+                                  key="a custom key",
+                                  algorithm="HS256")
+    response.set_cookie(key="permissions",
+                        value=jwt_persmissions,
+                        secure=None,
+                        max_age=token.get('expires_in'),
+                        domain=server_config['domain'])
+    # Add the permission to a custom header field
+    response.headers["X-Permissions"] = permissions_json
+
+    return response
+
+
+@router.get('/logout',
+            include_in_schema=False,
+            response_class=RedirectResponse)
+async def logout(
+        request: Request,
+        oauth_ob= Depends(initializeAuthOb),
+        server_config=Depends(getServerConfig)
+):
+    """Clear the session and auth cookies, then redirect to the OIDC
+    provider's end-session endpoint.
+    """
+    rciam = oauth_ob.create_client('rciam')
+    metadata = await rciam.load_server_metadata()
+    # todo: Fix this after we complete the multitenacy
+    redirect_uri = server_config['protocol'] + "://" + server_config['client'] +"/metrics"
+    logout_endpoint = metadata['end_session_endpoint'] + "?post_logout_redirect_uri=" + urllib.parse.unquote(
+        redirect_uri) + "&id_token_hint=" + request.cookies.get("idtoken")
+
+    request.session.pop('user', None)
+
+    # Set cookies when returning a RedirectResponse
+    # https://github.com/tiangolo/fastapi/issues/2452
+    # Expire every auth-related cookie (max_age=0) on the shared domain.
+    response = RedirectResponse(url=logout_endpoint)
+    response.set_cookie('userinfo',
+                        expires=0,
+                        max_age=0,
+                        domain=server_config['domain'])
+
+    response.set_cookie('idtoken',
+                        expires=0,
+                        max_age=0,
+                        domain=server_config['domain'])
+
+    response.set_cookie(key="atoken",
+                        expires=0,
+                        max_age=0,
+                        domain=server_config['domain'])
+
+    response.set_cookie(key="permissions",
+                        expires=0,
+                        max_age=0,
+                        domain=server_config['domain'])
+
+    return response
diff --git a/app/routers/communities.py b/app/routers/communities.py
new file mode 100644
index 0000000..83047e9
--- /dev/null
+++ b/app/routers/communities.py
@@ -0,0 +1,137 @@
+from fastapi import APIRouter, Depends, HTTPException
+from sqlmodel import Field, Session, SQLModel, create_engine, select
+from typing import Union
+
+from app.database import get_session
+from app.models.community_info_model import *
+from app.models.community_model import *
+from app.models.member_model import MembersReadWithCommunityInfo
+from app.utils.globalMethods import AuthNZCheck
+
+
+# Resolve the forward reference so the nested read model serializes fully.
+MembersReadWithCommunityInfo.update_forward_refs(
+    Community_InfoRead=Community_InfoRead)
+
+router = APIRouter(
+    tags=["communities"],
+    dependencies=[Depends(AuthNZCheck("communities"))]
+)
+
+@router.get("/members/", response_model=List[MembersReadWithCommunityInfo])
+async def read_members(
+    *,
+    session: Session = Depends(get_session),
+    offset: int = 0,
+    # community_id: Union[None, int] = None
+):
+    """Return membership rows (with community info), offset-paginated."""
+    # if not community_id:
+    #     members = session.exec(select(Members).offset(offset)).all()
+    # else:
+    members = session.exec(select(Members).offset(offset)).all()
+    return members
+
+@router.get("/min_date_communities")
+async def read_min_date_communities(
+    *,
+    session: Session = Depends(get_session),
+    offset: int = 0,
+    tenenv_id: int,
+):
+    """Return the earliest community creation date for a tenenv."""
+    # NOTE(review): tenenv_id is interpolated into the SQL text; it is an
+    # int-typed query parameter, but bound parameters would be safer.
+    min_date = session.exec("""
+        SELECT min(created) as min_date
+        FROM community
+        WHERE tenenv_id={0}
+    """.format(tenenv_id)).one()
+    return min_date
+@router.get("/members_bystatus/")
+async def read_members_bystatus(
+    *,
+    session: Session = Depends(get_session),
+    offset: int = 0,
+    community_id: Union[None, int] = None,
+    tenenv_id: int,
+):
+    """Count members per status for one community.
+
+    NOTE(review): when community_id is omitted this returns raw Members
+    rows and tenenv_id is ignored — confirm this is intended.
+    """
+    if not community_id:
+        members = session.exec(select(Members).offset(offset)).all()
+    else:
+        # members = session.exec(select(Members).offset(offset)).all()
+        members = session.exec("""
+            SELECT count(*) as count, community_id, status FROM members
+            WHERE community_id={0} AND tenenv_id={1}
+            GROUP BY community_id, status
+        """.format(community_id, tenenv_id)).all()
+        # members = session.exec(""" SELECT community_id FROM members """)
+    return members
+
+
+@router.get("/communities_groupby/{group_by}")
+async def read_communities(
+    *,
+    session: Session = Depends(get_session),
+    offset: int = 0,
+    group_by: str,
+    tenenv_id: int,
+    interval: Union[str, None] = None,
+    count_interval: int = None,
+    startDate: str = None,
+    endDate: str = None,
+):
+    """Aggregate community creations per time bucket (group_by), optionally
+    limited to a rolling interval or an explicit start/end date range.
+
+    NOTE(review): group_by/interval/dates are interpolated into the SQL
+    text; they arrive as request parameters, so bound parameters (or a
+    whitelist for group_by) would be safer.
+    """
+    interval_subquery = ""
+    if group_by:
+        # Rolling window, e.g. "last N months", relative to today.
+        if interval and count_interval:
+            interval_subquery = """WHERE created >
+                date_trunc('{0}', CURRENT_DATE) - INTERVAL '{1} {2}'""".format(group_by, count_interval, interval)
+        # An explicit date range overrides the rolling window.
+        if startDate and endDate:
+            interval_subquery = """
+                WHERE created BETWEEN '{0}' AND '{1}'
+            """.format(startDate, endDate)
+    if interval_subquery == "":
+        interval_subquery = """
+            WHERE community.tenenv_id={0}
+        """.format(tenenv_id)
+    else:
+        interval_subquery += """ AND community.tenenv_id={0}
+        """.format(tenenv_id)
+
+    communities = session.exec("""
+        select count(*) as count, date_trunc( '{0}', created ) as range_date,
+        min(created) as min_date , string_agg(name,'|| ') as names,
+        string_agg(to_char(created, 'YYYY-MM-DD'),', ') as created_date,
+        string_agg(description,'|| ') as description
+        from community
+        join community_info on community.community_id=community_info.id
+        {1}
+        group by range_date
+        ORDER BY range_date ASC
+    """.format(group_by, interval_subquery)).all()
+    return communities
+
+
+@router.get("/communities/")
+async def read_community(
+    *,
+    session: Session = Depends(get_session),
+    community_id: Union[None, int] = None,
+    tenenv_id: int):
+    """Return community_info rows for a tenenv, optionally a single one."""
+    sql_subquery = ''
+    if community_id:
+        sql_subquery = 'id={0} and'.format(community_id)
+    community = session.exec("""
+        SELECT * FROM community_info WHERE {0} tenenv_id={1}
+    """.format(sql_subquery, tenenv_id)).all()
+    # statement = select(Community).options(selectinload(Community.community_info))
+    # result = session.exec(statement)
+    # community = result.one()
+    # if not community:
+    #     raise HTTPException(status_code=404, detail="Community not found")
+    return community
+
+
+@router.get("/communities_info/", response_model=List[Community_InfoRead])
+async def read_communities_info(
+    *,
+    session: Session = Depends(get_session),
+    offset: int = 0
+):
+    """Return all community_info rows, offset-paginated."""
+    communities = session.exec(select(Community_Info).offset(offset)).all()
+    return communities
diff --git a/app/routers/countries.py b/app/routers/countries.py
new file mode 100644
index 0000000..c744f11
--- /dev/null
+++ b/app/routers/countries.py
@@ -0,0 +1,102 @@
+from fastapi import APIRouter, Depends, HTTPException, Query
+from app.database import get_session
+from sqlmodel import Field, Session, SQLModel, create_engine, select
+from typing import Union
+
+from app.models.country_model import *
+from app.models.country_hashed_user_model import *
+from app.utils.globalMethods import AuthNZCheck
+
+
+# from ..dependencies import get_token_header
+
+# Router for country-related read endpoints.
+# NOTE(review): Depends(AuthNZCheck) passes the CLASS itself, so FastAPI only
+# instantiates it and never awaits __call__ — unlike the other routers, which
+# use Depends(AuthNZCheck("tag")). Confirm these routes are meant to skip the
+# authentication check.
+router = APIRouter(
+    tags=["countries"],
+    dependencies=[Depends(AuthNZCheck)],
+    # responses={404: {"description": "Not found"}},
+)
+
+
+@router.get("/countries/", response_model=List[Country_CodesRead])
+async def read_countries(
+    *,
+    session: Session = Depends(get_session),
+    offset: int = 0
+):
+    """Return all country-code rows, skipping the first `offset` entries."""
+    countries = session.exec(select(Country_Codes).offset(offset)).all()
+    return countries
+
+
+@router.get("/country_stats/", response_model=List[Statistics_Country_HashedwithInfo])
+async def read_country_stats(
+    *,
+    session: Session = Depends(get_session),
+    offset: int = 0
+):
+    """Return all per-country hashed login-statistics rows, skipping the
+    first `offset` entries."""
+    stats = session.exec(
+        select(Statistics_Country_Hashed).offset(offset)).all()
+    return stats
+
+
+@router.get("/country_stats_by_vo/{community_id}")
+async def read_country_stats_by_vo(
+    *,
+    session: Session = Depends(get_session),
+    offset: int = 0,
+    community_id: Union[None, int] = None
+):
+    """Return per-country member counts for a community (VO).
+
+    'stats'  -- one row per country with the member total.
+    'status' -- the same totals additionally broken down by member status.
+
+    Each user is attributed to the single country they logged in from most
+    often; ties are broken via row_number so each user is counted once.
+    community_id is int-typed by FastAPI, which guards the .format()
+    interpolation below against SQL injection.
+    """
+    # Total members per country (one country per user, 'Unknown' excluded).
+    stats_country = session.exec("""
+        WITH users_countries AS (
+            SELECT statistics_country_hashed.hasheduserid as userid, status, country, countrycode, count(*) as sum_count
+            FROM statistics_country_hashed
+            JOIN members ON members.hasheduserid=statistics_country_hashed.hasheduserid
+            JOIN country_codes ON countryid=country_codes.id
+            WHERE community_id={0} AND country!='Unknown'
+            GROUP BY userid, status, country, countrycode
+        ),
+        max_count_users_countries AS (
+            SELECT DISTINCT userid, status, max(sum_count) as max_sum_count,row_number() OVER (ORDER BY userid, status) as row_number
+            FROM users_countries
+            GROUP BY userid, status
+        )
+        SELECT country,countrycode,count(*) as sum
+        FROM users_countries
+        JOIN (
+            SELECT userid, status, max_sum_count, max(row_number)
+            FROM max_count_users_countries GROUP BY userid, status, max_sum_count
+        ) max_count_users_countries_no_duplicates
+        ON users_countries.userid=max_count_users_countries_no_duplicates.userid
+        AND users_countries.sum_count=max_count_users_countries_no_duplicates.max_sum_count
+        GROUP BY country,countrycode
+        ORDER BY country;
+    """.format(community_id)).all()
+    # Same aggregation, additionally grouped by membership status.
+    status_per_country = session.exec("""
+        WITH users_countries AS (
+            SELECT statistics_country_hashed.hasheduserid as userid, status, country, countrycode, count(*) as sum_count
+            FROM statistics_country_hashed
+            JOIN members ON members.hasheduserid=statistics_country_hashed.hasheduserid
+            JOIN country_codes ON countryid=country_codes.id
+            WHERE community_id={0} AND country!='Unknown'
+            GROUP BY userid, status, country, countrycode
+        ),
+        max_count_users_countries AS (
+            SELECT DISTINCT userid, status, max(sum_count) as max_sum_count, row_number() OVER (ORDER BY userid, status) as row_number
+            FROM users_countries
+            GROUP BY userid, status
+        )
+        SELECT country,countrycode, users_countries.status, count(*) as sum
+        FROM users_countries
+        JOIN (
+            SELECT userid, status, max_sum_count, max(row_number)
+            FROM max_count_users_countries GROUP BY userid, status, max_sum_count
+        ) max_count_users_countries_no_duplicates
+        ON users_countries.userid=max_count_users_countries_no_duplicates.userid
+        AND users_countries.sum_count=max_count_users_countries_no_duplicates.max_sum_count
+        GROUP BY country,countrycode, users_countries.status
+        ORDER BY country;
+    """.format(community_id)).all()
+
+    return {
+        'stats': stats_country,
+        'status': status_per_country
+    }
diff --git a/app/routers/dashboard.py b/app/routers/dashboard.py
new file mode 100644
index 0000000..2e6a46d
--- /dev/null
+++ b/app/routers/dashboard.py
@@ -0,0 +1,85 @@
+from fastapi import APIRouter, Depends, HTTPException, Request
+from sqlmodel import Field, Session, SQLModel, create_engine, select
+from typing import Union
+from xmlrpc.client import boolean
+
+from app.database import get_session
+from app.utils.globalMethods import AuthNZCheck
+
+# Router for dashboard endpoints. skip=True means AuthNZCheck tolerates
+# unauthenticated requests for this tag instead of raising 401.
+router = APIRouter(
+    tags=["dashboard"],
+    dependencies=[Depends(AuthNZCheck("dashboard", True))]
+)
+
+@router.get("/tenenv/{tenant_name}/{environment_name}")
+async def read_tenenv_byname(
+ *,
+ session: Session = Depends(get_session),
+ offset: int = 0,
+ tenant_name: str,
+ environment_name: str
+):
+ tenenv = None
+ if tenant_name and environment_name:
+ tenenv = session.exec("""
+ SELECT tenenv_info.* FROM tenenv_info
+ JOIN tenant_info ON tenant_info.id=tenant_id
+ AND LOWER(tenant_info.name)=LOWER('{0}')
+ JOIN environment_info ON environment_info.id=env_id
+ AND LOWER(environment_info.name)=LOWER('{1}')
+ """.format(tenant_name, environment_name)).all()
+ return tenenv
+
+
+@router.get("/environment_byname/{environment_name}")
+async def read_environment_byname(
+ *,
+ session: Session = Depends(get_session),
+ offset: int = 0,
+ environment_name: str
+):
+ environment = None
+ if environment_name:
+ environment = session.exec("""
+ SELECT * FROM environment_info
+ WHERE name='{0}' LIMIT 1
+ """.format(environment_name)).all()
+ return environment
+
+
+@router.get("/idps")
+async def read_idps(
+    *,
+    session: Session = Depends(get_session),
+    tenenv_id: int,
+    idpId: int = None
+):
+    """Return identity-provider map rows for a tenant/environment,
+    optionally narrowed to a single IdP id.
+
+    Both parameters are int-typed by FastAPI, so the .format()
+    interpolation below is not injectable.
+    """
+    idpId_subquery = ""
+    if idpId:
+        idpId_subquery = """
+        AND id = {0}
+        """.format(idpId)
+    idps = session.exec("""
+        SELECT * FROM identityprovidersmap
+        WHERE tenenv_id='{0}' {1}
+    """.format(tenenv_id, idpId_subquery)).all()
+    return idps
+
+
+@router.get("/sps")
+async def read_sps(
+    *,
+    session: Session = Depends(get_session),
+    tenenv_id: int,
+    spId: int = None
+):
+    """Return service-provider map rows for a tenant/environment,
+    optionally narrowed to a single SP id.
+
+    Both parameters are int-typed by FastAPI, so the .format()
+    interpolation below is not injectable.
+    """
+    spId_subquery = ""
+    if spId:
+        spId_subquery = """
+        AND id = {0}
+        """.format(spId)
+    sps = session.exec("""
+        SELECT * FROM serviceprovidersmap
+        WHERE tenenv_id='{0}' {1}
+    """.format(tenenv_id, spId_subquery)).all()
+    return sps
diff --git a/app/routers/logins.py b/app/routers/logins.py
new file mode 100644
index 0000000..8d151fa
--- /dev/null
+++ b/app/routers/logins.py
@@ -0,0 +1,392 @@
+from pprint import pprint
+
+from fastapi import APIRouter, Depends, HTTPException, Request
+from sqlmodel import Field, Session, SQLModel, create_engine, select
+from starlette.responses import JSONResponse
+from sqlalchemy.exc import NoResultFound
+from typing import Union
+from xmlrpc.client import boolean
+
+from app.database import get_session
+from app.utils.globalMethods import AuthNZCheck
+
+
+# LOGINS ROUTES ARE OPEN
+
+# Router for login-statistics endpoints. skip=True means AuthNZCheck lets
+# unauthenticated requests through for this tag instead of raising 401.
+router = APIRouter(
+    tags=["logins"],
+    dependencies=[Depends(AuthNZCheck("logins", True))]
+)
+
+@router.get("/min_date_logins")
+async def read_min_date_logins(
+    *,
+    request: Request,
+    session: Session = Depends(get_session),
+    tenenv_id: int,
+    unique_logins: Union[boolean, None] = False,
+):
+    """Return the earliest and latest login dates for a tenant/environment.
+
+    When unique_logins is truthy, rows whose hashed user id is the literal
+    'unknown' are excluded. tenenv_id is int-typed, so the .format()
+    interpolation is not injectable.
+    """
+    unique_logins_subquery = ""
+    if unique_logins:
+        unique_logins_subquery = "AND hasheduserid != 'unknown'"
+
+    min_date = session.exec("""SELECT min(date) as min_date, max(date) as max_date
+                    FROM statistics_country_hashed
+                    WHERE tenenv_id={0} {1}""".format(tenenv_id, unique_logins_subquery)).one()
+    return min_date
+
+@router.get("/logins_per_idp")
+async def read_logins_per_idp(
+    *,
+    request: Request,
+    session: Session = Depends(get_session),
+    offset: int = 0,
+    sp: str = None,  # type: ignore
+    startDate: str = None,  # type: ignore
+    endDate: str = None,  # type: ignore
+    tenenv_id: int,
+    unique_logins: Union[boolean, None] = False,
+):
+    """Return login counts grouped by identity provider, optionally
+    restricted to one service provider and/or a date window.
+
+    unique_logins=True counts distinct hashed user ids instead of summing
+    raw login counts.
+    NOTE(review): sp, startDate and endDate are strings interpolated into
+    the SQL via .format() — SQL-injectable; consider bound parameters.
+    """
+    interval_subquery = ""
+    sp_subquery_join = ""
+    if sp:
+        # Is the user authenticated?
+        # NOTE(review): this only constructs the dependency object; its
+        # __call__ is never awaited, so no check actually runs — confirm.
+        AuthNZCheck("logins", False)
+
+        # Fetch the data
+        sp_subquery_join = """
+            JOIN serviceprovidersmap ON serviceprovidersmap.id=serviceid
+            AND serviceprovidersmap.tenenv_id=statistics_country_hashed.tenenv_id
+            AND serviceprovidersmap.tenenv_id={1}
+            AND serviceid = '{0}'
+        """.format(
+            sp, tenenv_id
+        )
+    unique_logins_subquery = ""
+    if unique_logins:
+        unique_logins_subquery = "AND hasheduserid != 'unknown'"
+
+    if startDate and endDate:
+        interval_subquery = """
+            AND date BETWEEN '{0}' AND '{1}'
+        """.format(startDate, endDate)
+    if unique_logins == False:
+        sub_select = """
+            sum(count) as count
+        """
+    else:
+        sub_select = """
+            count(DISTINCT hasheduserid) as count
+        """
+
+    logins = session.exec("""
+        select identityprovidersmap.id, identityprovidersmap.name, entityid, sourceidpid, {0}
+        from statistics_country_hashed
+        join identityprovidersmap ON identityprovidersmap.id=sourceidpid
+        AND identityprovidersmap.tenenv_id=statistics_country_hashed.tenenv_id
+        {1}
+        WHERE statistics_country_hashed.tenenv_id = {2}
+        {3} {4}
+        GROUP BY identityprovidersmap.id, sourceidpid, identityprovidersmap.name, entityid
+        ORDER BY count DESC
+    """.format(
+        sub_select, sp_subquery_join, tenenv_id, interval_subquery, unique_logins_subquery
+    )).all()
+
+    return logins
+
+
+@router.get("/logins_per_sp")
+async def read_logins_per_sp(
+    *,
+    session: Session = Depends(get_session),
+    request: Request,
+    offset: int = 0,
+    idp: str = None,
+    startDate: str = None,
+    endDate: str = None,
+    tenenv_id: int,
+    unique_logins: Union[boolean, None] = False,
+):
+    """Return login counts grouped by service provider, optionally
+    restricted to one identity provider and/or a date window.
+
+    unique_logins=True counts distinct hashed user ids instead of summing
+    raw login counts.
+    NOTE(review): idp, startDate and endDate are strings interpolated into
+    the SQL via .format() — SQL-injectable; consider bound parameters.
+    """
+    unique_logins_subquery = ""
+    if unique_logins:
+        unique_logins_subquery = "AND hasheduserid != 'unknown'"
+
+    interval_subquery = ""
+    idp_subquery_join = ""
+    if idp:
+        # Is the user authenticated?
+        # NOTE(review): constructs the dependency but never awaits
+        # __call__ — appears to be a no-op; confirm.
+        AuthNZCheck("logins", False)
+
+        # Fetch the data
+        idp_subquery_join = """
+            JOIN identityprovidersmap ON identityprovidersmap.id=sourceidpid
+            AND identityprovidersmap.tenenv_id=statistics_country_hashed.tenenv_id
+            AND identityprovidersmap.tenenv_id={1}
+            AND identityprovidersmap.id = '{0}'
+        """.format(
+            idp, tenenv_id
+        )
+
+    if startDate and endDate:
+        interval_subquery = """
+            AND date BETWEEN '{0}' AND '{1}'
+        """.format(startDate, endDate)
+
+    if unique_logins == False:
+        sub_select = """
+            sum(count) as count
+        """
+    else:
+        sub_select = """
+            count(DISTINCT hasheduserid) as count
+        """
+
+    logins = session.exec("""
+        select serviceprovidersmap.id, serviceprovidersmap.name, identifier, serviceid, {0}
+        from statistics_country_hashed
+        join serviceprovidersmap ON serviceprovidersmap.id=serviceid
+        AND serviceprovidersmap.tenenv_id=statistics_country_hashed.tenenv_id
+        {1}
+        WHERE statistics_country_hashed.tenenv_id = {2}
+        {3} {4}
+        GROUP BY serviceprovidersmap.id, serviceid, serviceprovidersmap.name, identifier
+        ORDER BY count DESC
+    """.format(
+        sub_select, idp_subquery_join, tenenv_id, interval_subquery, unique_logins_subquery
+    )).all()
+    return logins
+
+
+@router.get("/logins_per_country")
+async def read_logins_per_country(
+    *,
+    session: Session = Depends(get_session),
+    offset: int = 0,
+    group_by: Union[str, None] = None,
+    startDate: str = None,
+    endDate: str = None,
+    tenenv_id: int,
+    unique_logins: Union[boolean, None] = False,
+    idpId: Union[int, None] = None,
+    spId: Union[int, None] = None,
+):
+    """Return login counts per country.
+
+    With group_by (e.g. a date_trunc unit such as 'month') the counts are
+    bucketed per period, with an aggregated "Country: N" label string per
+    bucket; without it a flat per-country list is returned.
+    unique_logins=True counts distinct hashed user ids instead of summing.
+    NOTE(review): group_by, startDate and endDate are strings interpolated
+    into the SQL via .format() — SQL-injectable; consider bound parameters.
+    idpId/spId/tenenv_id are int-typed and safe.
+    """
+    unique_logins_subquery = ""
+    if unique_logins:
+        unique_logins_subquery = "AND hasheduserid != 'unknown'"
+
+    interval_subquery = ""
+    entity_subquery = ""
+    sp_subquery = ""
+    if idpId:
+        entity_subquery = """
+            AND sourceidpid = {0}
+        """.format(idpId)
+    if spId:
+        sp_subquery = """
+            AND serviceid = {0}
+        """.format(spId)
+    if group_by:
+        if startDate and endDate:
+            interval_subquery = """
+                AND date BETWEEN '{0}' AND '{1}'
+            """.format(startDate, endDate)
+
+        if unique_logins == False:
+            sub_select = """
+                sum(count) as count_country
+            """
+            # NOTE: local name shadows the builtin sum() within this scope.
+            sum = "sum(count)"
+        else:
+            sub_select = """
+                count(DISTINCT hasheduserid) as count_country
+            """
+            sum = "count(DISTINCT hasheduserid)"
+        logins = session.exec("""
+            SELECT range_date, sum(count_country) as count, min(min_login_date) as min_date, STRING_AGG(country, '|| ') as countries
+            FROM (
+                SELECT date_trunc('{0}', date) as range_date, min(date) as min_login_date, {1}, CONCAT(country,': ',{2}) as country
+                from statistics_country_hashed
+                JOIN country_codes ON countryid=country_codes.id
+                WHERE tenenv_id = {3}
+                {4} {5} {6} {7}
+                GROUP BY range_date, country
+                ORDER BY range_date,country ASC
+            ) country_logins
+            GROUP BY range_date
+        """.format(
+            group_by,
+            sub_select,
+            sum,
+            tenenv_id,
+            interval_subquery,
+            entity_subquery,
+            sp_subquery,
+            unique_logins_subquery
+        )).all()
+    else:
+        if startDate and endDate:
+            interval_subquery = """
+                AND date BETWEEN '{0}' AND '{1}'
+            """.format(startDate, endDate)
+
+        if unique_logins == False:
+            sub_select = """
+                sum(count) as sum
+            """
+        else:
+            sub_select = """
+                count(DISTINCT hasheduserid) as sum
+            """
+        logins = session.exec("""
+            SELECT country, countrycode, {0}
+            FROM statistics_country_hashed
+            JOIN country_codes ON countryid=country_codes.id
+            WHERE tenenv_id = {1}
+            {2} {3} {4}
+            GROUP BY country,countrycode
+        """.format(
+            sub_select, tenenv_id, interval_subquery, entity_subquery, sp_subquery
+        )).all()
+    return logins
+
+
+@router.get("/logins_countby")
+async def read_logins_countby(
+    *,
+    session: Session = Depends(get_session),
+    offset: int = 0,
+    interval: Union[str, None] = None,
+    count_interval: int = None,
+    tenenv_id: int,
+    unique_logins: Union[boolean, None] = False,
+    idpId: Union[int, None] = None,
+    spId: Union[int, None] = None,
+):
+    """Return the total (or distinct-user) login count for a
+    tenant/environment, optionally limited to the trailing
+    '<count_interval> <interval>' window (e.g. '7 days') and/or a single
+    IdP/SP.
+
+    NOTE(review): `interval` is a string interpolated into the SQL via
+    .format() — SQL-injectable; the other filters are int-typed and safe.
+    """
+
+    interval_subquery = ""
+    idp_subquery = ""
+    sp_subquery = ""
+    if interval and count_interval:
+        interval_subquery = """AND date >
+            CURRENT_DATE - INTERVAL '{0} {1}'""".format(count_interval, interval)
+    if idpId:
+        idp_subquery = """
+            AND sourceidpid = '{0}'
+        """.format(idpId)
+    if spId:
+        sp_subquery = """
+            AND serviceid = '{0}'
+        """.format(spId)
+    if unique_logins == False:
+        logins = session.exec("""
+            select sum(count) as count
+            from statistics_country_hashed WHERE tenenv_id={0}
+            {1} {2} {3}
+        """.format(
+            tenenv_id, interval_subquery, idp_subquery, sp_subquery
+        )).all()
+    else:
+        logins = session.exec("""
+            select count(DISTINCT hasheduserid) as count
+            from statistics_country_hashed WHERE tenenv_id={0} AND hasheduserid != 'unknown'
+            {1} {2} {3}
+        """.format(
+            tenenv_id, interval_subquery, idp_subquery, sp_subquery
+        )).all()
+    return logins
+
+@router.get("/logins_groupby/{group_by}")
+async def read_logins_groupby(
+    *,
+    session: Session = Depends(get_session),
+    request: Request,
+    offset: int = 0,
+    group_by: str,
+    idp: str = None,
+    sp: str = None,
+    tenenv_id: int,
+    unique_logins: Union[boolean, None] = False,
+):
+    """Return a continuous per-day (or per-`group_by` unit) login time
+    series, left-joined against a generated calendar of days so periods
+    with no logins still appear (with NULL count).
+
+    Optionally filtered to a single IdP or SP; unique_logins=True counts
+    distinct hashed user ids.
+    NOTE(review): group_by, idp and sp are strings interpolated into the
+    SQL via .format() — SQL-injectable; consider bound parameters.
+    """
+    days_seq_subquery = ""
+    if unique_logins:
+        days_seq_subquery = " AND hasheduserid != 'unknown'"
+    # Calendar CTE: one row per day between the first and last login.
+    days_seq_table = """
+        with days as (select generate_series(
+            (select date_trunc('day', min(date))
+                from statistics_country_hashed
+                where statistics_country_hashed.tenenv_id = {0} {1})::timestamp,
+            (select date_trunc('day', max(date))
+                from statistics_country_hashed
+                where statistics_country_hashed.tenenv_id = {0} {1}),
+            '1 day'::interval
+        ) as day)
+    """.format(tenenv_id, days_seq_subquery)
+
+    interval_subquery = ""
+    if idp != None:
+        # Is the user authenticated?
+        # NOTE(review): constructs the dependency but never awaits
+        # __call__ — appears to be a no-op; confirm.
+        AuthNZCheck("logins", False)
+
+        # Fetch the data
+        interval_subquery = """
+            JOIN identityprovidersmap ON sourceidpid=identityprovidersmap.id
+            AND identityprovidersmap.tenenv_id=statistics_country_hashed.tenenv_id
+            WHERE identityprovidersmap.id = '{0}'
+        """.format(idp)
+    elif sp != None:
+        # Is the user authenticated?
+        # NOTE(review): same no-op construction as above — confirm.
+        AuthNZCheck("logins", False)
+
+        # Fetch the data
+        interval_subquery = """
+            JOIN serviceprovidersmap ON serviceid=serviceprovidersmap.id
+            AND serviceprovidersmap.tenenv_id=statistics_country_hashed.tenenv_id
+            WHERE serviceprovidersmap.id = '{0}'
+        """.format(sp)
+    # Always scope the query to the tenant/environment, whether or not an
+    # IdP/SP filter already contributed a WHERE clause.
+    if interval_subquery == "":
+        interval_subquery = (
+            """WHERE statistics_country_hashed.tenenv_id = {0}""".format(tenenv_id)
+        )
+    else:
+        interval_subquery += (
+            """ AND statistics_country_hashed.tenenv_id = {0} """.format(tenenv_id)
+        )
+    logins_count_raw = """
+        select sum(count) as count, date_trunc('{0}', date) as date
+        from statistics_country_hashed
+        {1}
+        GROUP BY date_trunc('{0}', date)
+        ORDER BY date_trunc('{0}', date) ASC
+    """.format(group_by, interval_subquery)
+    if unique_logins is True:
+        logins_count_raw = """
+            select count(DISTINCT hasheduserid) as count, date_trunc('{0}', date) as date
+            from statistics_country_hashed
+            {1} {2}
+            GROUP BY date_trunc('{0}', date)
+            ORDER BY date_trunc('{0}', date) ASC
+        """.format(group_by, interval_subquery, days_seq_subquery)
+
+    # print("""
+    #     {0},
+    #     logins_count as ({1})
+    #     select days.day as date, logins_count.count as count
+    #     from days left join logins_count on logins_count.date = days.day
+    #     ORDER BY date ASC;
+    # """.format(days_seq_table, logins_count_raw))
+
+    logins = session.exec("""
+        {0},
+        logins_count as ({1})
+        select days.day as date, logins_count.count as count
+        from days left join logins_count on logins_count.date = days.day
+        ORDER BY date ASC;
+    """.format(days_seq_table, logins_count_raw)).all()
+    return logins
diff --git a/app/routers/users.py b/app/routers/users.py
new file mode 100644
index 0000000..2daa268
--- /dev/null
+++ b/app/routers/users.py
@@ -0,0 +1,173 @@
+from fastapi import APIRouter, Depends, HTTPException
+from sqlmodel import Field, Session, SQLModel, create_engine, select
+from typing import Union
+
+from app.database import get_session
+from app.utils.globalMethods import AuthNZCheck
+
+
+# from ..dependencies import get_token_header
+
+# Router for registered-user statistics; every route requires the
+# 'registered_users' view permission via AuthNZCheck.
+router = APIRouter(
+    tags=["users"],
+    dependencies=[Depends(AuthNZCheck("registered_users"))],
+    # responses={404: {"description": "Not found"}},
+)
+
+@router.get("/min_date_registered_users")
+async def read_min_date_registered_users(
+ *,
+ session: Session = Depends(get_session),
+ tenenv_id: int
+):
+ min_date = session.exec("""
+ SELECT min(created) as min_date
+ FROM users
+ """).one()
+ return min_date
+
+@router.get("/registered_users_country")
+async def read_users_country(
+    *,
+    session: Session = Depends(get_session),
+    offset: int = 0,
+    startDate: str = None,
+    endDate: str = None,
+    tenenv_id: int
+):
+    """Return counts of active ('A') registered users per country.
+
+    Each user is attributed to the country they logged in from most often
+    (ties resolved by row_number so each user counts once), optionally
+    limited to users created between startDate and endDate.
+    NOTE(review): startDate/endDate are strings interpolated into the SQL
+    via .format() — SQL-injectable; tenenv_id is int-typed and safe.
+    """
+    interval_subquery = ""
+    if startDate and endDate:
+        interval_subquery = """
+            WHERE users.created BETWEEN '{0}' AND '{1}'
+        """.format(startDate, endDate)
+    users_countries = session.exec(
+        """WITH users_countries AS (
+            SELECT statistics_country_hashed.hasheduserid as userid, country, countrycode, count(*) as sum_count
+            FROM statistics_country_hashed
+            JOIN country_codes ON countryid=country_codes.id
+            WHERE tenenv_id = {1}
+            GROUP BY userid, country, countrycode
+        ),
+        max_count_users_countries AS (
+            SELECT DISTINCT userid, max(sum_count) as max_sum_count,row_number() OVER (ORDER BY userid) as row_number
+            FROM users_countries
+            GROUP BY userid
+        )
+        SELECT country,countrycode, count(*) as sum
+        FROM users_countries
+        JOIN (
+            SELECT userid, max_sum_count, max(row_number)
+            FROM max_count_users_countries GROUP BY userid, max_sum_count
+        ) max_count_users_countries_no_duplicates
+        ON users_countries.userid=max_count_users_countries_no_duplicates.userid
+        AND users_countries.sum_count=max_count_users_countries_no_duplicates.max_sum_count
+        JOIN users ON users.hasheduserid=users_countries.userid AND status='A'
+        {0}
+        GROUP BY country,countrycode
+        ORDER BY country,countrycode
+        """.format(interval_subquery, tenenv_id)).all()
+    return users_countries
+
+
+@router.get("/registered_users_country_group_by/{group_by}")
+async def read_users_country_groupby(
+    *,
+    session: Session = Depends(get_session),
+    offset: int = 0,
+    group_by: str,
+    startDate: str = None,
+    endDate: str = None,
+    tenenv_id: int
+):
+    """Return active-user registrations bucketed by `group_by` date unit,
+    with an aggregated "Country: N" label per bucket.
+
+    Country attribution matches /registered_users_country (most-frequent
+    login country per user).
+    NOTE(review): group_by/startDate/endDate are strings interpolated into
+    the SQL via .format() — SQL-injectable; tenenv_id is int-typed.
+    """
+    if group_by:
+        interval_subquery = ""
+        if startDate and endDate:
+            interval_subquery = """
+                WHERE users.created BETWEEN '{0}' AND '{1}'
+            """.format(startDate, endDate)
+        users = session.exec(
+            """WITH users_countries AS (
+                SELECT statistics_country_hashed.hasheduserid as userid, country, countrycode, count(*) as sum_count
+                FROM statistics_country_hashed
+                JOIN country_codes ON countryid=country_codes.id
+                WHERE tenenv_id = {2}
+                GROUP BY userid, country, countrycode
+            ),
+            max_count_users_countries AS (
+                SELECT DISTINCT userid, max(sum_count) as max_sum_count,row_number() OVER (ORDER BY userid) as row_number
+                FROM users_countries
+                GROUP BY userid
+            )
+            SELECT range_date, min(created_min_date) as min_date, STRING_AGG(country, '|| ') as countries, sum(sum) as count
+            FROM
+            (
+                SELECT date_trunc('{0}', users.created) as range_date, CONCAT(country,': ',count(*)) as country, min(users.created) as created_min_date, count(*) as sum
+                FROM users_countries
+                JOIN (
+                    SELECT userid, max_sum_count, max(row_number)
+                    FROM max_count_users_countries GROUP BY userid, max_sum_count
+                ) max_count_users_countries_no_duplicates
+                ON users_countries.userid=max_count_users_countries_no_duplicates.userid
+                AND users_countries.sum_count=max_count_users_countries_no_duplicates.max_sum_count
+                JOIN users ON users.hasheduserid=users_countries.userid AND status='A'
+                {1}
+                GROUP BY range_date, country,countrycode
+                ORDER BY range_date, country
+            ) user_country_group_by
+            GROUP BY range_date""".format(group_by, interval_subquery, tenenv_id)).all()
+    return users
+
+
+@router.get("/registered_users_groupby/{group_by}")
+async def read_users_groupby(
+    *,
+    session: Session = Depends(get_session),
+    offset: int = 0,
+    group_by: str,
+    interval: Union[str, None] = None,
+    count_interval: int = None,
+    startDate: str = None,
+    endDate: str = None,
+    tenenv_id: int
+):
+    """Return counts of active ('A') user registrations bucketed by the
+    `group_by` date_trunc unit.
+
+    The window is either the trailing '<count_interval> <interval>' period
+    or an explicit startDate/endDate range; when both are supplied the
+    explicit range wins (it overwrites interval_subquery below).
+    NOTE(review): group_by/interval/startDate/endDate are strings
+    interpolated into the SQL via .format() — SQL-injectable.
+    """
+    interval_subquery = ""
+    if group_by:
+        if interval and count_interval:
+            interval_subquery = """AND created >
+                date_trunc('{0}', CURRENT_DATE) - INTERVAL '{1} {2}'""".format(group_by, count_interval, interval)
+        if startDate and endDate:
+            interval_subquery = """
+                AND created BETWEEN '{0}' AND '{1}'
+            """.format(startDate, endDate)
+    users = session.exec("""
+        select count(*) as count, date_trunc( '{0}', created ) as range_date,
+        min(created) as min_date
+        from users
+        WHERE status = 'A' AND tenenv_id = {1}
+        {2}
+        group by range_date
+        ORDER BY range_date ASC
+    """.format(group_by, tenenv_id, interval_subquery)).all()
+    return users
+
+
+@router.get("/registered_users_countby")
+async def read_users_countby(
+    *,
+    session: Session = Depends(get_session),
+    offset: int = 0,
+    interval: Union[str, None] = None,
+    count_interval: int = None,
+    tenenv_id: int
+):
+    """Return the total number of active ('A') users for a
+    tenant/environment, optionally limited to accounts created within the
+    trailing '<count_interval> <interval>' window.
+
+    NOTE(review): `interval` is a string interpolated into the SQL via
+    .format() — SQL-injectable; count_interval/tenenv_id are int-typed.
+    """
+    interval_subquery = ""
+    if interval and count_interval:
+        interval_subquery = """AND created >
+            CURRENT_DATE - INTERVAL '{0} {1}'""".format(count_interval, interval)
+
+    users = session.exec("""
+        select count(*) as count
+        from users
+        WHERE status = 'A' AND tenenv_id = {1}
+        {0}""".format(interval_subquery, tenenv_id)).all()
+    return users
diff --git a/app/utils/__init__.py b/app/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/utils/configParser.py b/app/utils/configParser.py
new file mode 100644
index 0000000..8de64cc
--- /dev/null
+++ b/app/utils/configParser.py
@@ -0,0 +1,36 @@
+import os
+from configparser import RawConfigParser
+from app.utils.fastapiGlobals import g
+
+# TODO: We need to cache the content of the file
+def getConfig(section='source_database', config_file='config.py'):
+    """Read one section of an INI-style config file into a plain dict.
+
+    section     -- section name to read (default 'source_database').
+    config_file -- file name; 'authorize.*' files use ';' as the key/value
+                   delimiter and '%%' as the comment prefix (entitlement
+                   keys contain ':' and '=').
+    Raises Exception when the section is missing.
+    NOTE(review): os.path.realpath('__file__') resolves the literal string
+    '__file__' relative to the CWD, not this module's directory — confirm
+    config files are expected in the process working directory.
+    """
+
+    # create a parser
+    parser = RawConfigParser()
+
+    # XXX Since Entitlement contain both colons(:) and equal signs
+    # we will configure the semi colon (;) as a delimiter for
+    # the case of authorize configuration.
+    # Which means that we have to modify the comment prefix as well
+    if "authorize" in config_file:
+        parser = RawConfigParser(delimiters=';', comment_prefixes='%%', )
+    # print(sys.argv[0])
+    # print(os.path.dirname(os.path.abspath(sys.argv[0])))
+    # read config file
+    file_dir = os.path.dirname(os.path.realpath('__file__'))
+    parser.read(os.path.join(file_dir, config_file))
+
+    # get section, default to source_database
+    config = {}
+
+    if parser.has_section(section):
+
+        params = parser.items(section)
+        for param in params:
+            config[param[0]] = param[1]
+
+    else:
+        raise Exception(
+            'Section {0} not found in the {1} file'.format(section, config_file))
+
+    return config
diff --git a/app/utils/fastapiGlobals.py b/app/utils/fastapiGlobals.py
new file mode 100644
index 0000000..548b922
--- /dev/null
+++ b/app/utils/fastapiGlobals.py
@@ -0,0 +1,138 @@
+"""
+This allows to use global variables inside the FastAPI application using async mode.
+
+# Usage
+
+Just import `g` and then access (set/get) attributes of it:
+```python
+from your_project.globals import g
+
+
+g.foo = "foo"
+
+# In some other code
+assert g.foo == "foo"
+```
+
+Best way to utilize the global `g` in your code is to set the desired
+value in a FastAPI dependency, like so:
+```python
+async def set_global_foo() -> None:
+ g.foo = "foo"
+
+
+@app.get("/test/", dependencies=[Depends(set_global_foo)])
+async def test():
+ assert g.foo == "foo"
+```
+
+# Setup
+
+Add the `GlobalsMiddleware` to your app:
+```python
+app = fastapi.FastAPI(
+ title="Your app API",
+)
+app.add_middleware(GlobalsMiddleware) # <-- This line is necessary
+```
+
+Then just use it. ;-)
+
+# Default values
+
+You may use `g.set_default("name", some_value)` to set a default value
+for a global variable. This default value will then be used instead of `None`
+when the variable is accessed before it was set.
+
+Note that default values should only be set at startup time, never
+inside dependencies or similar. Otherwise you may run into the issue that
+the value was already used any thus have a value of `None` set already, which
+would result in the default value not being used.
+"""
+from collections.abc import Awaitable, Callable
+from contextvars import ContextVar, copy_context
+from typing import Any
+
+from fastapi import Request, Response
+from starlette.middleware.base import BaseHTTPMiddleware
+from starlette.types import ASGIApp
+
+
+class Globals:
+    """Request-scoped "global" attribute store backed by ContextVars.
+
+    Each attribute is stored in its own ContextVar, so values set inside
+    one request context do not leak into another when used together with
+    GlobalsMiddleware.
+    """
+    __slots__ = ("_vars", "_defaults")
+
+    # name -> ContextVar holding the per-context value
+    _vars: dict[str, ContextVar]
+    # name -> default (or zero-arg callable producing the default)
+    _defaults: dict[str, Any]
+
+    def __init__(self) -> None:
+        # object.__setattr__ bypasses our custom __setattr__, which would
+        # otherwise try to create ContextVars named '_vars'/'_defaults'.
+        object.__setattr__(self, '_vars', {})
+        object.__setattr__(self, '_defaults', {})
+
+    def set_default(self, name: str, default: Any) -> None:
+        """Set a default value for a variable."""
+
+        # Ignore if default is already set and is the same value
+        if (
+            name in self._defaults
+            and default is self._defaults[name]
+        ):
+            return
+
+        # Ensure we don't have a value set already - the default will have
+        # no effect then
+        if name in self._vars:
+            raise RuntimeError(
+                f"Cannot set default as variable {name} was already set",
+            )
+
+        # Set the default already!
+        self._defaults[name] = default
+
+    def _get_default_value(self, name: str) -> Any:
+        """Get the default value for a variable."""
+
+        default = self._defaults.get(name, None)
+
+        # Callables are invoked per lookup so mutable defaults are fresh.
+        return default() if callable(default) else default
+
+    def _ensure_var(self, name: str) -> None:
+        """Ensure a ContextVar exists for a variable."""
+
+        if name not in self._vars:
+            default = self._get_default_value(name)
+            self._vars[name] = ContextVar(f"globals:{name}", default=default)
+
+    def __getattr__(self, name: str) -> Any:
+        """Get the value of a variable."""
+
+        self._ensure_var(name)
+        return self._vars[name].get()
+
+    def __setattr__(self, name: str, value: Any) -> None:
+        """Set the value of a variable."""
+
+        self._ensure_var(name)
+        self._vars[name].set(value)
+
+
+async def globals_middleware_dispatch(
+    request: Request,
+    call_next: Callable,
+) -> Response:
+    """Dispatch the request in a new context to allow globals to be used."""
+
+    # Copy the current context so ContextVar writes made while handling
+    # this request stay isolated to it.
+    ctx = copy_context()
+
+    def _call_next() -> Awaitable[Response]:
+        return call_next(request)
+
+    return await ctx.run(_call_next)
+
+
+class GlobalsMiddleware(BaseHTTPMiddleware):  # noqa
+    """Middleware to setup the globals context using globals_middleware_dispatch()."""
+
+    def __init__(self, app: ASGIApp) -> None:
+        # Route every request through the context-copying dispatcher above.
+        super().__init__(app, globals_middleware_dispatch)
+
+g = Globals()
\ No newline at end of file
diff --git a/app/utils/globalMethods.py b/app/utils/globalMethods.py
new file mode 100644
index 0000000..6e4c202
--- /dev/null
+++ b/app/utils/globalMethods.py
@@ -0,0 +1,154 @@
+from pprint import pprint
+import requests as reqs
+from fastapi import Depends, FastAPI, HTTPException, Query, Request, HTTPException, status, Response
+import json, jwt
+
+from app.utils import configParser
+from authlib.integrations.starlette_client import OAuth, OAuthError
+from app.utils.fastapiGlobals import g
+
+
+# https://www.fastapitutorial.com/blog/class-based-dependency-injection/
+class AuthNZCheck:
+ def __init__(self, tag: str = "", skip: bool = False):
+ self.skip = skip
+ self.tag = tag
+ self.oauth = OAuth()
+
+ async def __call__(self, request: Request, response: Response):
+ # config
+ authorize_file = 'authorize.' + g.tenant + '.' + g.environment + '.py'
+ config_file = 'config.' + g.tenant + '.' + g.environment + '.py'
+ oidc_config = configParser.getConfig('oidc_client', config_file)
+
+ self.oauth.register(
+ 'rciam',
+ client_id=oidc_config['client_id'],
+ client_secret=oidc_config['client_secret'],
+ server_metadata_url=oidc_config['issuer'] + "/.well-known/openid-configuration",
+ client_kwargs={'scope': 'openid profile email voperson_id eduperson_entitlement'}
+ )
+
+ response.headers["Access-Control-Expose-Headers"] = "X-Permissions, X-Authenticated, X-Redirect"
+
+ # permissions calculation
+ access_token = request.headers.get('x-access-token')
+ rciam = self.oauth.create_client('rciam')
+ metadata = await rciam.load_server_metadata()
+
+ headers = {'Authorization': f'Bearer {access_token}'}
+ resp = reqs.get(metadata['userinfo_endpoint'], headers=headers)
+
+ # Authentication
+ if resp.status_code == 401:
+ # For now we skip logins and dashboard routes
+ if (self.tag == 'logins' or self.tag == 'dashboard') and self.skip:
+ permissions = permissionsCalculation(authorize_file)
+ permissions_json = json.dumps(permissions).replace(" ", "").replace("\n", "")
+ # pprint(permissions_json)
+ response.headers["X-Permissions"] = permissions_json
+ response.headers["X-Authenticated"] = "false"
+ response.headers["X-Redirect"] = "false"
+ return
+
+ raise HTTPException(
+ status_code=401,
+ detail="Authentication failed",
+ headers={
+ "X-Authenticated": "false",
+ "X-Redirect": "true",
+ "Access-Control-Expose-Headers": "X-Permissions, X-Authenticated, X-Redirect"
+ }
+ )
+ else:
+ try:
+ resp.raise_for_status()
+ data = resp.json()
+ except Exception as er:
+ # TODO: Log here
+ raise HTTPException(status_code=500)
+
+ # Authorization
+ permissions = permissionsCalculation(authorize_file, data)
+ permissions_json = json.dumps(permissions).replace(" ", "").replace("\n", "")
+
+ # Add the permission to a custom header field
+ response.headers["X-Permissions"] = permissions_json
+ response.headers["X-Authenticated"] = "true"
+
+ if bool(self.tag):
+ # Currently we only care about view
+ if permissions['actions'][self.tag]['view'] == False:
+ HTTPException(status_code=403)
+
+
+def permissionsCalculation(authorize_file, user_info=None):
+ entitlements_config = configParser.getConfig('entitlements', authorize_file)
+ user_entitlements = {}
+ if user_info is not None:
+ user_entitlements = user_info.get('eduperson_entitlement')
+
+ roles = {
+ 'anonymous': True,
+ 'authenticated': False,
+ 'administrator': False
+ }
+
+ for ent, role in entitlements_config.items():
+ if user_entitlements is not None and ent in user_entitlements:
+ # Reset the default anonymous role
+ roles['anonymous'] = False
+ # The role might be a csv list. So we need to
+ # explode and act accordingly
+ for item_role in role.split(","):
+ roles[item_role] = True
+
+ # pprint(roles)
+
+ actions = {
+ 'dashboard': {
+ 'view': False,
+ 'write': False
+ },
+ 'identity_providers': {
+ 'view': False,
+ 'write': False
+ },
+ 'service_providers': {
+ 'view': False,
+ 'write': False
+ },
+ 'logins': {
+ 'view': True,
+ 'write': True
+ },
+ 'registered_users': {
+ 'view': False,
+ 'write': False
+ },
+ 'communities': {
+ 'view': False,
+ 'write': False
+ },
+ 'statistics_raw': {
+ 'views': False,
+ 'write': False,
+ }
+ }
+
+ for role in roles.keys():
+ if roles[role]:
+ role_actions = configParser.getConfig(role, authorize_file)
+ for view, config_actions in role_actions.items():
+ for item in config_actions.split(","):
+ actions[view][item] = True
+ return {
+ 'roles': roles,
+ 'actions': actions
+ }
+
+
+def hasAction(user_actions, category, action):
+ if (user_actions[category][action] is True):
+ return True
+ return False
diff --git a/app/utils/ipDatabase.py b/app/utils/ipDatabase.py
new file mode 100644
index 0000000..19a1be7
--- /dev/null
+++ b/app/utils/ipDatabase.py
@@ -0,0 +1,20 @@
+from abc import ABC, abstractmethod
+from app.utils import configParser
+import geoip2.database
+
+
+class ipDatabase(ABC):
+ DBFILENAME = configParser.getConfig('ip_database_file', 'config.global.py')['db_filename']
+
+ @abstractmethod
+ def getCountryFromIp(self, ip):
+ pass
+
+
+class geoip2Database(ipDatabase):
+ @classmethod
+ def getCountryFromIp(cls, ip):
+ gi = geoip2.database.Reader("""./app/ip_databases/{0}"""
+ .format(ipDatabase.DBFILENAME))
+ return [gi.country(ip).country.iso_code, gi.country(ip).country.name]
+
diff --git a/docker-compose.yml b/docker-compose.yml
index 15a2c64..a67e721 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -26,8 +26,8 @@ services:
image: metricsrciam:latest
container_name: metrcis.rciam.fastapi
environment:
- - PATH=$PATH:.local/bin
- DATABASE_URL=postgresql+psycopg2://rciam:secret@db/metrics_dev
+ - API_ENVIRONMENT=dev
command: uvicorn app.main:app --reload --workers 1 --host 0.0.0.0 --port 8000
ports:
- "8004:8000"
@@ -47,6 +47,7 @@ services:
- "3300:3000"
volumes:
- ./javascript:/app
+ # - ./javascript/node_modules
tty: true
volumes:
diff --git a/javascript/.gitignore b/javascript/.gitignore
index e22e040..e80f931 100644
--- a/javascript/.gitignore
+++ b/javascript/.gitignore
@@ -23,3 +23,4 @@ yarn-debug.log*
yarn-error.log*
.npm/**
package-lock.json
+/src/*.json
diff --git a/javascript/CHANGELOG.md b/javascript/CHANGELOG.md
new file mode 100644
index 0000000..7463db7
--- /dev/null
+++ b/javascript/CHANGELOG.md
@@ -0,0 +1,4 @@
+# Changelog
+
+## Prerelease logging
+- Development RC phase
\ No newline at end of file
diff --git a/javascript/package.json b/javascript/package.json
index c9958ed..224a9bc 100644
--- a/javascript/package.json
+++ b/javascript/package.json
@@ -6,9 +6,9 @@
"@date-io/moment": "^2.14.0",
"@emotion/react": "^11.9.0",
"@emotion/styled": "^11.8.1",
- "@fortawesome/fontawesome-svg-core": "^6.1.1",
- "@fortawesome/free-solid-svg-icons": "^6.1.1",
- "@fortawesome/react-fontawesome": "^0.1.18",
+ "@fortawesome/fontawesome-svg-core": "^6.3.0",
+ "@fortawesome/free-solid-svg-icons": "^6.3.0",
+ "@fortawesome/react-fontawesome": "^0.2.0",
"@hookform/error-message": "^2.0.0",
"@hookform/resolvers": "^2.8.10",
"@mui/icons-material": "^5.8.0",
@@ -18,19 +18,49 @@
"@testing-library/react": "^13.2.0",
"@testing-library/user-event": "^13.5.0",
"axios": "^0.27.2",
+ "bootstrap": "^5.2.2",
+ "datatable": "^2.0.2",
+ "datatables.net": "^1.13.1",
+ "datatables.net-buttons": "^2.3.2",
+ "datatables.net-buttons-dt": "^2.3.3",
+ "datatables.net-dt": "^1.13.1",
+ "dateformat": "^5.0.3",
+ "html-react-parser": "^3.0.8",
+ "i18next": "^22.4.9",
+ "i18next-browser-languagedetector": "^7.0.1",
+ "i18next-http-backend": "^2.1.1",
+ "jquery": "^3.6.1",
+ "jquery-mapael": "^2.2.0",
+ "json-loader": "^0.5.7",
+ "jwt-decode": "^3.1.2",
"moment": "^2.29.3",
+ "pdfmake": "^0.2.6",
"react": "^18.1.0",
+ "react-bootstrap": "^2.7.2",
+ "react-bootstrap-sidebar-menu": "^2.0.3",
+ "react-cookie": "^4.1.1",
"react-date-picker": "^8.4.0",
"react-datepicker": "^4.8.0",
"react-dom": "^18.1.0",
+ "react-dropdown": "^1.11.0",
+ "react-google-charts": "^4.0.0",
"react-hook-form": "^7.31.1",
+ "react-i18next": "^12.1.5",
"react-query": "^3.39.0",
"react-router-dom": "^6.3.0",
"react-scripts": "5.0.1",
- "react-toastify": "^9.0.1",
+ "react-select": "^5.6.1",
+ "react-tabs": "^6.0.0",
+ "react-toastify": "^9.1.1",
+ "react-tooltip": "^4.5.0",
+ "sass": "^1.58.0",
"web-vitals": "^2.1.4",
"yup": "^0.32.11"
},
+ "devDependencies": {
+ "@babel/plugin-proposal-private-property-in-object": "*"
+ },
+ "homepage": ".",
"scripts": {
"start": "react-scripts start",
"build": "react-scripts build",
diff --git a/javascript/public/index.html b/javascript/public/index.html
new file mode 100644
index 0000000..10b7a22
--- /dev/null
+++ b/javascript/public/index.html
@@ -0,0 +1,23 @@
+
+
+
+
+
+
+
+
+
+
+ Rciam Metrics
+
+
+ You need to enable JavaScript to run this app.
+
+
+
diff --git a/javascript/src/App.jsx b/javascript/src/App.jsx
index f71f02d..1ff758c 100644
--- a/javascript/src/App.jsx
+++ b/javascript/src/App.jsx
@@ -1,25 +1,95 @@
+import React, {useState, useEffect} from "react";
+import {Route, Routes} from 'react-router-dom'
+import Communities from "./Pages/Communities";
+import Users from "./Pages/Users";
+import Dashboard from "./Pages/Dashboard";
+import Idps from "./Pages/Idps";
+import Sps from "./Pages/Sps";
+import Sp from "./Pages/Sps/sp";
+import Idp from "./Pages/Idps/idp";
+import Login from "./Pages/Authentication/Login";
import "./app.css";
-import {BrowserRouter as Router, Routes, Route} from "react-router-dom";
-import Login from "./Pages/Login";
-import Register from "./Pages/Register";
+import "./style.scss";
+import 'react-toastify/dist/ReactToastify.css';
+import jwt_decode from "jwt-decode";
+import {
+ languageContext,
+ userinfoContext
+} from "./Context/context";
+import Layout from "./components/Common/layout";
+import SideNav from "./components/Common/sideNav";
+import Main from "./components/Common/main";
+import {ToastContainer} from "react-toastify";
import ErrorPage from "./Pages/Error";
-import {QueryClient, QueryClientProvider} from 'react-query'
+import {useCookies} from 'react-cookie';
+import {toast} from 'react-toastify';
+import Middleware from "./components/Common/middleware"
+
function App() {
- const queryClient = new QueryClient()
-
- return (
-
-
-
- }/>
- }/>
- }/>
- }/>
-
-
-
- );
+ const [language, setLanguage] = useState('en')
+ const [userInfo, setUserInfo] = useState(null)
+ const [permissions, setPermissions] = useState(null)
+ const [cookies, setCookie] = useCookies();
+
+ useEffect(() => {
+ if (cookies.userinfo != undefined) {
+ setUserInfo(jwt_decode(cookies.userinfo))
+ }
+ if (cookies.permissions != undefined) {
+ // The backend will send an encoded permissions while
+ // the frontend adds a simple json value
+ try {
+ setPermissions(jwt_decode(cookies.permissions))
+ } catch (error) {
+ setPermissions(cookies.permissions)
+ }
+ }
+ }, [cookies.userinfo, cookies.permissions])
+
+ useEffect(() => {
+ if (userInfo != undefined) {
+ toast.info(`Welcome ${userInfo.name}`)
+ }
+ }, [userInfo])
+
+ return (
+
+
+
+
+
+
+
+
+
+
+
+ );
}
+function AppRoutes() {
+ return (
+
+ }/>
+ }/>
+ }/>
+ }/>
+ }/>
+ }/>
+ }/>
+ }/>
+ }/>
+
+ )
+}
+
+
export default App;
\ No newline at end of file
diff --git a/javascript/src/Context/UserContext.js b/javascript/src/Context/UserContext.js
index 4111568..6c1e7cb 100644
--- a/javascript/src/Context/UserContext.js
+++ b/javascript/src/Context/UserContext.js
@@ -1,3 +1,3 @@
-import { createContext } from "react";
+import React, { createContext } from "react";
export const UserContext = createContext(null);
diff --git a/javascript/src/Context/UserProvider.js b/javascript/src/Context/UserProvider.js
index 0eabaca..91074de 100644
--- a/javascript/src/Context/UserProvider.js
+++ b/javascript/src/Context/UserProvider.js
@@ -1,5 +1,4 @@
-import * as React from 'react'
-import {useState} from "react";
+import React, {useState} from 'react'
import {UserContext} from "./UserContext";
diff --git a/javascript/src/Context/context.js b/javascript/src/Context/context.js
new file mode 100644
index 0000000..42416e6
--- /dev/null
+++ b/javascript/src/Context/context.js
@@ -0,0 +1,5 @@
+import React from 'react';
+
+export const userContext = React.createContext();
+export const languageContext = React.createContext();
+export const userinfoContext = React.createContext();
\ No newline at end of file
diff --git a/javascript/src/Context/provider.js b/javascript/src/Context/provider.js
new file mode 100644
index 0000000..d09c763
--- /dev/null
+++ b/javascript/src/Context/provider.js
@@ -0,0 +1,23 @@
+import * as React from 'react'
+import {BrowserRouter as Router} from 'react-router-dom'
+import {QueryClient, QueryClientProvider} from 'react-query'
+import {CookiesProvider} from 'react-cookie';
+
+function AppProviders({children}) {
+
+ const queryClient = new QueryClient()
+
+ return (
+
+
+
+ {children}
+
+
+
+ )
+}
+
+export {
+ AppProviders
+}
\ No newline at end of file
diff --git a/javascript/src/Pages/Authentication/Login.js b/javascript/src/Pages/Authentication/Login.js
new file mode 100644
index 0000000..0975665
--- /dev/null
+++ b/javascript/src/Pages/Authentication/Login.js
@@ -0,0 +1,28 @@
+import React from "react";
+import Button from "react-bootstrap/Button";
+import {useTranslation} from "react-i18next";
+import {useCookies} from 'react-cookie';
+import config from '../../config.json'
+
+function Login() {
+ const {t, i18n} = useTranslation();
+ const [cookies, setCookie] = useCookies(['login_start']);
+
+ const handleLoginClick = () => {
+ // Set a cookie with the current location so the backend knows where to go
+ setCookie('login_start', window.location.href, {path: '/'});
+ // This is not a request but a redirect. So i will include the x-keys here
+ setCookie('x-tenant', config.tenant, {path: '/'});
+ setCookie('x-environment', config.environment, {path: '/'});
+ // Redirect to the login endpoint
+ window.location.href = config?.login_url
+ }
+
+ return (
+ {t('login')}
+ )
+}
+
+export default Login
\ No newline at end of file
diff --git a/javascript/src/Pages/Communities/index.js b/javascript/src/Pages/Communities/index.js
new file mode 100644
index 0000000..e9e43f7
--- /dev/null
+++ b/javascript/src/Pages/Communities/index.js
@@ -0,0 +1,62 @@
+import React, {useState, useEffect} from "react";
+import {useQuery} from 'react-query';
+import Container from "react-bootstrap/Container";
+import CommunitiesChart from "../../components/Communities/communitiesChart";
+import CommunitiesDataTable from "../../components/Communities/communitiesDataTable";
+import CommunitiesMap from "../../components/Communities/communitiesMap";
+import Header from "../../components/Common/header";
+import Footer from "../../components/Common/footer";
+import Spinner from "../../components/Common/spinner"
+import Col from 'react-bootstrap/Col';
+import Row from 'react-bootstrap/Row';
+import {tenenvKey} from '../../utils/queryKeys'
+import {getTenenv} from '../../utils/queries'
+import {useCookies} from "react-cookie";
+
+const Communities = () => {
+ const [tenenvId, setTenenvId] = useState(0);
+ const [cookies, setCookie] = useCookies();
+
+ const tenant = cookies['x-tenant']
+ const environment = cookies['x-environment']
+
+
+ const tenenv = useQuery(
+ [tenenvKey, {tenantId: tenant, environment: environment}],
+ getTenenv, {
+ retry: 0,
+ })
+
+ useEffect(() => {
+ setTenenvId(tenenv?.data?.[0]?.id)
+ }, [!tenenv.isLoading
+ && tenenv.isSuccess
+ && !tenenv.isFetching])
+
+ if(tenenv.isLoading
+ || tenenv.isFetching) {
+ return ( )
+ }
+
+ if (tenenvId == undefined
+ || tenenvId == 0
+ || tenenvId == "") {
+ return null
+ }
+
+ return (
+
+
+
+
+ Communities
+
+
+
+
+
+
+ )
+
+}
+export default Communities;
diff --git a/javascript/src/Pages/Dashboard/index.js b/javascript/src/Pages/Dashboard/index.js
new file mode 100644
index 0000000..3e9b1a5
--- /dev/null
+++ b/javascript/src/Pages/Dashboard/index.js
@@ -0,0 +1,112 @@
+import React, {useState, useEffect} from "react";
+import Form from 'react-bootstrap/Form';
+import LoginDataTable from "../../components/Dashboard/loginDataTable";
+import LoginIdpPieChart from "../../components/Dashboard/loginIdpPieChart";
+import LoginLineChart from "../../components/Dashboard/loginLineChart";
+import LoginsMap from "../../components/Dashboard/loginsMap";
+import LoginSpPieChart from "../../components/Dashboard/loginSpPieChart";
+import LoginTiles from "../../components/Dashboard/loginTiles";
+import Header from "../../components/Common/header";
+import Footer from "../../components/Common/footer";
+import Col from 'react-bootstrap/Col';
+import Row from 'react-bootstrap/Row';
+import {Container} from "react-bootstrap";
+import {useQuery} from "react-query";
+import {tenenvKey} from "../../utils/queryKeys";
+import {getTenenv} from "../../utils/queries";
+import {useNavigate} from "react-router-dom";
+import { formatEndDate, formatStartDate } from "../../components/Common/utils";
+import {useCookies} from "react-cookie";
+
+const Dashboard = () => {
+ const oneYearAgo = new Date();
+ oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);
+ formatStartDate(oneYearAgo)
+
+ const today = new Date();
+ today.setDate(today.getDate() - 1);
+ formatEndDate(today)
+
+ const [startDate, setStartDate] = useState(oneYearAgo);
+ const [endDate, setEndDate] = useState(today);
+ const [minDate, setMinDate] = useState(null);
+ const [uniqueLogins, setUniqueLogins] = useState(false);
+ const [tenenvId, setTenenvId] = useState(0);
+ const [cookies, setCookie] = useCookies();
+ const tenant = cookies['x-tenant']
+ const environment = cookies['x-environment']
+
+ const tenenv = useQuery(
+ [tenenvKey, {tenantId: tenant, environment: environment}],
+ getTenenv, {
+ retry: 0,
+ refetchOnWindowFocus: false
+ })
+
+ useEffect(() => {
+ setTenenvId(tenenv?.data?.[0]?.id)
+ }, [!tenenv.isLoading
+ && tenenv.isSuccess
+ && !tenenv.isFetching])
+
+ const handleChange = event => {
+ setUniqueLogins(event.target.checked);
+ }
+
+ let navigate = useNavigate();
+
+ const goToSpecificProvider = (id, provider) => {
+ const path = provider === "sp" ?
+ `/metrics/services/${id}` :
+ `/metrics/identity-providers/${id}`
+ navigate(path);
+ }
+
+ if (tenenvId == undefined
+ || tenenvId == 0
+ || tenenvId == "") {
+ return
+ }
+
+ return (
+
+
+
+
+ Dashboard
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ )
+
+}
+export default Dashboard;
diff --git a/javascript/src/Pages/Idps/idp.js b/javascript/src/Pages/Idps/idp.js
new file mode 100644
index 0000000..0aa006e
--- /dev/null
+++ b/javascript/src/Pages/Idps/idp.js
@@ -0,0 +1,133 @@
+import React, {useState, useEffect} from "react";
+import {useNavigate, useParams} from "react-router-dom";
+import {Tab, Tabs, TabList, TabPanel} from 'react-tabs';
+import LoginLineChart from "../../components/Dashboard/loginLineChart";
+import LoginSpPieChart from "../../components/Dashboard/loginSpPieChart";
+import LoginTiles from "../../components/Dashboard/loginTiles";
+import SpsDataTable from "../../components/Sps/spsDataTable";
+import Form from 'react-bootstrap/Form';
+import Container from "react-bootstrap/Container";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import EntityInfoIdp from "../../components/Common/entityInfoIdp";
+import IdpMap from "../../components/Idps/idpMap";
+import IdpMapToDataTable from "../../components/Idps/idpMapToDataTable";
+import Header from "../../components/Common/header";
+import 'react-tabs/style/react-tabs.css';
+import {useQuery} from "react-query";
+import {tenenvKey} from "../../utils/queryKeys";
+import {getTenenv} from "../../utils/queries";
+import {useCookies} from "react-cookie";
+
+const Idp = () => {
+ const {id} = useParams();
+ const [tenenvId, setTenenvId] = useState(0);
+ const [uniqueLogins, setUniqueLogins] = useState(false);
+ const [startDate, setStartDate] = useState(null);
+ const [endDate, setEndDate] = useState(null);
+ const [cookies, setCookie] = useCookies();
+
+ const tenant = cookies['x-tenant']
+ const environment = cookies['x-environment']
+
+
+ const tenenv = useQuery(
+ [tenenvKey, {tenantId: tenant, environment: environment}],
+ getTenenv, {
+ retry: 0,
+ })
+
+
+ useEffect(() => {
+ if (!!tenenv?.data?.[0]?.id) {
+ setTenenvId(tenenv?.data?.[0]?.id)
+ }
+ }, [!tenenv.isLoading
+ && tenenv.isSuccess
+ && !tenenv.isFetching])
+
+ const handleChange = event => {
+ setUniqueLogins(event.target.checked);
+ }
+
+ let navigate = useNavigate();
+
+ const goToSpecificProvider = (id, provider) => {
+ const path = provider === "sp" ?
+ `/metrics/services/${id}` :
+ `/metrics/identity-providers/${id}`
+ navigate(path);
+ }
+
+ if (tenenvId == undefined || tenenvId == 0 || tenenvId == "") return;
+
+ return (
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {/* TODO: MOVE THE FOLLOWING SECTION TO ITS OWN ELEMENT. IT NEEDS TO RELOAD EVERY
+ TIME WE PICK A NEW DATE*/}
+ <>
+
+
+
+ Map
+ Datatable
+
+
+
+
+
+
+
+
+
+ >
+
+ )
+}
+
+export default Idp
\ No newline at end of file
diff --git a/javascript/src/Pages/Idps/index.js b/javascript/src/Pages/Idps/index.js
new file mode 100644
index 0000000..be751a5
--- /dev/null
+++ b/javascript/src/Pages/Idps/index.js
@@ -0,0 +1,95 @@
+import React, {useState, useEffect} from "react";
+import Container from "react-bootstrap/Container";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import Form from 'react-bootstrap/Form';
+import LoginIdpPieChart from "../../components/Dashboard/loginIdpPieChart";
+import LoginTiles from "../../components/Dashboard/loginTiles";
+import IdpsDataTable from "../../components/Idps/idpsDataTable";
+import Header from "../../components/Common/header";
+import {useQuery} from "react-query";
+import {tenenvKey} from "../../utils/queryKeys";
+import {getTenenv} from "../../utils/queries";
+import {useNavigate} from "react-router-dom";
+import {formatStartDate, formatEndDate} from "../../components/Common/utils";
+import {useCookies} from "react-cookie";
+
+const Idps = () => {
+ const oneYearAgo = new Date();
+ oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);
+ formatStartDate(oneYearAgo)
+
+ const today = new Date();
+ today.setDate(today.getDate() - 1);
+ formatEndDate(today)
+ const [uniqueLogins, setUniqueLogins] = useState(false);
+ const [tenenvId, setTenenvId] = useState(0);
+ const [endDate, setEndDate] = useState(today);
+ const [startDate, setStartDate] = useState(oneYearAgo);
+ const [cookies, setCookie] = useCookies();
+
+ const tenant = cookies['x-tenant']
+ const environment = cookies['x-environment']
+
+
+ const tenenv = useQuery(
+ [tenenvKey, {tenantId: tenant, environment: environment}],
+ getTenenv, {
+ retry: 0,
+ })
+
+ useEffect(() => {
+ setTenenvId(tenenv?.data?.[0]?.id)
+ }, [!tenenv.isLoading
+ && tenenv.isSuccess
+ && !tenenv.isFetching])
+
+ const handleChange = event => {
+ setUniqueLogins(event.target.checked);
+ }
+
+ let navigate = useNavigate();
+
+ const goToSpecificProvider = (id, provider) => {
+ const path = provider === "sp" ?
+ `/metrics/services/${id}` :
+ `/metrics/identity-providers/${id}`
+ navigate(path);
+ }
+
+ if (tenenvId == undefined || tenenvId == 0 || tenenvId == "") return
+
+ return (
+
+
+
+
+ Identity Providers Logins
+
+
+
+
+
+
+
+
+
+
+ )
+
+}
+export default Idps;
diff --git a/javascript/src/Pages/Login/index.js b/javascript/src/Pages/Login/index.js
deleted file mode 100644
index 2c5e230..0000000
--- a/javascript/src/Pages/Login/index.js
+++ /dev/null
@@ -1,117 +0,0 @@
-import {useState, useContext, useEffect} from "react";
-import "../../app.css";
-import {Link, useNavigate} from "react-router-dom";
-import {useForm} from 'react-hook-form';
-import {useMutation} from 'react-query';
-import {toast} from 'react-toastify';
-import {loginUser} from "../../utils/queryKeys";
-import {client} from '../../utils/api';
-import {UserContext} from "../../Context/UserContext";
-
-const Login = () => {
- const navigate = useNavigate();
- const {currentUser, setCurrentUser} = useContext(UserContext);
-
- useEffect(() => {
- const stored_user = JSON.parse(localStorage.getItem('logged_in_user'))
- // Retrieve the stored user from the local storage
- if (stored_user != undefined && Object.keys(stored_user).length !== 0) {
- setCurrentUser(stored_user)
- }
- // Check if my user exists
- if (Object.keys(currentUser).length !== 0) {
- navigate("/");
- }
- }, [])
-
- const [values, setValues] = useState({
- email: "",
- password: "",
- });
-
- const onChange = (e) => {
- setValues({...values, [e.target.name]: e.target.value});
- };
-
- // FORM
- const {register, handleSubmit, reset, formState: {errors}} = useForm();
-
- async function postForm(data) {
- const {email, password} = data
-
- return await client.post(loginUser, {
- email: email,
- password: password
- })
- }
-
- const {mutateAsync: sendData} = useMutation(postForm);
-
- const notifyError = () => toast.error("Login Failed.")
-
- const onSubmit = async (data, e) => {
- try {
- const response = await sendData(data)
- setCurrentUser(response.data)
- localStorage.setItem('logged_in_user', JSON.stringify(response.data));
- reset()
- navigate('/')
- } catch (err) {
- notifyError()
- reset()
- }
- }
-
- // ELEMENT
- return (
-
- );
-};
-
-export default Login;
diff --git a/javascript/src/Pages/Register/index.js b/javascript/src/Pages/Register/index.js
deleted file mode 100644
index 03f26bb..0000000
--- a/javascript/src/Pages/Register/index.js
+++ /dev/null
@@ -1,153 +0,0 @@
-import {useState, useContext} from "react";
-import "../../app.css";
-import "../../components/Common/Style/formInput.css";
-import {Link, useNavigate} from "react-router-dom";
-import {useForm} from "react-hook-form";
-import {client} from "../../utils/api";
-import {allUsers} from "../../utils/queryKeys";
-import {useMutation} from "react-query";
-import {toast} from "react-toastify";
-import {UserContext} from "../../Context/UserContext";
-
-const Register = () => {
- const {currentUser, setCurrentUser} = useContext(UserContext);
- const navigate = useNavigate();
-
- const [values, setValues] = useState({
- first_name: "",
- last_name: "",
- email: "",
- password: "",
- });
-
- const onChange = (e) => {
- setValues({...values, [e.target.name]: e.target.value});
- };
-
- // FORM
- const {register, handleSubmit, reset, formState: {errors}} = useForm();
-
- async function postForm(data) {
- const {first_name, last_name, email, password} = data
-
- return await client.post(allUsers, {
- first_name: first_name,
- last_name: last_name,
- email: email,
- password: password
- })
- }
-
- const {mutateAsync: sendData} = useMutation(postForm);
-
- const notifyError = () => toast.error("Registration Failed.")
-
- const onSubmit = async (data, e) => {
- try {
- const response = await sendData(data)
- reset()
- setCurrentUser(response.config.data)
- navigate("/");
- } catch (err) {
- notifyError()
- reset()
- // throw new Error(err)
- }
- }
-
- // ELEMENT
- return (
-
- );
-};
-
-export default Register;
diff --git a/javascript/src/Pages/Sps/index.js b/javascript/src/Pages/Sps/index.js
new file mode 100644
index 0000000..a15c9e1
--- /dev/null
+++ b/javascript/src/Pages/Sps/index.js
@@ -0,0 +1,94 @@
+import React, {useState, useEffect} from "react";
+import Container from "react-bootstrap/Container";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import Form from 'react-bootstrap/Form';
+import LoginSpPieChart from "../../components/Dashboard/loginSpPieChart";
+import LoginTiles from "../../components/Dashboard/loginTiles";
+import SpsDataTable from "../../components/Sps/spsDataTable";
+import Header from "../../components/Common/header";
+import {useQuery} from "react-query";
+import {tenenvKey} from "../../utils/queryKeys";
+import {getTenenv} from "../../utils/queries";
+import {useNavigate} from "react-router-dom";
+import {formatStartDate, formatEndDate} from "../../components/Common/utils";
+import {useCookies} from "react-cookie";
+
+const Sps = () => {
+ const oneYearAgo = new Date();
+ oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);
+ formatStartDate(oneYearAgo)
+
+ const today = new Date();
+ today.setDate(today.getDate() - 1);
+ formatEndDate(today)
+ const [uniqueLogins, setUniqueLogins] = useState(false);
+ const [tenenvId, setTenenvId] = useState(0);
+ const [endDate, setEndDate] = useState(today);
+ const [startDate, setStartDate] = useState(oneYearAgo);
+ const [cookies, setCookie] = useCookies();
+
+ const tenant = cookies['x-tenant']
+ const environment = cookies['x-environment']
+
+
+ const tenenv = useQuery(
+ [tenenvKey, {tenantId: tenant, environment: environment}],
+ getTenenv, {
+ retry: 0,
+ })
+
+ useEffect(() => {
+ setTenenvId(tenenv?.data?.[0]?.id)
+ }, [!tenenv.isLoading
+ && tenenv.isSuccess
+ && !tenenv.isFetching])
+
+ const handleChange = event => {
+ setUniqueLogins(event.target.checked);
+ }
+
+ let navigate = useNavigate();
+
+ const goToSpecificProvider = (id, provider) => {
+ const path = provider === "sp" ?
+ `/metrics/services/${id}` :
+ `/metrics/identity-providers/${id}`
+ navigate(path);
+ }
+
+ if (tenenvId == undefined || tenenvId == 0 || tenenvId == "") return
+
+ return (
+
+
+
+
+ Service Providers Logins
+
+
+
+
+
+
+
+
+
+ )
+
+}
+export default Sps;
diff --git a/javascript/src/Pages/Sps/sp.js b/javascript/src/Pages/Sps/sp.js
new file mode 100644
index 0000000..a39d77d
--- /dev/null
+++ b/javascript/src/Pages/Sps/sp.js
@@ -0,0 +1,139 @@
+import React, {useState, useEffect} from "react";
+import {useNavigate, useParams} from "react-router-dom";
+import {Tab, Tabs, TabList, TabPanel} from 'react-tabs';
+import LoginLineChart from "../../components/Dashboard/loginLineChart";
+import LoginTiles from "../../components/Dashboard/loginTiles";
+import Form from 'react-bootstrap/Form';
+import Container from "react-bootstrap/Container";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import EntityInfoSp from "../../components/Common/entityInfoSp";
+import LoginIdpPieChart from "../../components/Dashboard/loginIdpPieChart";
+import IdpsDataTable from "../../components/Idps/idpsDataTable";
+import SpMap from "../../components/Sps/spMap";
+import SpMapToDataTable from "../../components/Sps/spMapToDataTable";
+import Header from "../../components/Common/header";
+import 'react-tabs/style/react-tabs.css';
+import {useQuery} from "react-query";
+import {tenenvKey} from "../../utils/queryKeys";
+import {getTenenv} from "../../utils/queries";
+import Spinner from "../../components/Common/spinner"
+import {useCookies} from "react-cookie";
+
+const Sp = () => {
+ const {id} = useParams();
+ const [tenenvId, setTenenvId] = useState(0);
+ const [uniqueLogins, setUniqueLogins] = useState(false);
+ const [startDate, setStartDate] = useState(null);
+ const [endDate, setEndDate] = useState(null);
+ const [cookies, setCookie] = useCookies();
+
+ const tenant = cookies['x-tenant']
+ const environment = cookies['x-environment']
+
+ const tenenv = useQuery(
+ [tenenvKey, {tenantId: tenant, environment: environment}],
+ getTenenv, {
+ retry: 0,
+ })
+
+
+ useEffect(() => {
+ if (!!tenenv?.data?.[0]?.id) {
+ setTenenvId(tenenv?.data?.[0]?.id)
+ }
+ }, [!tenenv.isLoading
+ && tenenv.isSuccess
+ && !tenenv.isFetching])
+
+ const handleChange = event => {
+ setUniqueLogins(event.target.checked);
+ }
+
+ let navigate = useNavigate();
+
+ const goToSpecificProvider = (id, provider) => {
+ const path = provider === "sp" ?
+ `/metrics/services/${id}` :
+ `/metrics/identity-providers/${id}`
+ navigate(path);
+ }
+
+ if (tenenv.isLoading
+ || tenenv.isFetching) {
+ return ( )
+ }
+
+ if (tenenvId == undefined
+ || tenenvId == 0
+ || tenenvId == "") {
+ return null
+ }
+
+
+ return (
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Map
+ Datatable
+
+
+
+
+
+
+
+
+
+
+ )
+}
+
+export default Sp
\ No newline at end of file
diff --git a/javascript/src/Pages/Users/index.js b/javascript/src/Pages/Users/index.js
new file mode 100644
index 0000000..1e399b1
--- /dev/null
+++ b/javascript/src/Pages/Users/index.js
@@ -0,0 +1,71 @@
+import React, {useState, useContext, useEffect} from "react";
+import Container from "react-bootstrap/Container";
+import RegisteredUsersChart from "../../components/Users/registeredUsersChart";
+import RegisteredUsersDataTable from "../../components/Users/registeredUsersDataTable";
+import RegisteredUsersMap from "../../components/Users/registeredUsersMap";
+import RegisteredUsersTiles from "../../components/Users/registeredUsersTiles";
+import Header from "../../components/Common/header";
+import Footer from "../../components/Common/footer";
+import Col from 'react-bootstrap/Col';
+import Row from 'react-bootstrap/Row';
+import {useQuery} from "react-query";
+import {tenenvKey} from "../../utils/queryKeys";
+import {getTenenv} from "../../utils/queries";
+import {formatStartDate, formatEndDate} from "../../components/Common/utils";
+import {useCookies} from "react-cookie";
+
+const Users = () => {
+ const oneYearAgo = new Date();
+ oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);
+ formatStartDate(oneYearAgo)
+
+ const today = new Date();
+ today.setDate(today.getDate() - 1);
+ formatEndDate(today)
+ const [tenenvId, setTenenvId] = useState(0);
+ const [startDate, setStartDate] = useState(oneYearAgo);
+ const [endDate, setEndDate] = useState(today);
+ const [cookies, setCookie] = useCookies();
+
+ const tenant = cookies['x-tenant']
+ const environment = cookies['x-environment']
+
+
+ const tenenv = useQuery(
+ [tenenvKey, {tenantId: tenant, environment: environment}],
+ getTenenv, {
+ retry: 0,
+ })
+
+ useEffect(() => {
+ setTenenvId(tenenv?.data?.[0]?.id)
+ }, [!tenenv.isLoading
+ && tenenv.isSuccess
+ && !tenenv.isFetching])
+
+
+ if (tenenvId == undefined || tenenvId == 0 || tenenvId == "") return
+
+ return (
+
+
+
+
+ Users
+
+
+
+
+
+
+
+ )
+
+}
+export default Users;
diff --git a/javascript/src/app.css b/javascript/src/app.css
index d1e0043..615a5ba 100644
--- a/javascript/src/app.css
+++ b/javascript/src/app.css
@@ -1,8 +1,329 @@
-.app {
+a {
+ text-decoration: none;
+}
+body {
+ color: #212529;
+ font-family: -apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Helvetica Neue,Arial,Noto Sans,Liberation Sans,sans-serif,Apple Color Emoji,Segoe UI Emoji,Segoe UI Symbol,Noto Color Emoji;
+ font-size: 1rem;
+ font-weight: 400;
+  line-height: 1.5;
+ text-align: left;
+}
+
+body .nav-header {
+ color: #4b4b4b;
+ font-size: 15px;
+ font-weight: 300;
+}
+
+.tenenv_logo_container, .ssp-footer {
+ flex: none;
+}
+
+.tenenv_logo_container .ssp-logo {
+ margin-top: 10px;
+}
+
+.tenenv_logo_container .ssp-logo a {
+ display: inline-block;
+ padding: 10px;
+}
+
+.tenenv_logo_container .ssp-logo img {
+ max-height: 60px;
+}
+
+.tenenv_logo_container h1 {
+ font-family: 'Open Sans', sans-serif;
+ font-weight: 300;
+ font-size: 26px;
+ text-transform: none !important;
+ color: #5b5b5b;
+}
+
+/* Development Banner */
+
+.noty-top-global {
display: flex;
+ justify-content: space-between;
+ align-items: center;
+ width: 100%;
+ z-index: 2000;
+ padding: 0.5em;
+}
+
+.noty-top-global svg {
+ margin-right: 0.5rem;
+}
+
+.noty-top-info {
+ color: #00529B;
+ background-color: #BDE5F8;
+}
+
+.noty-top-info, .noty-top-success, .noty-top-warning, .noty-top-error {
+ border-bottom: 1px solid;
+}
+
+.noty-top-error {
+ color: #D8000C;
+ background-color: #FFBABA;
+}
+
+.noty-top-warning {
+ color: #9F6000;
+ background-color: #FEEFB3;
+}
+
+.link-button {
+ color: #774b08;
+ background-color: transparent;
+ border: none;
+ cursor: pointer;
+ text-decoration: underline;
+ display: inline;
+ margin: 0;
+ padding: 0;
+}
+
+.link-button:hover {
+ opacity: 0.7;
+}
+
+/* Navbar */
+
+.navbar-fixed-top {
+ z-index: 30!important;
+}
+
+.navbar-fixed-top, .navbar-fixed-bottom {
+ position: inherit;
+ right: 0;
+ left: 0;
+ z-index: 1030;
+ margin-bottom: 0;
+}
+
+.navbar-fixed-top .navbar-inner {
+ border: 0 none;
+ box-shadow: none;
+ background: transparent;
+}
+
+.drop-menu a {
+ font:300 13px / 23px 'Manrope' !important;
+ padding:0.2rem 1rem!important;
+ }
+
+.drop-menu a svg {
+ margin-left: 0.2rem;
+}
+
+/*.drop-container-header .dropdown-menu {*/
+/* font-size: 4rem!important;*/
+/*}*/
+
+.drop-container-header .dropdown-menu a {
+ padding:0.3rem 1rem!important;
+ font-size:1rem!important;
+}
+
+.dropdown-menu[data-bs-popper] {
+ left: auto !important;
+ right: 0 !important;
+}
+
+.log-button {
+ color: rgb(89, 154, 219)!important;
+ border-color: rgb(89, 154, 219)!important;
+ padding: .2rem .75rem!important;
+}
+
+.log-button:hover {
+ color:white!important;
+}
+
+/* General */
+
+.text-center {
+ text-align: center;
+}
+
+/* Footer */
+
+.navbar .container {
+ width: 100%;
+ max-width: 100%;
+}
+
+.ssp-footer--container {
+ padding: 12px 18px 0.5rem;
+}
+
+.ssp-footer__item {
+ min-height: 60px;
+}
+.ssp-footer__item {
+ font-size: 12px;
+ letter-spacing: 1px;
+ display: table-cell;
+ vertical-align: bottom!important;
+}
+
+#footer {
+ background: #fff;
+ color: #000;
+ font-family: "Helvetica Neue";
+ font-weight: 300;
+ overflow: hidden;
+ padding: 0 30px 15px;
+ text-align: center;
+ min-height: 80px;
+}
+
+#footer a {
+ color: #0a559c;
+ text-decoration: none;
+}
+
+#footer a:hover {
+ text-decoration: underline;
+}
+
+#footer .ssp-footer__item {
+ min-height: 40px;
+ font-size: 12px;
+ display: table-cell;
+ vertical-align: bottom!important;
+}
+
+.ssp-footer__item {
+ display: table-cell;
+ vertical-align: bottom!important;
+}
+
+#footer .ssp-footer__item__logo {
+ height: 40px;
+}
+
+#footer .ssp-footer__item__powered {
+ display: table-cell;
+ vertical-align: bottom;
+}
+
+.ssp-footer__item__powered {
+ margin-top: .5rem;
+ position: relative;
+}
+
+#footer .powered {
+ display: table;
+}
+
+#footer * {
+ font-size: 14px;
+}
+
+#footer .row {
+ padding: 8px;
+}
+
+.ssp-footer__item__logo {
+ height: 40px;
+}
+
+.ssp-footer__item--links a{
+ margin-right: 12px;
+ display: inline-block;
+ line-height: 50px;
+ vertical-align: baseline;
+}
+
+.ssp-footer__item__logo--eu {
+ height: 30px;
+ margin-left: 12px;
+}
+
+#footer .col-images a:hover {
+ text-decoration: none;
+}
+
+@media screen and (max-width: 768px ) {
+ .ssp-footer__item--links {
+ text-align: center;
+ margin: 0 6px;
+ }
+}
+
+.dropup .caret, .navbar-fixed-bottom .dropdown .caret {
+ border-top: 0;
+ border-bottom: 4px dashed;
+
+ content: "";
+}
+.caret {
+ border-left: 4px solid transparent;
+ border-right: 4px solid transparent;
+ border-top: 4px dashed;
+
+ display: inline-block;
+ height: 0;
+ margin-left: 2px;
+ vertical-align: middle;
+ width: 0;
+}
+.ssp-footer__item__lang .dropdown-toggle::after {
+ content:none!important;
+}
+.ssp-footer__item__lang {
+ width:100%;
+ text-align: center;
+}
+
+.ssp-btn {
+ letter-spacing: 1px;
+ font-size: 11px;
+ padding: 15px 18px;
+ line-height: 1.4;
+ border: 1px solid;
+ border-top-color: currentcolor;
+ border-right-color: currentcolor;
+ border-bottom-color: currentcolor;
+ border-left-color: currentcolor;
+ border-radius: 0;
+ margin: 6px 2px;
+ white-space: normal;
+}
+
+.footer-logo-container{
+ /* position:absolute; */
+ bottom:0;
+ width:100%;
+ text-align:center;
+}
+
+.footer_link_container {
+ height: 100%;
+ float:right;
+ text-align: center;
+ width:100%;
+}
+
+.copyright-funding-footer {
+ text-align: center;
+ margin-top: 0.5rem;
+ letter-spacing:0.03rem;
+}
+
+#dropdown-button-drop-up {
+ text-align: center;
+}
+
+/* .app {
+
align-items: center;
justify-content: center;
- height: 100vh;
+
background: linear-gradient(
rgba(255, 255, 255, 0.7),
rgba(255, 255, 255, 0.3)
@@ -10,7 +331,7 @@
url("https://images.pexels.com/photos/114979/pexels-photo-114979.jpeg?auto=compress&cs=tinysrgb&dpr=2&w=500");
background-size: cover;
background-position: center;
-}
+} */
form {
background-color: white;
@@ -62,6 +383,116 @@ button.reg-form {
border-color: #3f4454 transparent #3f4454 transparent;
animation: lds-dual-ring 1.2s linear infinite;
}
+.columnList {
+ margin: 21px 0px 0px 0px;
+ padding-left: 20px;
+ height: 214px;
+ overflow: auto;
+ text-align: left;
+ padding-right: 10px;
+}
+.tooltip {
+ max-width: 200px;
+}
+.dt-buttons {
+ position: relative;
+}
+.range_inputs {
+ display: flex;
+ align-items: center;
+ flex-wrap: wrap;
+}
+.range_inputs input {
+ padding: 0.5em;
+}
+.react-datepicker-wrapper {
+ width: auto!important;
+}
+.react-datepicker__tab-loop {
+ position: absolute!important;
+}
+/* Datatable */
+.table-responsive {
+ overflow-x: unset!important;
+}
+/* Map Style */
+.communityMembersByCountry {
+ min-height: 20em;
+ align-content: baseline;
+}
+.container_map {
+ position: relative;
+}
+.mapael .zoomButton {
+ background-color: #fff;
+ border: 1px solid #ccc;
+ color: #000;
+ width: 15px;
+ height: 15px;
+ line-height: 15px;
+ text-align: center;
+ border-radius: 3px;
+ cursor: pointer;
+ position: absolute;
+ top: 0;
+ font-weight: bold;
+ left: 10px;
+ -webkit-user-select: none;
+ -khtml-user-select: none;
+ -moz-user-select: none;
+ -o-user-select: none;
+ user-select: none;
+}
+
+.mapael .zoomReset {
+ top: 10px;
+}
+
+.mapael .zoomIn {
+ top: 30px;
+}
+
+.mapael .zoomOut {
+ top: 50px;
+}
+
+.mapael .mapTooltip {
+ position: absolute;
+ background-color: #474c4b;
+  -moz-opacity: 0.70;
+ opacity: 0.70;
+ filter: alpha(opacity=70);
+ border-radius: 10px;
+ padding: 10px;
+ z-index: 1000;
+ max-width: 200px;
+ display: none;
+ color: #fff;
+}
+.areaLegend {
+ background-color: #f5f5f5;
+ border: 1px solid #e3e3e3;
+ position: absolute;
+ top: 20px;
+ right: 40px;
+ padding: 10px;
+ display: none;
+
+}
+.areaLegend tspan {
+ font: inherit;
+ padding-bottom: 24px;
+ border-bottom: 1px solid #ccc;
+}
+#reactgooglegraph-1 {
+ height:350px!important;
+}
+#googlechart-control-0-1 {
+ height:50px;
+}
+footer {
+ padding-top: 1em;
+}
@keyframes lds-dual-ring {
0% {
transform: rotate(0deg);
@@ -69,4 +500,33 @@ button.reg-form {
100% {
transform: rotate(360deg);
}
+}
+/*Sidebar*/
+.sidebar {
+ position: fixed;
+ top: 0;
+ bottom: 0;
+ left: 0;
+ min-height: 100vh !important;
+ z-index: 100;
+ padding: 48px 0 0;
+ box-shadow: inset -1px 0 0 rgba(0, 0, 0, .1);
+}
+#sidebar-wrapper{
+ min-height: 100vh !important;
+ width: 100vw;
+ margin-left: -1rem;
+ -webkit-transition: margin .25s ease-out;
+ -moz-transition: margin .25s ease-out;
+ -o-transition: margin .25s ease-out;
+ transition: margin .25s ease-out;
+}
+#sidebar-wrapper .sidebar-heading {
+ padding: 0.875rem 1.25rem;
+ font-size: 1.2rem;
+}
+
+#page-content-wrapper {
+ min-width: 0;
+ width: 100%;
}
\ No newline at end of file
diff --git a/javascript/src/components/Common/Style/formInput.css b/javascript/src/components/Common/Style/formInput.css
deleted file mode 100644
index f05b967..0000000
--- a/javascript/src/components/Common/Style/formInput.css
+++ /dev/null
@@ -1,33 +0,0 @@
-.formInput {
- display: flex;
- flex-direction: column;
- width: 280px;
-}
-
-input {
- padding: 1em;
- margin-top: 0.25em;
- border-radius: 5px;
- border: 1px solid gray;
-}
-
-
-label {
- font-size: 12px;
- color: gray;
-}
-
-span.error {
- font-size: 12px;
- padding: 3px;
- color: red;
- margin-bottom: 1em;
-}
-
-input:invalid[focused="true"] {
- border: 1px solid red;
-}
-
-input:invalid[focused="true"] ~ span {
- display: block;
-}
\ No newline at end of file
diff --git a/javascript/src/components/Common/Style/spinner.css b/javascript/src/components/Common/Style/spinner.css
new file mode 100644
index 0000000..5f729d3
--- /dev/null
+++ b/javascript/src/components/Common/Style/spinner.css
@@ -0,0 +1,57 @@
+.lds-ellipsis {
+ position: fixed;
+ top: 50%;
+ left: 50%;
+ width: 80px;
+ height: 80px;
+ z-index: 1000;
+}
+.lds-ellipsis div {
+ position: absolute;
+ top: 33px;
+ width: 13px;
+ height: 13px;
+ border-radius: 50%;
+ background: #666;
+ animation-timing-function: cubic-bezier(0, 1, 1, 0);
+}
+.lds-ellipsis div:nth-child(1) {
+ left: 8px;
+ animation: lds-ellipsis1 0.6s infinite;
+}
+.lds-ellipsis div:nth-child(2) {
+ left: 8px;
+ animation: lds-ellipsis2 0.6s infinite;
+}
+.lds-ellipsis div:nth-child(3) {
+ left: 32px;
+ animation: lds-ellipsis2 0.6s infinite;
+}
+.lds-ellipsis div:nth-child(4) {
+ left: 56px;
+ animation: lds-ellipsis3 0.6s infinite;
+}
+@keyframes lds-ellipsis1 {
+ 0% {
+ transform: scale(0);
+ }
+ 100% {
+ transform: scale(1);
+ }
+}
+@keyframes lds-ellipsis3 {
+ 0% {
+ transform: scale(1);
+ }
+ 100% {
+ transform: scale(0);
+ }
+}
+@keyframes lds-ellipsis2 {
+ 0% {
+ transform: translate(0, 0);
+ }
+ 100% {
+ transform: translate(24px, 0);
+ }
+}
\ No newline at end of file
diff --git a/javascript/src/components/Common/dateRange.js b/javascript/src/components/Common/dateRange.js
new file mode 100644
index 0000000..c858bf1
--- /dev/null
+++ b/javascript/src/components/Common/dateRange.js
@@ -0,0 +1,25 @@
+import {getIdps, getSps} from "../../utils/queries";
+import {idpsKey, spsKey} from "../../utils/queryKeys";
+import {useQuery} from "react-query";
+import Spinner from "./spinner";
+import React from "react";
+
+const DateRange = ({
+ startDate,
+ endDate,
+ minDate,
+ maxDate
+}) => {
+if(minDate == null || maxDate == null)
+  return null
+
+return (
+ Number of Logins per Country from {startDate ? (
+ `${startDate.toLocaleDateString("en-GB")} to ${endDate.toLocaleDateString("en-GB")}`
+ ) : (
+ `${minDate.toLocaleDateString("en-GB")} to ${maxDate.toLocaleDateString("en-GB")}`
+ )}
+ )
+}
+
+export default DateRange
\ No newline at end of file
diff --git a/javascript/src/components/Common/earthMap.js b/javascript/src/components/Common/earthMap.js
new file mode 100644
index 0000000..feb6376
--- /dev/null
+++ b/javascript/src/components/Common/earthMap.js
@@ -0,0 +1,91 @@
+import React, {useCallback, useRef} from "react";
+import {
+ calculateLegends,
+ setLegend,
+ setMapConfiguration
+} from "./utils";
+import {StatusEnumeration} from "../../utils/helpers/enums";
+import $ from "jquery";
+
+const EarthMap = ({
+ datasetQuery,
+ tooltipLabel,
+ legendLabel
+ }) => {
+ const areaLegendRef = useRef(null)
+
+ const mapDrawRef = useCallback(node => {
+ if (datasetQuery?.data !== undefined
+ && node !== undefined) {
+ createMap(node, areaLegendRef, datasetQuery?.data, tooltipLabel, legendLabel)
+ }
+ }, [!datasetQuery.isLoading && datasetQuery.isSuccess && datasetQuery?.data])
+
+ // mapData need to be only one table. Not multiple ones.
+ const createMap = (node,
+ areaLegendRef,
+ mapData,
+ tooltipLabel,
+ legendLabel) => {
+ // Calculate tooltip
+ let areas = {};
+ let i = 1;
+ let maxSum = 0;
+
+ const mapStatsData = mapData?.stats ?? mapData
+ const mapStatusData = mapData?.status
+
+
+ mapStatsData?.forEach(function (mapRow) {
+ let contentTooltip = `${mapRow.country} ${tooltipLabel} : ${mapRow.sum}`
+
+ // Handle status
+ let other_status = 0;
+ !!mapStatusData
+ && mapStatusData.forEach(function (status_elem) {
+ if (status_elem.country === mapRow.country) {
+ if (status_elem.status !== 'A' && status_elem.status !== 'GP') {
+ other_status += status_elem.sum
+ } else {
+ contentTooltip += StatusEnumeration[status_elem.status] + ": " + status_elem.sum + " "
+ }
+ }
+
+ })
+ if (other_status > 0) {
+ contentTooltip += StatusEnumeration['O'] + ": " + other_status
+ }
+
+
+ areas[mapRow.countrycode] = {
+ value: mapRow.sum,
+ tooltip: {content: contentTooltip}
+ }
+ if (mapRow.sum > maxSum) {
+ maxSum = mapRow.sum;
+ }
+ i++;
+ })
+
+
+ // Calculate Legends
+ const legends = calculateLegends(maxSum)
+ $(areaLegendRef.current).show()
+ $(node).mapael({
+ map: setMapConfiguration(),
+ legend: setLegend(legendLabel, legends),
+ areas: areas
+ })
+ }
+
+ return (
+
+ )
+}
+
+export default EarthMap
\ No newline at end of file
diff --git a/javascript/src/components/Common/entityInfoIdp.js b/javascript/src/components/Common/entityInfoIdp.js
new file mode 100644
index 0000000..acbcaf0
--- /dev/null
+++ b/javascript/src/components/Common/entityInfoIdp.js
@@ -0,0 +1,38 @@
+import {getIdps, getSps} from "../../utils/queries";
+import {idpsKey, spsKey} from "../../utils/queryKeys";
+import {useQuery} from "react-query";
+import Spinner from "./spinner";
+import React from "react";
+
+const EntityInfoIdp = ({
+ tenenvId,
+ idpId
+ }) => {
+ const idpEntities =
+ useQuery(
+ [idpsKey, {
+ params: {
+ 'tenenv_id': tenenvId,
+ 'idpId': idpId
+ }
+ }],
+ getIdps,
+ {
+ enabled: !!idpId
+ })
+
+ if (idpEntities.isLoading
+ || idpEntities.isFetching) {
+ return ( )
+ }
+
+ if (idpEntities?.data?.length == 0) {
+ return null
+ }
+
+ return (
+ {idpEntities.data?.[0]?.name} ({idpEntities.data?.[0]?.entityid})
+ )
+}
+
+export default EntityInfoIdp
\ No newline at end of file
diff --git a/javascript/src/components/Common/entityInfoSp.js b/javascript/src/components/Common/entityInfoSp.js
new file mode 100644
index 0000000..f7ae706
--- /dev/null
+++ b/javascript/src/components/Common/entityInfoSp.js
@@ -0,0 +1,36 @@
+import {getSps} from "../../utils/queries";
+import {spsKey} from "../../utils/queryKeys";
+import {useQuery} from "react-query";
+import Spinner from "./spinner";
+import React from "react";
+
+const EntityInfoSp = ({
+ tenenvId,
+ spId
+ }) => {
+ const spEntities =
+ useQuery([spsKey, {
+ params: {
+ 'tenenv_id': tenenvId,
+ 'spId': spId
+ }
+ }], getSps,
+ {
+ enabled: !!spId
+ })
+
+ if (spEntities.isLoading
+ || spEntities.isFetching) {
+ return ( )
+ }
+
+ if (spEntities?.data?.length == 0) {
+ return null
+ }
+
+ return (
+ {spEntities.data?.[0]?.name} ({spEntities.data?.[0]?.identifier})
+ )
+}
+
+export default EntityInfoSp
\ No newline at end of file
diff --git a/javascript/src/components/Common/footer.js b/javascript/src/components/Common/footer.js
new file mode 100644
index 0000000..e8e8b8a
--- /dev/null
+++ b/javascript/src/components/Common/footer.js
@@ -0,0 +1,72 @@
+import React, {useContext} from 'react';
+import Image from 'react-bootstrap/Image';
+import Col from 'react-bootstrap/Col';
+import Row from 'react-bootstrap/Row';
+import DropdownButton from 'react-bootstrap/DropdownButton';
+import Dropdown from 'react-bootstrap/Dropdown';
+import parse from 'html-react-parser';
+import {useTranslation} from 'react-i18next';
+import config from '../../config.json'
+import {languageContext} from '../../Context/context';
+
+const Footer = (props) => {
+ const [language, setLanguage] = useContext(languageContext)
+ const {t, i18n} = useTranslation();
+
+ return (
+
+
+
+
+
+ {
+ setLanguage(e);
+ i18n.changeLanguage(e)
+ }}
+ className="ssp-btn btn ssp-btn__footer dropdown-toggle"
+ id='dropdown-button-drop-up' key="up"
+ title={
+
+ {language === 'en' ? 'English' : 'Greek'}
+ } drop="up" variant="link">
+ English
+ Greek
+
+
+
+
+
+
+
+
+
+ Copyright ©2023
+
+
+
+
+
+
+
+
+
+ {config?.footer_description && parse(config?.footer_description)} |
+ Powered by
RCIAM
+
+
+
+
+
+ )
+}
+
+export default Footer
\ No newline at end of file
diff --git a/javascript/src/components/Common/header.js b/javascript/src/components/Common/header.js
new file mode 100644
index 0000000..f82b19e
--- /dev/null
+++ b/javascript/src/components/Common/header.js
@@ -0,0 +1,46 @@
+import React, {useState, useEffect} from 'react';
+import {FontAwesomeIcon} from '@fortawesome/react-fontawesome';
+import Image from 'react-bootstrap/Image';
+import {faTimes} from '@fortawesome/free-solid-svg-icons';
+import parse from 'html-react-parser';
+import NavbarTop from './navbarTop';
+import config from '../../config.json'
+
+const Header = (props) => {
+ const [bannerAlertInfo, setBannerAlertInfo] = useState([]);
+
+ useEffect(() => {
+ setBannerAlertInfo(props.bannerAlertInfo);
+ }, [props.bannerAlertInfo])
+
+ return (
+
+
+ {bannerAlertInfo && bannerAlertInfo[0] &&
+
+
+ {parse(bannerAlertInfo[0].alert_message)}
+
+
{
+ setBannerAlertInfo([...bannerAlertInfo.slice(1)])
+ }}>
+
+
+
+ }
+
0}/>
+
+
+
+ {config?.home_page_title}
+
+
+
+ );
+}
+
+export default Header
\ No newline at end of file
diff --git a/javascript/src/components/Common/i18n.js b/javascript/src/components/Common/i18n.js
new file mode 100644
index 0000000..77cc7df
--- /dev/null
+++ b/javascript/src/components/Common/i18n.js
@@ -0,0 +1,35 @@
+import i18n from 'i18next';
+import {initReactI18next} from 'react-i18next';
+
+import Backend from 'i18next-http-backend';
+import LanguageDetector from 'i18next-browser-languagedetector';
+// don't want to use this?
+// have a look at the Quick start guide
+// for passing in lng and translations on init
+
+i18n
+ // load translation using http -> see /public/locales (i.e. https://github.com/i18next/react-i18next/tree/master/example/react/public/locales)
+ // learn more: https://github.com/i18next/i18next-http-backend
+ // want your translations to be loaded from a professional CDN? => https://github.com/locize/react-tutorial#step-2---use-the-locize-cdn
+ .use(Backend)
+ // detect user language
+ // learn more: https://github.com/i18next/i18next-browser-languageDetector
+ .use(LanguageDetector)
+ // pass the i18n instance to react-i18next.
+ .use(initReactI18next)
+ // init i18next
+ // for all options read: https://www.i18next.com/overview/configuration-options
+ .init({
+ backend: {
+ loadPath: 'metrics/locales/{{lng}}/{{ns}}.json'
+ },
+ fallbackLng: 'en',
+ debug: false,
+
+ interpolation: {
+ escapeValue: false, // not needed for react as it escapes by default
+ }
+ });
+
+
+export default i18n;
\ No newline at end of file
diff --git a/javascript/src/components/Common/layout.js b/javascript/src/components/Common/layout.js
new file mode 100644
index 0000000..6c69f23
--- /dev/null
+++ b/javascript/src/components/Common/layout.js
@@ -0,0 +1,3 @@
+export default function Layout({ children }) {
+ return {children}
;
+ }
\ No newline at end of file
diff --git a/javascript/src/components/Common/main.js b/javascript/src/components/Common/main.js
new file mode 100644
index 0000000..3a766c5
--- /dev/null
+++ b/javascript/src/components/Common/main.js
@@ -0,0 +1,3 @@
+export default function Main({ children }) {
+ return {children} ;
+ }
\ No newline at end of file
diff --git a/javascript/src/components/Common/middleware.js b/javascript/src/components/Common/middleware.js
new file mode 100644
index 0000000..6aa2ae5
--- /dev/null
+++ b/javascript/src/components/Common/middleware.js
@@ -0,0 +1,35 @@
+import React, {useEffect} from 'react'
+import Communities from "../../Pages/Communities";
+import Users from "../../Pages/Users";
+import Dashboard from "../../Pages/Dashboard";
+import Idps from "../../Pages/Idps";
+import Sps from "../../Pages/Sps";
+import Sp from "../../Pages/Sps/sp";
+import Idp from "../../Pages/Idps/idp";
+import Login from "../../Pages/Authentication/Login";
+import ErrorPage from "../../Pages/Error";
+import {useParams} from "react-router-dom";
+import {useCookies} from "react-cookie";
+import config from '../../config.json'
+
+const Middleware = ({elementName}) => {
+ const [cookies, setCookie] = useCookies();
+
+ // We only want to set the Cookies once since we will cause an infinite
+ // rerender in case we do not.
+ useEffect(() => {
+ // XXX We set the environment and tenant globally
+ console.log('config tenant', config)
+ console.log('config environment', config)
+ console.log('hostname', window.location.hostname)
+ setCookie('x-tenant', config.tenant, {path: '/'});
+ setCookie('x-environment', config.environment, {path: '/'});
+ }, []);
+
+ const Component = elementName
+ return (
+
+ )
+}
+
+export default Middleware
\ No newline at end of file
diff --git a/javascript/src/components/Common/navbarTop.js b/javascript/src/components/Common/navbarTop.js
new file mode 100644
index 0000000..c369b8d
--- /dev/null
+++ b/javascript/src/components/Common/navbarTop.js
@@ -0,0 +1,63 @@
+import React, {useContext} from 'react';
+import Navbar from 'react-bootstrap/Navbar';
+import DropdownButton from 'react-bootstrap/DropdownButton';
+import Dropdown from 'react-bootstrap/Dropdown';
+import {userinfoContext} from '../../Context/context';
+import {useTranslation} from 'react-i18next';
+import Login from "../../Pages/Authentication/Login"
+import {FontAwesomeIcon} from "@fortawesome/react-fontawesome";
+import {faSignOutAlt} from "@fortawesome/free-solid-svg-icons";
+import config from '../../config.json'
+
+const NavbarTop = (props) => {
+ // eslint-disable-next-line
+ const [userInfo, setUserInfo] = useContext(userinfoContext);
+ // eslint-disable-next-line
+ const {t, i18n} = useTranslation();
+
+ if (!config || !config?.theme_color) {
+ return null
+ }
+
+ const handleLogoutClick = () => {
+ // Redirect to the logout endpoint
+ window.location.href = config?.logout_url
+ }
+
+ return (
+
+
+ {
+ userInfo != undefined ?
+
+
+ {userInfo ? userInfo.name : 'login'}
+ {userInfo && ' (' + userInfo.email + ')'}
+
+ >
+ }
+ id="dropdown-menu-align-left"
+ >
+
+
+ {userInfo.voperson_id}(voPerson)
+
+
+ {t('logout')}
+
+
+
+
+ :
+ }
+
+
+ )
+}
+export default NavbarTop
+
\ No newline at end of file
diff --git a/javascript/src/components/Common/sideNav.js b/javascript/src/components/Common/sideNav.js
new file mode 100644
index 0000000..777a9a8
--- /dev/null
+++ b/javascript/src/components/Common/sideNav.js
@@ -0,0 +1,75 @@
+import React, {useState, useEffect} from 'react';
+import {Link} from 'react-router-dom'
+import Sidebar from "react-bootstrap-sidebar-menu";
+import {FontAwesomeIcon} from '@fortawesome/react-fontawesome';
+import {faDoorOpen, faHome, faUser, faUsers, faWarehouse} from '@fortawesome/free-solid-svg-icons';
+import {useCookies} from 'react-cookie';
+
+
+const SideNav = ({
+ userInfo,
+ permissions
+ }) => {
+ const [reload, setReload] = useState(false)
+ const [cookies, setCookie] = useCookies();
+
+ const tenant = cookies['x-tenant']
+ const environment = cookies['x-environment']
+
+ useEffect(() => {
+ setReload((prev) => !prev)
+ }, [userInfo, permissions])
+
+ return (
+
+
+
+
+
+
+
+ {/* Home */}
+
+
+ Home
+
+ {/* Identity Providers */}
+
+
+ Identity Providers
+
+ {/* Services */}
+
+
+ Services
+
+ {
+ userInfo != undefined
+ && !!permissions?.actions?.registered_users?.['view'] ?
+ // Users
+ (
+
+ Users
+ ) : null
+ }
+ {
+ userInfo != undefined
+ && !!permissions?.actions?.communities?.['view'] ?
+ // Communities
+ (
+
+ Communities
+ ) : null
+ }
+
+
+
+
+ )
+}
+export default SideNav
\ No newline at end of file
diff --git a/javascript/src/components/Common/spinner.js b/javascript/src/components/Common/spinner.js
new file mode 100644
index 0000000..6396b19
--- /dev/null
+++ b/javascript/src/components/Common/spinner.js
@@ -0,0 +1,15 @@
+import React from 'react'
+import "../../components/Common/Style/spinner.css";
+
+const Spinner = () => {
+ return (
+
+ )
+}
+
+export default Spinner
\ No newline at end of file
diff --git a/javascript/src/components/Common/utils.js b/javascript/src/components/Common/utils.js
new file mode 100644
index 0000000..a0cc8a5
--- /dev/null
+++ b/javascript/src/components/Common/utils.js
@@ -0,0 +1,222 @@
+import $ from "jquery";
+import {options} from "../../utils/helpers/enums";
+import {useNavigate} from "react-router-dom";
+
+export function convertDateByGroup(jsDate, groupBy) {
+ var month = (jsDate.getMonth() + 1).toString()
+ if (month.length < 2) {
+ month = '0' + month;
+ }
+ var day = jsDate.getDate().toString()
+ if (day.length < 2) {
+ day = '0' + day;
+ }
+ if (groupBy == 'day') {
+ var showDate = jsDate.getFullYear() + '-' + month + '-' + day;
+ } else if (groupBy == 'week') {
+ var showDate = jsDate.getFullYear() + '-' + month + '-' + day;
+    var nextWeek = new Date(new Date(jsDate.getTime()).setDate(jsDate.getDate() + 6));
+ month = (nextWeek.getMonth() + 1).toString()
+ if (month.length < 2) {
+ month = '0' + month;
+ }
+ day = nextWeek.getDate().toString()
+ if (day.length < 2) {
+ day = '0' + day;
+ }
+ showDate += " to " + nextWeek.getFullYear() + '-' + month + '-' + day;
+ } else if (groupBy == 'month') {
+ var showDate = jsDate.getFullYear() + '-' + month;
+ } else if (groupBy == 'year') {
+ var showDate = jsDate.getFullYear();
+ }
+ return showDate;
+}
+
+export function getWeekNumber(d) {
+
+ d = new Date(Date.UTC(d.getFullYear(), d.getMonth(), d.getDate()));
+ // Set to nearest Thursday: current date + 4 - current day number
+ // Make Sunday's day number 7
+ d.setUTCDate(d.getUTCDate() + 4 - (d.getUTCDay() || 7));
+ // Get first day of year
+ var yearStart = new Date(Date.UTC(d.getUTCFullYear(), 0, 1));
+ // Calculate full weeks to nearest Thursday
+ var weekNo = Math.ceil((((d - yearStart) / 86400000) + 1) / 7);
+ // Return array of year and week number
+ return weekNo + ' (' + d.getUTCFullYear() + ')';
+}
+
+// Sort data and convert them to html list
+export function convertToList(data, seperator) {
+ var lis = ''
+ data.split(seperator).sort(function (a, b) {
+ var nameA = a.toUpperCase(); // ignore upper and lowercase
+ var nameB = b.toUpperCase(); // ignore upper and lowercase
+ if (nameA < nameB) {
+ return -1;
+ }
+ if (nameA > nameB) {
+ return 1;
+ }
+ // names must be equal
+ return 0;
+
+ }).forEach(function (value) {
+ lis += '' + value.trim() + ' '
+ })
+ return lis
+}
+
+// Calculate Legends Area for Map
+export function calculateLegends(maxSum) {
+ // Set Number of Legends
+ var numLegends = maxSum < 5 ? maxSum : 5;
+ var spaces = Math.round(maxSum / numLegends);
+ var legends = [];
+ var fill = ["#09EBEE", "#19CEEB", "#28ACEA", "#388EE9", "#3D76E0"];
+ for (var i = 0; i < numLegends; i++) {
+ var maxValue = ((i + 1) != numLegends ? ((i + 1) * spaces) : maxSum);
+ var legend = {
+ min: i * spaces,
+ max: maxValue,
+ attrs: {
+ fill: fill[i]
+ },
+ label: i * spaces + "-" + maxValue
+ }
+ legends.push(legend)
+ }
+ return legends;
+}
+
+export function setMapConfiguration() {
+ return {
+ name: "world_countries_mercator",
+ zoom: {
+ enabled: true,
+ maxLevel: 15,
+ init: {
+ latitude: 38.938048,
+ longitude: -2.924315,
+ level: 5
+ }
+ },
+ defaultArea: {
+ attrs: {
+ fill: "#ccc", // my function for color i want to define
+ stroke: "#5d5d5d",
+ "stroke-width": 0.2,
+ "stroke-linejoin": "round",
+
+ },
+ attrsHover: {
+ fill: "#E98300",
+ animDuration: 300
+ },
+
+ },
+ }
+}
+
+export function setLegend(legendLabel, legends) {
+ return {
+ area: {
+ title: legendLabel,
+ titleAttrs: {"font": "unset", "font-size": "12px", "font-weight": "bold"},
+ slices: legends
+ }
+ }
+}
+
+// Find the min and max values at an Array
+export function calculateMinMax(dataArray) {
+ let min = dataArray[0][0]['min'], max = dataArray[0][0]['max']
+ for (let i = 1; i < dataArray.length; i++) {
+ let minValue = dataArray[i][0]['min']
+ let maxValue = dataArray[i][0]['max']
+ min = (minValue < min) ? minValue : min
+ max = (maxValue > max) ? maxValue : max
+ }
+ return [min, max]
+}
+
+export const createAnchorElement = (title, link) => {
+ const anchor = document.createElement('a');
+ const linkText = document.createTextNode(title);
+ anchor.appendChild(linkText);
+ anchor.title = title;
+ anchor.href = link;
+
+ // FIXME:
+ // Trying to pass an object directly to the datatable will fail. We need to
+ // get the HTML string from the element. This causes inconsistent behavior
+ // which we need to solve. For now we leave it as is.
+ return anchor.outerHTML;
+ // return anchor;
+}
+
+export const axisChartOptions = (title, hAxisFormat, hAxisTicks) => {
+ return (
+ {
+ title: title,
+ backgroundColor: {fill: 'transparent'},
+ vAxis: {
+ format: '0'
+ },
+ hAxis: {
+ format: hAxisFormat,
+ maxTextLines: 2,
+ textStyle: {fontSize: 15},
+ ticks: hAxisTicks,
+ },
+ tooltip: {isHtml: true},
+ width: '100%',
+ height: '350',
+ bar: {groupWidth: "92%"},
+ legend: {position: "none"},
+ }
+ )
+}
+
+export function sortByNamePropertyCallback(a, b) {
+ const nameA = a.name.toUpperCase(); // ignore upper and lowercase
+ const nameB = b.name.toUpperCase(); // ignore upper and lowercase
+ if (nameA < nameB) {
+ return -1;
+ }
+ if (nameA > nameB) {
+ return 1;
+ }
+ // names must be equal
+ return 0;
+}
+
+export function parseDateWithoutTimezone(dateString, type) {
+ const dateWithoutTimezone = new Date(dateString);
+ dateWithoutTimezone.setMinutes(dateWithoutTimezone.getMinutes() - dateWithoutTimezone.getTimezoneOffset());
+
+ return dateWithoutTimezone;
+};
+
+export function formatStartDate(date) {
+ // Check if a valid date object is received
+ if (date instanceof Date && !isNaN(date.getTime())) {
+ // Set the time to midnight (00:00:00)
+ date.setMinutes(date.getMinutes() - date.getTimezoneOffset());
+ date.setHours(0, 0, 0, 0);
+ return date;
+ }
+ return null;
+}
+
+export function formatEndDate(date) {
+ // Check if a valid date object is received
+ if (date instanceof Date && !isNaN(date.getTime())) {
+    // Set the time to the end of the day (23:59:59.999)
+    date.setMinutes(date.getMinutes() - date.getTimezoneOffset());
+    date.setHours(23, 59, 59, 999);
+ return date;
+ }
+ return null;
+}
diff --git a/javascript/src/components/Communities/communitiesChart.js b/javascript/src/components/Communities/communitiesChart.js
new file mode 100644
index 0000000..572eb7e
--- /dev/null
+++ b/javascript/src/components/Communities/communitiesChart.js
@@ -0,0 +1,154 @@
+import React, {useState, useEffect} from "react";
+import {Chart} from "react-google-charts";
+import {
+ convertDateByGroup,
+ getWeekNumber,
+ axisChartOptions
+} from "../Common/utils";
+import Select from 'react-select';
+import Container from 'react-bootstrap/Container';
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import ListCommunities from "./listCommunities";
+import 'bootstrap/dist/css/bootstrap.min.css';
+import {
+ options,
+ options_group_by
+} from "../../utils/helpers/enums";
+import {useQuery, useQueryClient} from "react-query";
+import {communitiesGroupByKey} from "../../utils/queryKeys";
+import {getCommunitiesGroupBy} from "../../utils/queries";
+
+const CommunitiesChart = ({tenenvId}) => {
+
+ const [selected, setSelected] = useState(options_group_by[0].value);
+ const [communities, setCommunities] = useState();
+ const [global_options, setGlobalOptions] = useState();
+ const queryClient = useQueryClient();
+ const controller = new AbortController
+
+
+ let params = {
+ params: {
+ 'interval': selected,
+ 'count_interval': options[selected]["count_interval"],
+ 'tenenv_id': tenenvId,
+ },
+ signal: controller.signal
+ }
+
+ const communitiesGroupBy = useQuery(
+ [communitiesGroupByKey, {groupBy: selected, params: params}],
+ getCommunitiesGroupBy,
+ {
+ enabled: false
+ }
+ )
+
+ useEffect(() => {
+ params = {
+ params: {
+ 'interval': selected,
+ 'count_interval': options[selected]["count_interval"],
+ 'tenenv_id': tenenvId,
+ },
+ signal: controller.signal
+ }
+
+ try {
+ const response = queryClient.refetchQueries([communitiesGroupByKey, {groupBy: selected, params: params}])
+ } catch (error) {
+ // todo: Here we can handle any authentication or authorization errors
+ console.log(error)
+ }
+
+ return () => {
+ controller.abort()
+ }
+ }, [selected, tenenvId])
+
+
+ // Construct the data required for the datatable
+ useEffect(() => {
+ if (!communitiesGroupBy.isLoading
+ && !communitiesGroupBy.isFetching
+ && communitiesGroupBy.isSuccess
+ && !!communitiesGroupBy.data) {
+
+ const hticksArray = communitiesGroupBy?.data?.map(element => ({
+ v: new Date(element?.range_date),
+ f: selected === "week" ? getWeekNumber(new Date(element?.range_date)) : new Date(element?.range_date)
+ })
+ )
+
+ let fValues = [
+ ['Date',
+ 'Count',
+ {
+ 'type': 'string',
+ 'role': 'tooltip',
+ 'p': {'html': true}
+ }
+ ]
+ ]
+
+ const charData = communitiesGroupBy?.data?.map(element => ([
+ new Date(element?.range_date),
+ parseInt(element['count']),
+ `${convertDateByGroup(new Date(element?.range_date), selected)} Communities: ${parseInt(element['count'])}
`
+ ])
+ )
+
+
+ setCommunities(fValues.concat(charData))
+ setGlobalOptions(axisChartOptions(options[selected]["title"], options[selected]["hAxis"]["format"],
+ hticksArray))
+ }
+ }, [!communitiesGroupBy.isLoading
+ && !communitiesGroupBy.isFetching
+ && communitiesGroupBy.isSuccess])
+
+ // XXX Google Chart will not work if we return empty and then
+ // try to reload
+ // if (communitiesGroupBy.isLoading
+ // || communitiesGroupBy.isFetching
+ // || communities?.length === 0) {
+ // return null
+ // }
+
+ return (
+
+
+
+
Number of Communities created
+
+
+
+ Data loading}
+ options={global_options}/>
+
+
+
+
+ Select Period:
+
+ setSelected(event?.value)}/>
+
+
+
+
+
+
+
+
+
+
+ )
+}
+
+export default CommunitiesChart
\ No newline at end of file
diff --git a/javascript/src/components/Communities/communitiesDataTable.js b/javascript/src/components/Communities/communitiesDataTable.js
new file mode 100644
index 0000000..cafefc4
--- /dev/null
+++ b/javascript/src/components/Communities/communitiesDataTable.js
@@ -0,0 +1,206 @@
+import React, {useState, useEffect, useRef} from "react";
+import "jquery/dist/jquery.min.js";
+import $ from "jquery";
+import Datatable from "../../components/datatable";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import DatePicker from "react-datepicker";
+import Dropdown from 'react-dropdown';
+import {toast} from 'react-toastify';
+import {convertDateByGroup, formatStartDate, formatEndDate} from "../Common/utils";
+import 'react-toastify/dist/ReactToastify.css';
+import 'react-dropdown/style.css';
+import "react-datepicker/dist/react-datepicker.css";
+import {dropdownOptions} from "../../utils/helpers/enums";
+import {useQuery, useQueryClient} from "react-query";
+import {communitiesGroupByKey, minDateCommunitiesKey} from "../../utils/queryKeys";
+import {getCommunitiesGroupBy, getMinDateCommunities} from "../../utils/queries";
+import Spinner from "../Common/spinner";
+import {format} from "date-fns";
+
+const CommunitiesDataTable = ({tenenvId}) => {
+ const oneYearAgo = new Date();
+ oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);
+ formatStartDate(oneYearAgo)
+
+ const today = new Date();
+ today.setDate(today.getDate() - 1);
+ formatEndDate(today)
+ const dropdownRef = useRef(null);
+ const [communitiesPerPeriod, setCommunitiesPerPeriod] = useState([]);
+ const [minDate, setMinDate] = useState(null);
+ const [endDate, setEndDate] = useState(today);
+ const [startDate, setStartDate] = useState(oneYearAgo);
+ const [dropdownOptionsState, setDropdownOptions] = useState(dropdownOptions);
+ const [groupBy, setGroupBy] = useState("month")
+ const controller = new AbortController
+
+
+ const queryClient = useQueryClient();
+
+ let params = {
+ params: {
+ 'startDate': !startDate ? oneYearAgo : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? today : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'tenenv_id': tenenvId
+ },
+ signal: controller.signal
+ }
+
+ const communitiesGroupBy = useQuery(
+ [communitiesGroupByKey, {groupBy: groupBy, params: params}],
+ getCommunitiesGroupBy,
+ {
+ enabled: false,
+ refetchOnWindowFocus: false
+ }
+ )
+
+ const minDateCommunities = useQuery(
+ [minDateCommunitiesKey, params],
+ getMinDateCommunities,
+ {
+ enabled: false,
+ refetchOnWindowFocus: false
+ }
+ )
+
+ useEffect(() => {
+ params = {
+ params: {
+ 'startDate': !startDate ? oneYearAgo : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? today : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'tenenv_id': tenenvId
+ },
+ signal: controller.signal
+ }
+
+ try {
+ const response = queryClient.refetchQueries([communitiesGroupByKey, {groupBy: groupBy, params: params}])
+ queryClient.refetchQueries([minDateCommunitiesKey, {params:{tenenv_id: tenenvId}}])
+ } catch (error) {
+ // todo: Here we can handle any authentication or authorization errors
+ console.log(error)
+ }
+
+ return () => {
+ controller.abort()
+ }
+ }, [groupBy])
+
+
+ // Construct the data required for the datatable
+ useEffect(() => {
+ if(groupBy == "") {
+ return;
+ }
+ const communitiesGroupByPerPeriodArray = !communitiesGroupBy.isLoading
+ && !communitiesGroupBy.isFetching
+ && communitiesGroupBy.isSuccess
+ && communitiesGroupBy?.data?.map(element => ({
+ "Date": convertDateByGroup(new Date(element?.range_date), groupBy),
+ "Number of Communities": element?.count,
+ "Names": element?.names
+ }))
+
+ if (!!communitiesGroupBy?.data
+ && !!communitiesGroupByPerPeriodArray) {
+ // We only keep the first date because the backend returns the dataset sorted and we only care about the
+ // min of the min dates.
+ if (minDate == undefined || minDate == "") {
+ console.log(minDateCommunities?.data?.min_date)
+ setMinDate(!!minDateCommunities?.data?.min_date ? new Date(minDateCommunities?.data?.min_date) : null)
+ }
+ $("#table-community").DataTable().destroy()
+ setCommunitiesPerPeriod(communitiesGroupByPerPeriodArray)
+ }
+ }, [communitiesGroupBy.isSuccess && minDateCommunities.isSuccess, groupBy])
+
+ const handleAddOption = () => {
+ // Create a new option dynamically
+ const newOption = {value: '', label: 'Filter'};
+
+ // Check if the new option already exists in the options array
+ if (!dropdownOptionsState.some(option => option.value === newOption.value)) {
+ // If it doesn't exist, add it to the options array
+ setDropdownOptions([newOption, ...dropdownOptionsState]);
+ }
+ };
+
+ const handleStartDateChange = (date) => {
+ if(groupBy!=''){
+ handleAddOption()
+ }
+ date = formatStartDate(date);
+ if(date != null) {
+ if(endDate!=date){
+ setGroupBy("")
+ }
+ setStartDate(date);
+ dropdownRef.current.state.selected.label = 'Filter';
+ }
+
+ };
+
+ const handleEndDateChange = (date) => {
+ if(groupBy!=''){
+ handleAddOption()
+ }
+ //date = formatEndDate(date);
+ if(date != null) {
+ if(endDate!=date){
+ setGroupBy("")
+ }
+ setEndDate(date);
+ dropdownRef.current.state.selected.label = 'Filter';
+ }
+
+ };
+
+ const handleChange = (event) => {
+ if (!startDate || !endDate) {
+ toast.warning("You have to fill both startDate and endDate")
+ return
+ }
+ setGroupBy(event.value)
+ };
+
+
+
+ return
+
+
+
Number of communities
+
+
+
+
+ From:
+ To:
+
+
+
+ {
+
+
+ }
+
+
+
+
+}
+
+export default CommunitiesDataTable
\ No newline at end of file
diff --git a/javascript/src/components/Communities/communitiesMap.js b/javascript/src/components/Communities/communitiesMap.js
new file mode 100644
index 0000000..cf723b3
--- /dev/null
+++ b/javascript/src/components/Communities/communitiesMap.js
@@ -0,0 +1,103 @@
+import React, {useState, useRef} from "react";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import Select from 'react-select';
+import 'jquery-mapael';
+import 'jquery-mapael/js/maps/world_countries_mercator.js';
+import {useQuery, useQueryClient} from "react-query";
+import {
+ communitiesKey,
+ countryStatsByVoKey
+} from "../../utils/queryKeys";
+import {getCommunities, getCountryStatsByVo} from "../../utils/queries";
+import EarthMap from "../Common/earthMap";
+import MemberStatusReport from "./memberStatusReport";
+
+const CommunitiesMap = ({tenenvId}) => {
+ const [selectedCommunity, setSelectedCommunity] = useState({});
+ const communityId = useRef(null)
+
+ const queryClient = useQueryClient();
+
+ let params = {
+ params: {
+ 'tenenv_id': tenenvId
+ }
+ }
+
+ const communitiesQuery = useQuery(
+ [communitiesKey, params],
+ getCommunities
+ )
+
+ const countryStatsQuery = useQuery(
+ [countryStatsByVoKey, {countryId: communityId.current, params: params}],
+ getCountryStatsByVo, {
+ enabled: !!communityId?.current
+ }
+ )
+
+
+ // Create the option list
+ const communitiesOptionsList = !communitiesQuery.isLoading
+ && communitiesQuery.isSuccess
+ && communitiesQuery.isFetched
+ && communitiesQuery.data.length > 0
+ && communitiesQuery?.data?.map((elem) => ({
+ label: elem.name,
+ value: elem.id
+ }))
+
+ const handleChange = (event) => {
+ communityId.current = event.value;
+
+ const filteredCommunity = !communitiesQuery.isLoading
+ && communitiesQuery.isSuccess
+ && communitiesQuery.isFetched
+ && communitiesQuery.data.length > 0
+ && communitiesQuery?.data?.filter((elem) => elem.id == event.value)
+
+ setSelectedCommunity(filteredCommunity.pop())
+
+ try {
+ const response = queryClient.refetchQueries([countryStatsByVoKey, {
+ countryId: communityId.current,
+ params: params
+ }])
+ } catch (error) {
+ // todo: Here we can handle any authentication or authorization errors
+ console.log(error)
+ }
+ }
+
+ return (
+
+
+
+
Statistics Per Community
+
+
+
+
+ {
+ !!selectedCommunity?.name &&
+
+ {selectedCommunity?.name}
+ {selectedCommunity?.description}
+
+ }
+
+
+
+
+
+
+ )
+}
+
+export default CommunitiesMap;
\ No newline at end of file
diff --git a/javascript/src/components/Communities/listCommunities.js b/javascript/src/components/Communities/listCommunities.js
new file mode 100644
index 0000000..4025420
--- /dev/null
+++ b/javascript/src/components/Communities/listCommunities.js
@@ -0,0 +1,55 @@
+import React, {useEffect} from "react";
+import {sortByNamePropertyCallback} from "../Common/utils"
+import ReactTooltip from "react-tooltip";
+import Spinner from "../Common/spinner";
+
+const ListCommunities = ({communities}) => {
+ useEffect(() => {
+ ReactTooltip.rebuild();
+ }, [communities])
+
+
+ if (communities.isLoading
+ || communities.isFetching) {
+ return ( )
+ }
+
+ if (communities?.data == undefined) {
+ return null
+ }
+
+ const communitiesList = communities?.data.map((element) => {
+ // Construct the list with COUs
+ const createdDate = element?.created_date?.split(", ")
+ const description = element?.description?.split("|| ")
+ return element?.names?.split("|| ").map((name, index) => ({
+ name: name,
+ description: `${description[index]} Created Date: ${createdDate[index]}`
+ })
+ )
+ })
+
+ if (communitiesList?.length == 0) {
+ return null
+ }
+
+ return (
+
+ {
+ communitiesList
+ .flat()
+ .sort(sortByNamePropertyCallback)
+ .map((cou, index) => (
+
+ {cou["name"]}
+
+ ))
+ }
+
+
+ )
+}
+
+export default ListCommunities
diff --git a/javascript/src/components/Communities/memberStatusReport.js b/javascript/src/components/Communities/memberStatusReport.js
new file mode 100644
index 0000000..b709ea2
--- /dev/null
+++ b/javascript/src/components/Communities/memberStatusReport.js
@@ -0,0 +1,73 @@
+import Col from "react-bootstrap/Col";
+import Row from "react-bootstrap/Row";
+import {useQuery} from "react-query";
+import {communityMembersByStatusKey} from "../../utils/queryKeys";
+import {getCommunityMembersByStatus} from "../../utils/queries";
+import Spinner from "../Common/spinner";
+import React from "react";
+
+const MemberStatusReport = ({
+ tenenvId,
+ communityId
+ }) => {
+
+
+ let params = {
+ params: {
+ 'tenenv_id': tenenvId,
+ 'community_id': communityId,
+ }
+ }
+
+ const communityMembersByStatusQuery = useQuery(
+ [communityMembersByStatusKey, params],
+ getCommunityMembersByStatus, {
+ enabled: (tenenvId != undefined && communityId != undefined)
+ }
+ )
+
+ if(communityMembersByStatusQuery.isLoading
+ || communityMembersByStatusQuery.isFetching) {
+ return ( )
+ }
+
+ // Create the option list
+ const {activeUsers, graceUsers, otherUsers} = !communityMembersByStatusQuery.isLoading
+ && communityMembersByStatusQuery.isSuccess
+ && communityMembersByStatusQuery.isFetched
+ && communityMembersByStatusQuery.data.length > 0
+ && communityMembersByStatusQuery?.data?.reduce((acc, member) => {
+ if (member.status === 'A') {
+ acc.activeUsers = member.count
+ } else if (member.status === 'GP') {
+ acc.graceUsers = member.count
+ } else {
+ acc.otherUsers += member.count
+ }
+ return acc
+ }, {activeUsers: 0, graceUsers: 0, otherUsers: 0})
+
+ if (tenenvId == undefined
+ || communityId == undefined) {
+ return null
+ }
+
+ return (
+
+
+ ACTIVE USERS
+ {activeUsers}
+
+
+ GRACE PERIOD USERS
+ {graceUsers}
+
+
+ OTHER STATUS USERS
+ {otherUsers}
+
+
+ )
+}
+
+export default MemberStatusReport;
\ No newline at end of file
diff --git a/javascript/src/components/Dashboard/loginDataTable.js b/javascript/src/components/Dashboard/loginDataTable.js
new file mode 100644
index 0000000..b34d524
--- /dev/null
+++ b/javascript/src/components/Dashboard/loginDataTable.js
@@ -0,0 +1,207 @@
+import React, {useState, useEffect, useRef} from "react";
+import "jquery/dist/jquery.min.js";
+import $ from "jquery";
+import Datatable from "../datatable";
+import dateFormat from 'dateformat';
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import DatePicker from "react-datepicker";
+import Dropdown from 'react-dropdown';
+import 'react-toastify/dist/ReactToastify.css';
+import 'react-dropdown/style.css';
+import "react-datepicker/dist/react-datepicker.css";
+import {dropdownOptions} from "../../../src/utils/helpers/enums"
+import {useQuery, useQueryClient} from "react-query";
+import {loginsPerCountryKey, minDateLoginsKey} from "../../utils/queryKeys";
+import {getLoginsPerCountry, getMinDateLogins} from "../../utils/queries";
+import {toast} from "react-toastify";
+import {format} from "date-fns";
+import {convertDateByGroup, formatStartDate, formatEndDate} from "../Common/utils";
+
+const LoginDataTable = ({
+ startDateHandler,
+ endDateHandler,
+ minDateHandler,
+ tenenvId,
+ uniqueLogins
+ }) => {
+ const oneYearAgo = new Date();
+ oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);
+ formatStartDate(oneYearAgo)
+
+ const today = new Date();
+ today.setDate(today.getDate() - 1);
+ formatEndDate(today)
+
+ const dropdownRef = useRef(null);
+ const [loginsPerCountryPerPeriod, setLoginsPerCountryPerPeriod] = useState([]);
+ const [minDate, setMinDate] = useState(null);
+ // By default we fetch by month
+ const [groupBy, setGroupBy] = useState("month");
+ const [endDate, setEndDate] = useState(today);
+ const [startDate, setStartDate] = useState(oneYearAgo);
+ const [dropdownOptionsState, setDropdownOptions] = useState(dropdownOptions);
+ const queryClient = useQueryClient();
+
+
+ let params = {
+ params: {
+ 'group_by': groupBy,
+ 'startDate': !startDate ? null : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? null : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'tenenv_id': tenenvId,
+ 'unique_logins': uniqueLogins
+ },
+ }
+
+ const loginsPerCountry = useQuery(
+ [loginsPerCountryKey, params],
+ getLoginsPerCountry,
+ {
+ enabled: false,
+ refetchOnWindowFocus: false
+ }
+ )
+
+ const minDateLogins = useQuery(
+ [minDateLoginsKey, params],
+ getMinDateLogins,
+ {
+ enabled: false,
+ refetchOnWindowFocus: false
+ }
+ )
+
+ useEffect(() => {
+ params = {
+ params: {
+ 'group_by': groupBy,
+ 'startDate': !startDate ? null : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? null : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'tenenv_id': tenenvId,
+ 'unique_logins': uniqueLogins
+ },
+ }
+
+ try {
+ const response = queryClient.refetchQueries([loginsPerCountryKey, params])
+ queryClient.refetchQueries([minDateLoginsKey, {params:{tenenv_id: tenenvId}}])
+ } catch (error) {
+ // todo: Here we can handle any authentication or authorization errors
+ console.log(error)
+ }
+
+ }, [uniqueLogins, groupBy])
+
+ // Construct the data required for the datatable
+ useEffect(() => {
+ if(groupBy == "") {
+ return;
+ }
+ const loginsPerCountryPerPeriodArray = !loginsPerCountry.isLoading
+ && !loginsPerCountry.isFetching
+ && loginsPerCountry.isSuccess
+ && loginsPerCountry?.data?.map(element => ({
+ "Date": !!element?.range_date ? convertDateByGroup(new Date(element?.range_date), groupBy) : null,
+ "Number of Logins": element?.count,
+ "Number of Logins per Country": element?.countries
+ }))
+
+ if (!!loginsPerCountry?.data && !!loginsPerCountryPerPeriodArray) {
+ // We only keep the first date because the backend returns the dataset sorted and we only care about the
+ // min of the min dates.
+ if (minDate == undefined || minDate == "") {
+ setMinDate(!!minDateLogins?.data?.min_date ? new Date(minDateLogins?.data?.min_date) : null)
+ minDateHandler(!!minDateLogins?.data?.min_date ? new Date(minDateLogins?.data?.min_date) : null)
+
+ }
+ $("#table-login").DataTable().destroy()
+ setLoginsPerCountryPerPeriod(loginsPerCountryPerPeriodArray)
+
+ }
+ }, [uniqueLogins, loginsPerCountry.isSuccess && minDateLogins.isSuccess, groupBy])
+
+ const handleAddOption = () => {
+ // Create a new option dynamically
+ const newOption = {value: '', label: 'Filter'};
+
+ // Check if the new option already exists in the options array
+ if (!dropdownOptionsState.some(option => option.value === newOption.value)) {
+ // If it doesn't exist, add it to the options array
+ setDropdownOptions([newOption, ...dropdownOptionsState]);
+ }
+ };
+
+ const handleStartDateChange = (date) => {
+ if(groupBy!=''){
+ handleAddOption()
+ }
+ date = formatStartDate(date);
+ if(date != null) {
+ if(endDate!=date){
+ setGroupBy("")
+ }
+ setStartDate(date);
+ dropdownRef.current.state.selected.label = 'Filter';
+ }
+
+ };
+
+ const handleEndDateChange = (date) => {
+ if(groupBy!=''){
+ handleAddOption()
+ }
+
+ //date = formatEndDate(date);
+
+ if(date != null) {
+ if(endDate!=date){
+ setGroupBy("")
+ }
+ setEndDate(date);
+ dropdownRef.current.state.selected.label = 'Filter';
+ }
+
+ };
+
+ const handleChange = (event) => {
+ if (!startDate || !endDate) {
+ toast.warning("You have to fill both startDate and endDate")
+ return
+ }
+ setGroupBy(event.value)
+ startDateHandler(startDate)
+ endDateHandler(endDate)
+ };
+
+ return (
+
+
+
+
Number of logins
+
+
+
+ From:
+ To:
+
+
+
+
+
+
+ )
+}
+
+export default LoginDataTable
\ No newline at end of file
diff --git a/javascript/src/components/Dashboard/loginIdpPieChart.js b/javascript/src/components/Dashboard/loginIdpPieChart.js
new file mode 100644
index 0000000..4fe1040
--- /dev/null
+++ b/javascript/src/components/Dashboard/loginIdpPieChart.js
@@ -0,0 +1,129 @@
+import React, {useState, useEffect} from "react";
+import {Chart} from "react-google-charts";
+import {useCookies} from "react-cookie";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import 'bootstrap/dist/css/bootstrap.min.css';
+import "jquery/dist/jquery.min.js";
+import $ from "jquery";
+import {useQuery} from "react-query";
+import {loginsPerIdpKey} from "../../utils/queryKeys";
+import {getLoginsPerIdp} from "../../utils/queries";
+import {optionsPieChart} from "../../utils/helpers/enums";
+import {convertDateByGroup, formatStartDate, formatEndDate} from "../Common/utils";
+
+var idpsArray = [];
+const LoginIdpPieChart = ({
+ spId,
+ tenenvId,
+ uniqueLogins,
+ goToSpecificProviderHandler
+ }) => {
+ const oneYearAgo = new Date();
+ oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);
+ formatStartDate(oneYearAgo)
+
+ const today = new Date();
+ today.setDate(today.getDate() - 1);
+ formatEndDate(today)
+
+ let idpsChartArray = [["Identity Provider", "Logins"], ['', 0]];
+ const [idps, setIdps] = useState(idpsChartArray);
+
+ const params = {
+ params: {
+ 'tenenv_id': tenenvId,
+ 'unique_logins': uniqueLogins,
+ 'sp': spId,
+ }
+ }
+
+ const [cookies, setCookie] = useCookies();
+ const permissions = cookies.permissions
+
+ const loginsPerIpd = useQuery(
+ [loginsPerIdpKey, params],
+ getLoginsPerIdp, {
+ refetchOnWindowFocus: false,
+ enabled: false
+ }
+ )
+
+ useEffect(() => {
+ loginsPerIpd.refetch()
+ .then(response => {
+ response?.data?.forEach(element => {
+ idpsChartArray.push([element.name, element.count])
+ idpsArray.push([element.id, element.name, element.identifier])
+ })
+ setIdps(idpsChartArray)
+ })
+
+ }, [uniqueLogins])
+
+ // XXX Google Chart will not work if we return empty and then
+ // try to reload
+ // if (loginsPerIpd.isLoading
+ // || loginsPerIpd.isFetching
+ // || idps.length <= 1
+ // ) {
+ // return null
+ // }
+
+ return (
+
+
+
+
Overall number of logins per IdP
+
+ Data loading}
+ width={"100%"}
+ height={"400px"}
+ className="pieChart"
+ chartEvents={[
+ {
+ eventName: "ready",
+ callback: ({chartWrapper, google}) => {
+ const chart = chartWrapper.getChart();
+
+ if (cookies.userinfo != undefined
+ && !!permissions?.actions?.identity_providers?.['view']) {
+ google.visualization.events.addListener(chart, 'click', selectHandler);
+ }
+
+ google.visualization.events.addListener(chart, 'onmouseover', showTooltip);
+ google.visualization.events.addListener(chart, 'onmouseout', hideTooltip);
+
+ function showTooltip(entry) {
+
+ chart.setSelection([{row: entry.row}]);
+ $('.pieChart').css('cursor', 'pointer')
+ }
+
+ function hideTooltip() {
+
+ chart.setSelection([]);
+ $('.pieChart').css('cursor', 'default')
+ }
+
+ function selectHandler() {
+
+ var selection = chart.getSelection();
+ if (selection.length) {
+ var identifier = idpsArray[selection[0].row];
+ goToSpecificProviderHandler(identifier[0])
+ }
+ }
+ }
+ }
+ ]}
+ />
+
+ );
+}
+
+export default LoginIdpPieChart
\ No newline at end of file
diff --git a/javascript/src/components/Dashboard/loginLineChart.js b/javascript/src/components/Dashboard/loginLineChart.js
new file mode 100644
index 0000000..d005a3a
--- /dev/null
+++ b/javascript/src/components/Dashboard/loginLineChart.js
@@ -0,0 +1,126 @@
+import React, {useState, useEffect, useCallback} from "react";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import 'bootstrap/dist/css/bootstrap.min.css';
+import {getLoginsGroupByDay} from "../../utils/queries";
+import {useQuery, useQueryClient} from "react-query";
+import {loginsGroupByDayKey} from "../../utils/queryKeys";
+import {Chart} from "react-google-charts";
+import Spinner from "../Common/spinner"
+
+const LoginLineChart = ({
+ type,
+ id,
+ tenenvId,
+ uniqueLogins
+ }) => {
+
+ const queryClient = useQueryClient();
+ const [lineData, setLineData] = useState([["Date", "Logins"], ['', 0]])
+
+ let params = {
+ params: {
+ tenenv_id: tenenvId,
+ unique_logins: uniqueLogins
+ },
+
+ }
+
+ const loginsGroupByDay = useQuery(
+ [loginsGroupByDayKey, params],
+ getLoginsGroupByDay,
+ {
+ enabled: false,
+ refetchOnWindowFocus: false
+ }
+ )
+
+ useEffect(() => {
+ params = {
+ params: {
+ tenenv_id: tenenvId,
+ unique_logins: uniqueLogins
+ },
+
+ }
+
+ if (type) {
+ params["params"][type] = id
+ }
+
+
+ try {
+ const response = queryClient.refetchQueries([loginsGroupByDayKey, params])
+ } catch (error) {
+ // todo: Here we can handle any authentication or authorization errors
+ console.log(error)
+ }
+
+ }, [uniqueLogins])
+
+ // Construct the data required for the chart
+ useEffect(() => {
+ const lineDataArray = !loginsGroupByDay.isLoading
+ && !loginsGroupByDay.isFetching
+ && loginsGroupByDay.isSuccess
+ && loginsGroupByDay?.data?.map(element => ([new Date(element.date), element.count ?? 0]))
+ if (!!loginsGroupByDay?.data && !!lineDataArray) {
+ lineDataArray.unshift(["Date", "Logins"])
+ setLineData(lineDataArray)
+ }
+ }, [!loginsGroupByDay.isLoading
+ && !loginsGroupByDay.isFetching
+ && loginsGroupByDay.isSuccess])
+
+ // XXX Google Chart will not work if we return empty and then
+ // try to reload
+ if (loginsGroupByDay.isLoading
+ || loginsGroupByDay.isFetching
+ ) {
+ return ( )
+ }
+
+ if (lineData?.length <= 2) {
+ return null
+ }
+ else return (
+
+
+
+
Overall number of logins per day
+
+ Data loading}
+ options={{
+ legend: 'none'
+ }}
+ controls={[
+ {
+ controlType: "ChartRangeFilter",
+ options: {
+ filterColumnIndex: 0,
+ ui: {
+ chartType: "LineChart",
+ chartOptions: {
+ chartArea: {width: "80%", height: "100%"},
+ hAxis: {baselineColor: "none"},
+ },
+ },
+ },
+ controlPosition: "bottom",
+ },
+ ]}
+ />
+
+
+ );
+}
+
+
+export default React.memo(LoginLineChart, (prevProps, nextProps) => {
+ // Prevent re-render if only `uniqueLogins` changes
+ return prevProps.uniqueLogins === nextProps.uniqueLogins;
+});
\ No newline at end of file
diff --git a/javascript/src/components/Dashboard/loginSpPieChart.js b/javascript/src/components/Dashboard/loginSpPieChart.js
new file mode 100644
index 0000000..cfed697
--- /dev/null
+++ b/javascript/src/components/Dashboard/loginSpPieChart.js
@@ -0,0 +1,128 @@
+import React, {useState, useEffect} from "react";
+import {Chart} from "react-google-charts";
+import {useCookies} from "react-cookie";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import 'bootstrap/dist/css/bootstrap.min.css';
+import "jquery/dist/jquery.min.js";
+import $ from "jquery";
+import {loginsPerSpKey} from "../../utils/queryKeys";
+import {getLoginsPerSP} from "../../utils/queries";
+import {useQuery} from "react-query";
+import {optionsPieChart} from "../../utils/helpers/enums";
+import {convertDateByGroup, formatStartDate, formatEndDate} from "../Common/utils";
+
+var spsArray = [];
+
+const LoginSpPieChart = ({
+ idpId,
+ tenenvId,
+ uniqueLogins,
+ goToSpecificProviderHandler
+ }) => {
+ let spsChartArray = [["Service Provider", "Logins"]];
+ const [sps, setSps] = useState(spsChartArray);
+ const oneYearAgo = new Date();
+ oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);
+ formatStartDate(oneYearAgo)
+
+ const today = new Date();
+ today.setDate(today.getDate() - 1);
+ formatEndDate(today)
+
+ const params = {
+ params:
+ {
+ tenenv_id: tenenvId,
+ unique_logins: uniqueLogins,
+ idp: idpId,
+ }
+ }
+
+ const [cookies, setCookie] = useCookies();
+ const permissions = cookies.permissions
+
+ const loginsPerSp = useQuery(
+ [loginsPerSpKey, params],
+ getLoginsPerSP,
+ {
+ refetchOnWindowFocus: false,
+ enabled: false
+ }
+ )
+
+ useEffect(() => {
+ loginsPerSp.refetch()
+ .then(response => {
+ response?.data?.forEach(element => {
+ spsChartArray.push([element.name, element.count])
+ spsArray.push([element.id, element.name, element.identifier])
+ })
+ setSps(spsChartArray)
+ })
+ }, [uniqueLogins])
+
+ // XXX Google Chart will not work if we return empty and then
+ // try to reload
+ // if (sps.length === 1
+ // && (loginsPerSp.isLoading
+ // || loginsPerSp.isFetching)
+ // ) {
+ // return null
+ // }
+
+ return (
+
+
+
+
Overall number of logins per SP
+
+ Data loading}
+ width={"100%"}
+ height={"400px"}
+ className="pieChart"
+ chartEvents={[
+ {
+ eventName: "ready",
+ callback: ({chartWrapper, google}) => {
+ const chart = chartWrapper.getChart();
+
+ if (cookies.userinfo != undefined
+ && !!permissions?.actions?.service_providers?.['view']) {
+ google.visualization.events.addListener(chart, 'click', selectHandler);
+ }
+ google.visualization.events.addListener(chart, 'onmouseover', showTooltip);
+ google.visualization.events.addListener(chart, 'onmouseout', hideTooltip);
+
+ function showTooltip(entry) {
+
+ chart.setSelection([{row: entry.row}]);
+ $('.pieChart').css('cursor', 'pointer')
+ }
+
+ function hideTooltip() {
+ chart.setSelection([]);
+ $('.pieChart').css('cursor', 'default')
+ }
+
+ function selectHandler() {
+ var selection = chart.getSelection();
+ if (selection.length) {
+ var identifier = spsArray[selection[0].row];
+ goToSpecificProviderHandler(identifier[0], "sp")
+ }
+ }
+ }
+ }
+ ]}
+ />
+
+
+ );
+}
+
+export default LoginSpPieChart
\ No newline at end of file
diff --git a/javascript/src/components/Dashboard/loginTiles.js b/javascript/src/components/Dashboard/loginTiles.js
new file mode 100644
index 0000000..4986ce7
--- /dev/null
+++ b/javascript/src/components/Dashboard/loginTiles.js
@@ -0,0 +1,209 @@
+import { useState, useEffect } from "react";
+import { useQuery } from "react-query";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import 'bootstrap/dist/css/bootstrap.min.css';
+import { loginsCountByKey } from "../../utils/queryKeys";
+import { getLoginsCountBy } from "../../utils/queries";
+
+// Dashboard tile strip showing login totals for four fixed time windows:
+// last year, last 30 days, last 7 days, and last 2 days ("yesterday").
+// `parameters` is the raw props object; keys read here: tenenvId,
+// uniqueLogins, and optional idpId / spId to scope the counts.
+// NOTE(review): the JSX in the return block appears stripped/garbled in
+// this diff view, so render markup cannot be documented precisely.
+const LoginTiles = (parameters) => {
+ // Map of tile key ("<interval>_<count_interval>") -> login count.
+ const [tiles, setTiles] = useState({});
+
+ // Build the react-query cache key for a given set of request params.
+ const generateQueryKey = (params) => {
+ return [loginsCountByKey, { params }];
+ };
+
+ // Overall (unbounded) login count.
+ // NOTE(review): this refetch function is declared but never invoked —
+ // the Promise.all below only calls the four interval-bound queries, so
+ // the "overall" branch in the results loop is dead. Confirm whether an
+ // "overall" tile was intended before removing.
+ const { refetch: getAllLoginsCount } = useQuery(
+ generateQueryKey({
+ 'tenenv_id': parameters['tenenvId'],
+ 'unique_logins': parameters['uniqueLogins'],
+ 'idpId': parameters['idpId'] !== undefined ? parameters['idpId'] : null,
+ 'spId': parameters['spId'] !== undefined ? parameters['spId'] : null
+ }),
+ getLoginsCountBy,
+ { enabled: false, refetchOnWindowFocus: false }
+ );
+
+ // Logins during the last 1 year. All four queries are registered
+ // disabled (enabled: false) and fetched manually in the effect below.
+ const { refetch: getLastYearLoginsCount } = useQuery(
+ generateQueryKey({
+ 'interval': 'year',
+ 'count_interval': '1',
+ 'tenenv_id': parameters['tenenvId'],
+ 'unique_logins': parameters['uniqueLogins'],
+ 'idpId': parameters['idpId'] !== undefined ? parameters['idpId'] : null,
+ 'spId': parameters['spId'] !== undefined ? parameters['spId'] : null
+ }),
+ getLoginsCountBy,
+ { enabled: false, refetchOnWindowFocus: false }
+ );
+
+ // Logins during the last 30 days.
+ const { refetch: getLastMonthLoginsCount } = useQuery(
+ generateQueryKey({
+ 'interval': 'days',
+ 'count_interval': '30',
+ 'tenenv_id': parameters['tenenvId'],
+ 'unique_logins': parameters['uniqueLogins'],
+ 'idpId': parameters['idpId'] !== undefined ? parameters['idpId'] : null,
+ 'spId': parameters['spId'] !== undefined ? parameters['spId'] : null
+ }),
+ getLoginsCountBy,
+ { enabled: false, refetchOnWindowFocus: false }
+ );
+
+ // Logins during the last 7 days.
+ const { refetch: getLastWeekLoginsCount } = useQuery(
+ generateQueryKey({
+ 'interval': 'days',
+ 'count_interval': '7',
+ 'tenenv_id': parameters['tenenvId'],
+ 'unique_logins': parameters['uniqueLogins'],
+ 'idpId': parameters['idpId'] !== undefined ? parameters['idpId'] : null,
+ 'spId': parameters['spId'] !== undefined ? parameters['spId'] : null
+ }),
+ getLoginsCountBy,
+ { enabled: false, refetchOnWindowFocus: false }
+ );
+
+ // Logins during the last 2 days (rendered as "Yesterday's Logins").
+ const { refetch: getYesterdayLoginsCount } = useQuery(
+ generateQueryKey({
+ 'interval': 'days',
+ 'count_interval': '2',
+ 'tenenv_id': parameters['tenenvId'],
+ 'unique_logins': parameters['uniqueLogins'],
+ 'idpId': parameters['idpId'] !== undefined ? parameters['idpId'] : null,
+ 'spId': parameters['spId'] !== undefined ? parameters['spId'] : null
+ }),
+ getLoginsCountBy,
+ { enabled: false, refetchOnWindowFocus: false }
+ );
+
+
+ // Fetch all four counts in parallel whenever the unique-logins toggle
+ // changes. Each promise carries its request params along so the result
+ // can be keyed as "<interval>_<count_interval>" in state.
+ useEffect(() => {
+ const handleRefetch = async () => {
+
+ const results = await Promise.all([
+
+ getLastYearLoginsCount()
+ .then((response) =>
+ ({
+ response, params:
+ {
+ 'interval': 'year',
+ 'count_interval': '1',
+ 'tenenv_id': parameters['tenenvId'],
+ 'unique_logins': parameters['uniqueLogins'],
+ 'idpId': parameters['idpId'] !== undefined ? parameters['idpId'] : null,
+ 'spId': parameters['spId'] !== undefined ? parameters['spId'] : null
+ }
+ })
+ ),
+ getLastMonthLoginsCount()
+ .then((response) =>
+ ({
+ response, params:
+ {
+ 'interval': 'days',
+ 'count_interval': '30',
+ 'tenenv_id': parameters['tenenvId'],
+ 'unique_logins': parameters['uniqueLogins'],
+ 'idpId': parameters['idpId'] !== undefined ? parameters['idpId'] : null,
+ 'spId': parameters['spId'] !== undefined ? parameters['spId'] : null
+ }
+ })
+ ),
+ getLastWeekLoginsCount()
+ .then((response) =>
+ ({
+ response, params:
+ {
+ 'interval': 'days',
+ 'count_interval': '7',
+ 'tenenv_id': parameters['tenenvId'],
+ 'unique_logins': parameters['uniqueLogins'],
+ 'idpId': parameters['idpId'] !== undefined ? parameters['idpId'] : null,
+ 'spId': parameters['spId'] !== undefined ? parameters['spId'] : null
+ }
+ })
+ ),
+ getYesterdayLoginsCount()
+ .then((response) =>
+ ({
+ response, params:
+ {
+ 'interval': 'days',
+ 'count_interval': '2',
+ 'tenenv_id': parameters['tenenvId'],
+ 'unique_logins': parameters['uniqueLogins'],
+ 'idpId': parameters['idpId'] !== undefined ? parameters['idpId'] : null,
+ 'spId': parameters['spId'] !== undefined ? parameters['spId'] : null
+ }
+ })
+ )
+ ])
+
+ // Log the data to the console
+ // You would do something with both sets of data here
+ var tilesArray = {}
+ results.forEach(({ response, params }, index) => {
+ const { data } = response;
+
+ data.forEach(element => {
+ if (params["interval"]) {
+ var name = params["interval"] + "_" + params["count_interval"]
+ // [[name]] is a one-element array used as a computed key; JS
+ // coerces it to the string `name`, so this is tilesArray[name].
+ tilesArray[[name]] = (element["count"] != null) ? element["count"] : 0
+ }
+ else {
+ // Dead branch: every refetch above sends an 'interval' param.
+ tilesArray["overall"] = (element["count"] != null) ? element["count"] : 0
+ }
+
+ })
+ });
+ setTiles(tilesArray)
+
+ }
+ handleRefetch()
+
+ }, [parameters["uniqueLogins"]])
+
+ // One tile per window, read from the `tiles` state map.
+ return (
+
+
+
+
+
+
+
+
 {tiles["year_1"]}
+
 Last Year Logins
+
+
+
+
+
+
+
 {tiles["days_30"]}
+
 Last 30 days Logins
+
+
+
+
+
+
+
 {tiles["days_7"]}
+
 Last 7 days Logins
+
+
+
+
+
+
+
 {tiles["days_2"]}
+
 Yesterday's Logins
+
+
+
+
+
+ )
+}
+
+export default LoginTiles
\ No newline at end of file
diff --git a/javascript/src/components/Dashboard/loginsMap.js b/javascript/src/components/Dashboard/loginsMap.js
new file mode 100644
index 0000000..435369d
--- /dev/null
+++ b/javascript/src/components/Dashboard/loginsMap.js
@@ -0,0 +1,85 @@
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import 'jquery-mapael';
+import 'jquery-mapael/js/maps/world_countries_mercator.js';
+import {useQuery, useQueryClient} from "react-query";
+import {loginsPerCountryKey, minDateLoginsKey} from "../../utils/queryKeys";
+import {getLoginsPerCountry, getMinDateLogins} from "../../utils/queries";
+import EarthMap from "../Common/earthMap";
+import DateRange from "../Common/dateRange";
+import Spinner from "../Common/spinner";
+import React from "react";
+import {format} from "date-fns";
+
+const LoginsMap = ({
+ startDate,
+ endDate,
+ tenenvId,
+ uniqueLogins
+ }) => {
+
+ let params = {
+ params: {
+ 'startDate': !startDate ? null : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? null : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'tenenv_id': tenenvId,
+ 'unique_logins': uniqueLogins
+ }
+ }
+
+ const loginsPerCountry = useQuery(
+ [loginsPerCountryKey, params],
+ getLoginsPerCountry,
+ {
+ // enabled: false, this caused problems rendering map
+ refetchOnWindowFocus: false
+ }
+ )
+
+ let paramsMinDate = {
+ params: {
+ 'startDate': !startDate ? null : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? null : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'tenenv_id': tenenvId,
+ 'unique_logins': true
+ }
+ }
+
+ const minDateLogins = useQuery(
+ [minDateLoginsKey, paramsMinDate],
+ getMinDateLogins,
+ {
+ enabled: false,
+ refetchOnWindowFocus: false
+ }
+ )
+
+ if (loginsPerCountry.isLoading
+ || loginsPerCountry.isFetching) {
+ return ( )
+ }
+
+ if(loginsPerCountry.length === 0) {
+ return null
+ }
+
+ return (
+
+
+
+
+
+
+
+
+
+
+ )
+}
+
+export default LoginsMap;
\ No newline at end of file
diff --git a/javascript/src/components/Idps/idpMap.js b/javascript/src/components/Idps/idpMap.js
new file mode 100644
index 0000000..372688e
--- /dev/null
+++ b/javascript/src/components/Idps/idpMap.js
@@ -0,0 +1,58 @@
+import React from "react";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import 'jquery-mapael';
+import 'jquery-mapael/js/maps/world_countries_mercator.js';
+import {useQuery} from "react-query";
+import {loginsPerCountryKey} from "../../utils/queryKeys";
+import {getLoginsPerCountry} from "../../utils/queries";
+import EarthMap from "../Common/earthMap";
+import Spinner from "../Common/spinner";
+import {format} from "date-fns";
+
+const IdpMap = ({
+ tenenvId,
+ idpId,
+ uniqueLogins,
+ startDate,
+ endDate
+ }) => {
+
+ let params = {
+ params: {
+ 'tenenv_id': tenenvId,
+ 'unique_logins': uniqueLogins,
+ 'idpId': idpId,
+ 'startDate': !startDate ? null : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? null : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ }
+ }
+ const loginsPerCountry = useQuery(
+ [loginsPerCountryKey, params],
+ getLoginsPerCountry
+ )
+
+ if (loginsPerCountry.isLoading
+ || loginsPerCountry.isFetching) {
+ return ( )
+ }
+
+ if(loginsPerCountry.length === 0) {
+ return null
+ }
+
+ return (
+
+
+
+
Logins Per Country
+
+
+
+
+ )
+}
+
+export default IdpMap;
\ No newline at end of file
diff --git a/javascript/src/components/Idps/idpMapToDataTable.js b/javascript/src/components/Idps/idpMapToDataTable.js
new file mode 100644
index 0000000..7f2c435
--- /dev/null
+++ b/javascript/src/components/Idps/idpMapToDataTable.js
@@ -0,0 +1,106 @@
+import React, {useState, useEffect} from "react";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import $ from "jquery";
+import "jquery/dist/jquery.min.js";
+import Datatable from "../datatable";
+import 'jquery-mapael';
+import 'jquery-mapael/js/maps/world_countries_mercator.js';
+import {useQuery, useQueryClient} from "react-query";
+import {loginsPerCountryKey} from "../../utils/queryKeys";
+import {getLoginsPerCountry} from "../../utils/queries";
+import Spinner from "../Common/spinner";
+import {format} from "date-fns";
+
+// Tabular companion to the IdP map: lists logins per country for one
+// Identity Provider in a jQuery DataTable.
+// NOTE(review): the JSX in the return block appears stripped/garbled in
+// this diff view, so render markup cannot be documented precisely.
+const IdpMapToDataTable = ({
+ startDate,
+ endDate,
+ tenenvId,
+ uniqueLogins,
+ idpId
+ }) => {
+ // Rows for the DataTable: [{Countries, "Number of Logins"}, ...].
+ const [loginsPerCountryData, setLoginsPerCountryData] = useState([]);
+ const queryClient = useQueryClient();
+
+ // API expects "yyyy-MM-dd'T'HH:mm:ss'Z'" strings, or null when unset.
+ let params = {
+ params: {
+ 'startDate': !startDate ? null : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? null : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'tenenv_id': tenenvId,
+ 'unique_logins': uniqueLogins,
+ 'idpId': idpId
+ },
+
+ }
+
+ // Registered disabled; fetched via queryClient.refetchQueries below.
+ const loginsPerCountry = useQuery(
+ [loginsPerCountryKey, params],
+ getLoginsPerCountry,
+ {
+ enabled: false,
+ refetchOnWindowFocus: false
+ }
+ )
+
+ // Re-trigger the fetch when the unique-logins toggle flips. The params
+ // object is rebuilt so the refetch key matches the registered query key.
+ useEffect(() => {
+ params = {
+ params: {
+ 'startDate': !startDate ? null : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? null : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'tenenv_id': tenenvId,
+ 'unique_logins': uniqueLogins,
+ 'idpId': idpId
+ },
+
+ }
+
+ // NOTE(review): refetchQueries returns a promise; this try/catch
+ // cannot catch an async rejection, and `response` is unused.
+ try {
+ const response = queryClient.refetchQueries([loginsPerCountryKey, params])
+ } catch (error) {
+ // todo: Here we can handle any authentication or authorization errors
+ console.log(error)
+ }
+
+ }, [uniqueLogins])
+
+ // Construct the data required for the datatable
+ useEffect(() => {
+ // Falsy (not an array) until the query has settled successfully.
+ const loginsPerCountryArray = !loginsPerCountry.isLoading
+ && !loginsPerCountry.isFetching
+ && loginsPerCountry.isSuccess
+ && loginsPerCountry?.data?.map(element => ({
+ "Countries": element.country,
+ "Number of Logins": element.sum
+ }))
+
+ if (!!loginsPerCountry?.data && !!loginsPerCountryArray) {
+ // Destroy the old DataTable so it re-initialises with fresh rows.
+ $("#table-idp").DataTable().destroy()
+ setLoginsPerCountryData(loginsPerCountryArray)
+ }
+ // Dependency is a single boolean expression: the effect re-runs when
+ // the combined "settled successfully" flag flips.
+ }, [!loginsPerCountry.isLoading
+ && !loginsPerCountry.isFetching
+ && loginsPerCountry.isSuccess])
+
+ if (loginsPerCountry.isLoading
+ || loginsPerCountry.isFetching) {
+ return ( )
+ }
+
+ if (loginsPerCountryData.length === 0) {
+ return null
+ }
+
+ return (
+
+
+
+
 Logins Per Country
+
+
+
+
+ )
+}
+
+export default IdpMapToDataTable;
\ No newline at end of file
diff --git a/javascript/src/components/Idps/idpsDataTable.js b/javascript/src/components/Idps/idpsDataTable.js
new file mode 100644
index 0000000..13b0154
--- /dev/null
+++ b/javascript/src/components/Idps/idpsDataTable.js
@@ -0,0 +1,176 @@
+import React, {useState, useEffect} from "react";
+import "jquery/dist/jquery.min.js";
+import $ from "jquery";
+import Datatable from "../datatable";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import Button from 'react-bootstrap/Button';
+import DatePicker from "react-datepicker";
+import 'react-toastify/dist/ReactToastify.css';
+import 'react-dropdown/style.css';
+import "react-datepicker/dist/react-datepicker.css";
+import {useQuery, useQueryClient} from "react-query";
+import {loginsPerIdpKey, minDateLoginsKey} from "../../utils/queryKeys";
+import {getLoginsPerIdp, getMinDateLogins} from "../../utils/queries";
+import {useCookies} from "react-cookie";
+import {createAnchorElement, formatStartDate, formatEndDate} from "../Common/utils";
+import {toast} from "react-toastify";
+import Spinner from "../Common/spinner";
+import {format} from "date-fns";
+
+// DataTable of logins per Identity Provider (optionally scoped to one
+// SP via spId), with a From/To date-range filter.
+// Refetches when the unique-logins toggle flips or the Filter button is
+// pressed (btnPressed toggles to force the effect).
+// NOTE(review): the JSX in the return block appears stripped/garbled in
+// this diff view, so render markup cannot be documented precisely.
+const IdpsDataTable = ({
+ spId,
+ dataTableId = "table-idp",
+ tenenvId,
+ uniqueLogins,
+ setStartDate,
+ setEndDate,
+ startDate,
+ endDate
+ }) => {
+ const [cookies, setCookie] = useCookies();
+ const permissions = cookies.permissions
+ const tenant = cookies['x-tenant']
+ const environment = cookies['x-environment']
+
+ // Rows for the DataTable, one per IdP.
+ const [idpsLogins, setIdpsLogins] = useState([]);
+ // Earliest login date, used as the DatePicker lower bound.
+ const [minDate, setMinDate] = useState("");
+ const [btnPressed, setBtnPressed] = useState(false);
+ const queryClient = useQueryClient();
+
+ // API expects "yyyy-MM-dd'T'HH:mm:ss'Z'" strings, or null when unset.
+ let params = {
+ params: {
+ 'startDate': !startDate ? null : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? null : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'sp': spId,
+ 'tenenv_id': tenenvId,
+ 'unique_logins': uniqueLogins
+ },
+ }
+
+ // NOTE(review): "Ipd" is a typo for "Idp" — rename candidate (local
+ // identifier only, no external impact).
+ const loginsPerIpd = useQuery(
+ [loginsPerIdpKey, params],
+ getLoginsPerIdp,
+ {
+ enabled: false,
+ refetchOnWindowFocus: false
+ }
+ )
+
+ const minDateLogins = useQuery(
+ [minDateLoginsKey, params],
+ getMinDateLogins,
+ {
+ enabled: false,
+ refetchOnWindowFocus: false
+ }
+ )
+
+ // Trigger both fetches on toggle/filter. Params are rebuilt so the
+ // refetch key matches the registered query key.
+ useEffect(() => {
+ params = {
+ params: {
+ 'startDate': !startDate ? null : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? null : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'sp': spId,
+ 'tenenv_id': tenenvId,
+ 'unique_logins': uniqueLogins
+ },
+ }
+
+ // NOTE(review): refetchQueries returns a promise; this try/catch
+ // cannot catch an async rejection, and `response` is unused.
+ try {
+ const response = queryClient.refetchQueries([loginsPerIdpKey, params])
+ queryClient.refetchQueries([minDateLoginsKey, {params:{tenenv_id: tenenvId}}])
+ } catch (error) {
+ // todo: Here we can handle any authentication or authorization errors
+ console.log(error)
+ }
+
+ }, [uniqueLogins, btnPressed])
+
+ // Construct the data required for the datatable
+ useEffect(() => {
+ // Falsy (not an array) until the query has settled successfully.
+ const perIdp = !loginsPerIpd.isLoading
+ && !loginsPerIpd.isFetching
+ && loginsPerIpd.isFetched
+ && loginsPerIpd.isSuccess
+ // NOTE(review): this renders a plain name when userinfo is ABSENT
+ // and the view permission is granted, otherwise a link — the
+ // sibling pie chart enables clicks when userinfo is PRESENT.
+ // Looks inverted; confirm the intended condition.
+ && loginsPerIpd?.data?.map(idp => ({
+ "Identity Provider Name": (cookies.userinfo == undefined && !!permissions?.actions?.identity_providers?.['view']) ? idp.name : createAnchorElement(idp.name, `/metrics/identity-providers/${idp.id}`),
+ "Identity Provider Identifier": idp.entityid,
+ "Number of Logins": idp.count
+ }))
+
+ if (!!loginsPerIpd?.data && !!perIdp) {
+ // This is essential: We must destroy the datatable in order to be refreshed with the new data
+ if (minDate == undefined || minDate == "") {
+ setMinDate(!!minDateLogins?.data?.min_date ? new Date(minDateLogins?.data?.min_date) : null)
+ }
+ $("#" + dataTableId).DataTable().destroy()
+ setIdpsLogins(perIdp)
+ }
+ }, [uniqueLogins, loginsPerIpd.isSuccess && minDateLogins.isSuccess])
+
+ // Normalise the picked start date; ignore null (cleared) values.
+ const handleStartDateChange = (date) => {
+
+ date = formatStartDate(date);
+ if (date != null) {
+ setStartDate(date);
+ }
+ };
+
+ const handleEndDateChange = (date) => {
+
+ //date = formatEndDate(date);
+ if (date != null) {
+ setEndDate(date);
+ }
+ };
+
+
+ // Filter button: require both bounds, then toggle btnPressed to force
+ // the refetch effect above.
+ const handleBtnclick = () => {
+ if (!startDate || !endDate) {
+ toast.warning("You have to fill both startDate and endDate")
+ return
+ }
+ setBtnPressed((prev) => !prev)
+ }
+
+ if (loginsPerIpd.isLoading
+ || loginsPerIpd.isFetching) {
+ return ( )
+ }
+
+ return (
+
+
+
+
 Number of logins
+
+
+
+ From:
+ To:
+ {/* Probably add a tooltip here that both fields are required */}
+
+ Filter
+
+
+
+
+
+
+ )
+
+}
+
+export default IdpsDataTable
\ No newline at end of file
diff --git a/javascript/src/components/Sps/spMap.js b/javascript/src/components/Sps/spMap.js
new file mode 100644
index 0000000..43b3f02
--- /dev/null
+++ b/javascript/src/components/Sps/spMap.js
@@ -0,0 +1,57 @@
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import 'jquery-mapael';
+import 'jquery-mapael/js/maps/world_countries_mercator.js';
+import {loginsPerCountryKey, loginsPerIdpKey} from "../../utils/queryKeys";
+import {getLoginsPerCountry} from "../../utils/queries";
+import {useQuery, useQueryClient} from "react-query";
+import EarthMap from "../Common/earthMap";
+import Spinner from "../Common/spinner";
+import React from "react";
+import {format} from "date-fns";
+
+const SpMap = ({
+ startDate,
+ endDate,
+ tenenvId,
+ uniqueLogins,
+ spId
+ }) => {
+ let params = {
+ params: {
+ 'startDate': !startDate ? null : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? null : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'tenenv_id': tenenvId,
+ 'unique_logins': uniqueLogins,
+ 'spId': spId
+ }
+ }
+ const loginsPerCountry = useQuery(
+ [loginsPerCountryKey, params],
+ getLoginsPerCountry
+ )
+
+ if (loginsPerCountry.isLoading
+ || loginsPerCountry.isFetching) {
+ return ( )
+ }
+
+ if (loginsPerCountry.length === 0) {
+ return null
+ }
+
+ return (
+
+
+
+
Logins Per Country
+
+
+
+
+ )
+}
+
+export default SpMap;
\ No newline at end of file
diff --git a/javascript/src/components/Sps/spMapToDataTable.js b/javascript/src/components/Sps/spMapToDataTable.js
new file mode 100644
index 0000000..fb52988
--- /dev/null
+++ b/javascript/src/components/Sps/spMapToDataTable.js
@@ -0,0 +1,62 @@
+import React from "react";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import $ from "jquery";
+import "jquery/dist/jquery.min.js";
+import Datatable from "../datatable";
+import 'jquery-mapael';
+import 'jquery-mapael/js/maps/world_countries_mercator.js';
+import {useQuery} from "react-query";
+import {loginsPerCountryKey} from "../../utils/queryKeys";
+import {getLoginsPerCountry} from "../../utils/queries";
+import {format} from "date-fns";
+
+// Tabular companion to the SP map: lists logins per country for one
+// Service Provider in a jQuery DataTable.
+// NOTE(review): the JSX in the return block appears stripped/garbled in
+// this diff view, so render markup cannot be documented precisely.
+const SpMapToDataTable = ({
+ startDate,
+ endDate,
+ tenenvId,
+ uniqueLogins,
+ spId
+ }) => {
+
+ // API expects "yyyy-MM-dd'T'HH:mm:ss'Z'" strings, or null when unset.
+ let params = {
+ params: {
+ 'startDate': !startDate ? null : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? null : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'tenenv_id': tenenvId,
+ 'unique_logins': uniqueLogins,
+ 'spId': spId
+ }
+ }
+
+ // No options object: react-query defaults apply (fetches eagerly).
+ const loginsPerCountry = useQuery(
+ [loginsPerCountryKey, params],
+ getLoginsPerCountry
+ )
+
+ // Falsy (not an array) until the query has settled successfully;
+ // computed on every render (no state/effect in this component).
+ const loginsPerCountryArray = !loginsPerCountry.isLoading
+ && !loginsPerCountry.isFetching
+ && loginsPerCountry.isSuccess
+ && loginsPerCountry?.data?.map(element => ({
+ "Countries": element.country,
+ "Number of Logins": element.sum
+ }))
+
+ // NOTE(review): a DOM side effect executed directly in the render body
+ // — unlike the sibling IdpMapToDataTable, which does this inside a
+ // useEffect. Consider moving into an effect.
+ if (!!loginsPerCountryArray) {
+ $("#table-sp").DataTable().destroy()
+ }
+
+ return (
+
+
+
+
 Logins Per Country
+
+
+
+
+ )
+}
+
+export default SpMapToDataTable;
\ No newline at end of file
diff --git a/javascript/src/components/Sps/spsDataTable.js b/javascript/src/components/Sps/spsDataTable.js
new file mode 100644
index 0000000..73f5bd2
--- /dev/null
+++ b/javascript/src/components/Sps/spsDataTable.js
@@ -0,0 +1,165 @@
+import React, {useState, useEffect} from "react";
+import "jquery/dist/jquery.min.js";
+import $ from "jquery";
+import Datatable from "../datatable";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import Button from 'react-bootstrap/Button';
+import DatePicker from "react-datepicker";
+import 'react-toastify/dist/ReactToastify.css';
+import 'react-dropdown/style.css';
+import "react-datepicker/dist/react-datepicker.css";
+import {useQuery, useQueryClient} from "react-query";
+import {loginsPerSpKey, minDateLoginsKey} from "../../utils/queryKeys";
+import {getLoginsPerSP, getMinDateLogins} from "../../utils/queries";
+import {useCookies} from "react-cookie";
+import {createAnchorElement, formatStartDate, formatEndDate} from "../Common/utils";
+import Spinner from "../Common/spinner";
+import {format} from "date-fns";
+
+// DataTable of logins per Service Provider (optionally scoped to one
+// IdP via idpId), with a From/To date-range filter.
+// Refetches when the unique-logins toggle flips or the Filter button is
+// pressed (btnPressed toggles to force the effect).
+// NOTE(review): the JSX in the return block appears stripped/garbled in
+// this diff view, so render markup cannot be documented precisely.
+const SpsDataTable = ({
+ idpId,
+ dataTableId = "table-sp",
+ tenenvId,
+ uniqueLogins,
+ setStartDate,
+ setEndDate,
+ startDate,
+ endDate
+ }) => {
+ const [cookies, setCookie] = useCookies();
+ const permissions = cookies.permissions
+ const tenant = cookies['x-tenant']
+ const environment = cookies['x-environment']
+
+ // Rows for the DataTable, one per SP.
+ const [spsLogins, setSpsLogins] = useState([]);
+ // Earliest login date, used as the DatePicker lower bound.
+ const [minDate, setMinDate] = useState("");
+ const [btnPressed, setBtnPressed] = useState(false);
+ const queryClient = useQueryClient();
+
+ // API expects "yyyy-MM-dd'T'HH:mm:ss'Z'" strings, or null when unset.
+ let params = {
+ params: {
+ 'startDate': !startDate ? null : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? null : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'idp': idpId,
+ 'tenenv_id': tenenvId,
+ 'unique_logins': uniqueLogins
+ },
+ }
+
+ // NOTE(review): unlike the minDate query below (and the sibling
+ // IdpsDataTable), this omits refetchOnWindowFocus: false.
+ const loginsPerSp = useQuery(
+ [loginsPerSpKey, params],
+ getLoginsPerSP,
+ {
+ enabled: false
+ }
+ )
+
+ const minDateLogins = useQuery(
+ [minDateLoginsKey, params],
+ getMinDateLogins,
+ {
+ enabled: false,
+ refetchOnWindowFocus: false
+ }
+ )
+
+ // Trigger both fetches on toggle/filter. Params are rebuilt so the
+ // refetch key matches the registered query key.
+ useEffect(() => {
+ params = {
+ params: {
+ 'startDate': !startDate ? null : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? null : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'idp': idpId,
+ 'tenenv_id': tenenvId,
+ 'unique_logins': uniqueLogins
+ },
+ }
+
+ // NOTE(review): refetchQueries returns a promise; this try/catch
+ // cannot catch an async rejection, and `response` is unused.
+ try {
+ const response = queryClient.refetchQueries([loginsPerSpKey, params])
+ queryClient.refetchQueries([minDateLoginsKey, {params:{tenenv_id: tenenvId}}])
+ } catch (error) {
+ // todo: Here we can handle any authentication or authorization errors
+ console.log(error)
+ }
+
+ }, [uniqueLogins, btnPressed])
+
+ // Construct the data required for the datatable
+ useEffect(() => {
+ // Falsy (not an array) until the query has settled successfully.
+ const perSp = !loginsPerSp.isLoading
+ && !loginsPerSp.isFetching
+ && loginsPerSp.isSuccess
+ // NOTE(review): plain name when userinfo is ABSENT and the view
+ // permission is granted, otherwise a link — looks inverted
+ // compared to the pie chart's click-enable check; confirm.
+ && loginsPerSp?.data?.map(sp => ({
+ "Service Provider Name": (cookies.userinfo == undefined && !!permissions?.actions?.service_providers?.['view']) ? sp.name : createAnchorElement(sp.name, `/metrics/services/${sp.id}`),
+ "Service Provider Identifier": sp.identifier,
+ "Number of Logins": sp.count
+ }))
+
+ if (!!loginsPerSp?.data && !!perSp) {
+ if (minDate == undefined || minDate == "") {
+ setMinDate(!!minDateLogins?.data?.min_date ? new Date(minDateLogins?.data?.min_date) : null)
+ }
+ // This is essential: We must destroy the datatable in order to be refreshed with the new data
+ $("#" + dataTableId).DataTable().destroy()
+ setSpsLogins(perSp)
+ }
+ }, [uniqueLogins, loginsPerSp.isSuccess && minDateLogins.isSuccess])
+
+ // Normalise the picked start date; ignore null (cleared) values.
+ const handleStartDateChange = (date) => {
+
+ date = formatStartDate(date);
+ if (date != null) {
+ setStartDate(date);
+ }
+ };
+
+ const handleEndDateChange = (date) => {
+
+ //date = formatEndDate(date);
+ if (date != null) {
+ setEndDate(date);
+ }
+ };
+
+
+ if (loginsPerSp.isLoading
+ || loginsPerSp.isFetching) {
+ return ( )
+ }
+
+ return (
+
+
+
+
 Number of logins
+
+
+
+ From:
+ To:
+ {/* Probably add a tooltip here that both fields are required */}
+ setBtnPressed((prev) => !prev)}>
+ Filter
+
+
+
+
+
+
+ )
+
+}
+
+export default SpsDataTable
\ No newline at end of file
diff --git a/javascript/src/components/Users/registeredUsersChart.js b/javascript/src/components/Users/registeredUsersChart.js
new file mode 100644
index 0000000..dd54e49
--- /dev/null
+++ b/javascript/src/components/Users/registeredUsersChart.js
@@ -0,0 +1,138 @@
+import React, {useState, useEffect} from "react";
+import {Chart} from "react-google-charts";
+import {
+ axisChartOptions,
+ convertDateByGroup,
+ getWeekNumber
+} from "../Common/utils";
+import Select from 'react-select';
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import 'bootstrap/dist/css/bootstrap.min.css';
+import {
+ regUsersOptions,
+ options_group_by
+} from "../../utils/helpers/enums";
+import {useQuery, useQueryClient} from "react-query";
+import {registeredUsersGroupByKey} from "../../utils/queryKeys";
+import {getRegisteredUsersGroupBy} from "../../utils/queries";
+
+const RegisteredUsersChart = ({
+ tenenvId
+ }) => {
+ const [selected, setSelected] = useState(options_group_by[0].value);
+ const [registeredUsers, setRegisteredUsers] = useState([]);
+ const [global_options, setGlobalOptions] = useState();
+ const queryClient = useQueryClient();
+ const controller = new AbortController
+
+ let params = {
+ params: {
+ 'interval': selected,
+ 'count_interval': regUsersOptions[selected]["count_interval"],
+ 'tenenv_id': tenenvId
+ },
+ signa: controller.signal
+ }
+
+ const registeredUsersGroup = useQuery(
+ [registeredUsersGroupByKey, {groupBy: selected, params: params}],
+ getRegisteredUsersGroupBy,
+ {
+ /*enabled: false, this caused problems of fetching data*/
+ refetchOnWindowFocus: false
+ }
+ )
+
+ useEffect(() => {
+ params = {
+ params: {
+ 'interval': selected,
+ 'count_interval': regUsersOptions[selected]["count_interval"],
+ 'tenenv_id': tenenvId,
+ },
+ signal: controller.signal
+ }
+
+ try {
+ const response = queryClient.refetchQueries([registeredUsersGroupByKey, {groupBy: selected, params: params}])
+ } catch (error) {
+ // todo: Here we can handle any authentication or authorization errors
+ console.log(error)
+ }
+
+ return () => {
+ controller.abort()
+ }
+
+ }, [selected, tenenvId])
+
+
+ // Construct the data required for the datatable
+ useEffect(() => {
+ if (!registeredUsersGroup.isLoading
+ && !registeredUsersGroup.isFetching
+ && registeredUsersGroup.isSuccess
+ && !!registeredUsersGroup.data) {
+
+ const hticksArray = registeredUsersGroup?.data?.map(element => ({
+ v: new Date(element?.range_date),
+ f: selected === "week" ? getWeekNumber(new Date(element?.range_date)) : new Date(element?.range_date)
+ })
+ )
+
+ let fValues = [
+ ['Date',
+ 'Count',
+ {
+ 'type': 'string',
+ 'role': 'tooltip',
+ 'p': {'html': true}
+ }
+ ]
+ ]
+
+ const charData = registeredUsersGroup?.data?.map(element => ([
+ new Date(element?.range_date),
+ parseInt(element['count']),
+ `${convertDateByGroup(new Date(element?.range_date), selected)} Communities: ${parseInt(element['count'])}
`
+ ])
+ )
+
+ setRegisteredUsers(fValues.concat(charData))
+ setGlobalOptions(axisChartOptions(regUsersOptions[selected]["title"],
+ regUsersOptions[selected]["hAxis"]["format"],
+ hticksArray))
+ }
+ }, [!registeredUsersGroup.isLoading
+ && !registeredUsersGroup.isFetching
+ && registeredUsersGroup.isSuccess])
+
+ // XXX Google Chart will not work if we return empty and then
+ // try to reload
+ // if (registeredUsersGroup.isLoading
+ // || registeredUsersGroup.isFetching
+ // || registeredUsers?.length === 0) {
+ // return null
+ // }
+
+ return
+
+
Number of Registered Users
+
+ Select Period:
+ setSelected(event?.value)}/>
+
+
+ Data loading}
+ data={registeredUsers}
+ options={global_options}/>
+
+
+}
+
+export default RegisteredUsersChart
\ No newline at end of file
diff --git a/javascript/src/components/Users/registeredUsersDataTable.js b/javascript/src/components/Users/registeredUsersDataTable.js
new file mode 100644
index 0000000..f5c7302
--- /dev/null
+++ b/javascript/src/components/Users/registeredUsersDataTable.js
@@ -0,0 +1,199 @@
+import React, {useState, useEffect, useRef} from "react";
+import "jquery/dist/jquery.min.js";
+import $ from "jquery";
+import Datatable from "../datatable";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import DatePicker from "react-datepicker";
+import Dropdown from 'react-dropdown';
+import {toast} from 'react-toastify';
+import {dropdownOptions} from "../../utils/helpers/enums";
+import 'react-toastify/dist/ReactToastify.css';
+import 'react-dropdown/style.css';
+import "react-datepicker/dist/react-datepicker.css";
+import {useQuery, useQueryClient} from "react-query";
+import {loginsPerIdpKey, minDateRegisteredUsersKey, registeredUsersPerCountryGroupByKey} from "../../utils/queryKeys";
+import {getMinDateRegisteredUsers, getRegisteredUsersPerCountryGroupBy} from "../../utils/queries";
+import {useCookies} from "react-cookie";
+import Spinner from "../Common/spinner";
+import {format} from "date-fns";
+import {convertDateByGroup, formatStartDate, formatEndDate} from "../Common/utils";
+
+// DataTable of registered users per period and per country, with a
+// From/To date range and a group-by dropdown (default "month").
+// Changing a date resets groupBy to "" and shows a "Filter" placeholder
+// option until the user picks a grouping again.
+// NOTE(review): the JSX in the return block appears stripped/garbled in
+// this diff view, so render markup cannot be documented precisely.
+const RegisteredUsersDataTable = ({
+ tenenvId,
+ setStartDate,
+ setEndDate,
+ startDate,
+ endDate
+ }) => {
+ const dropdownRef = useRef(null);
+ const [cookies, setCookie] = useCookies();
+ // Rows for the DataTable, one per period.
+ const [usersPerCountryPerPeriod, setUsersPerCountryPerPeriod] = useState([]);
+ const [dropdownOptionsState, setDropdownOptions] = useState(dropdownOptions);
+ // Earliest registration date, used as the DatePicker lower bound.
+ const [minDate, setMinDate] = useState("");
+ const [groupBy, setGroupBy] = useState("month")
+ const queryClient = useQueryClient();
+ const controller = new AbortController();
+
+
+ // API expects "yyyy-MM-dd'T'HH:mm:ss'Z'" strings, or null when unset.
+ let params = {
+ params: {
+ 'startDate': !startDate ? null : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? null : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'tenenv_id': tenenvId
+ }
+ }
+
+ const registeredUsersPerCountryGroup = useQuery(
+ [registeredUsersPerCountryGroupByKey, {groupBy: groupBy, params: params}],
+ getRegisteredUsersPerCountryGroupBy,
+ {
+ /*enabled: false, this caused problems of fetching data*/
+ refetchOnWindowFocus: false
+ }
+ )
+
+ const minDateRegisteredUsers = useQuery(
+ [minDateRegisteredUsersKey, params],
+ getMinDateRegisteredUsers,
+ {
+ enabled: false,
+ refetchOnWindowFocus: false
+ }
+ )
+
+
+ // Refetch when the grouping changes; "" means "dates changed, waiting
+ // for the user to pick a grouping", so skip. Abort on cleanup.
+ useEffect(() => {
+ if (groupBy == '') {
+ return;
+ }
+ params = {
+ params: {
+ 'startDate': !startDate ? null : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? null : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'tenenv_id': tenenvId,
+ 'groupBy': groupBy
+ },
+ signal: controller.signal
+ }
+
+ // NOTE(review): refetchQueries returns a promise; this try/catch
+ // cannot catch an async rejection, and `response` is unused. The
+ // console.log below looks like leftover debugging.
+ try {
+ const response = queryClient.refetchQueries([registeredUsersPerCountryGroupByKey, {
+ groupBy: groupBy,
+ params: params
+ }])
+ queryClient.refetchQueries([minDateRegisteredUsersKey, {params:{tenenv_id: tenenvId}}])
+ console.log(params)
+ } catch (error) {
+ // todo: Here we can handle any authentication or authorization errors
+ console.log(error)
+ }
+
+ return () => {
+ controller.abort()
+ }
+
+ }, [groupBy])
+
+ // Construct the data required for the datatable
+ useEffect(() => {
+ // NOTE(review): leftover debug logging — remove before release.
+ console.log(registeredUsersPerCountryGroup)
+ const perPeriod = registeredUsersPerCountryGroup?.data?.map(user => ({
+ "Date": !!user?.range_date ? convertDateByGroup(new Date(user?.range_date), groupBy): null,
+ "Number of Registered Users": user?.count,
+ "Registered Users per country": user?.countries
+ }))
+
+ if (!!registeredUsersPerCountryGroup?.data && !!perPeriod) {
+ // This is essential: We must destroy the datatable in order to be refreshed with the new data
+ if (minDate == undefined || minDate == "") {
+ setMinDate(!!minDateRegisteredUsers?.data?.min_date ? new Date(minDateRegisteredUsers?.data?.min_date) : null)
+ }
+ $("#table-users").DataTable().destroy()
+ setUsersPerCountryPerPeriod(perPeriod)
+ }
+ }, [registeredUsersPerCountryGroup.isSuccess && minDateRegisteredUsers.isSuccess, groupBy])
+
+
+ // Ensure a neutral "Filter" placeholder option exists in the dropdown
+ // (shown after a date change invalidates the current grouping).
+ const handleAddOption = () => {
+ // Create a new option dynamically
+ const newOption = {value: '', label: 'Filter'};
+
+ // Check if the new option already exists in the options array
+ if (!dropdownOptionsState.some(option => option.value === newOption.value)) {
+ // If it doesn't exist, add it to the options array
+ setDropdownOptions([newOption, ...dropdownOptionsState]);
+ }
+ };
+
+ // NOTE(review): `dropdownRef.current.state.selected.label = ...`
+ // mutates react-dropdown's internal state directly — fragile; may
+ // break on a library upgrade.
+ const handleStartDateChange = (date) => {
+ if(groupBy!=''){
+ handleAddOption()
+ }
+ date = formatStartDate(date);
+ if(date != null) {
+ if(endDate!=date){
+ setGroupBy("")
+ }
+ setStartDate(date);
+ dropdownRef.current.state.selected.label = 'Filter';
+ }
+
+ };
+
+ const handleEndDateChange = (date) => {
+ if(groupBy!=''){
+ handleAddOption()
+ }
+ //date = formatEndDate(date);
+ if(date != null) {
+ if(endDate!=date){
+ setGroupBy("")
+ }
+ setEndDate(date);
+ dropdownRef.current.state.selected.label = 'Filter';
+ }
+
+ };
+
+ // Dropdown change: require both date bounds before applying a grouping.
+ const handleChange = (event) => {
+ if (!startDate || !endDate) {
+ toast.warning("You have to fill both startDate and endDate")
+ return
+ }
+ setGroupBy(event.value)
+ };
+
+ return (
+
+
+
+
 Number of logins
+
+
+
+ From:
+ To:
+
+
+
+ {
+
+ }
+
+
+ )
+}
+
+export default RegisteredUsersDataTable
\ No newline at end of file
diff --git a/javascript/src/components/Users/registeredUsersMap.js b/javascript/src/components/Users/registeredUsersMap.js
new file mode 100644
index 0000000..72d9ede
--- /dev/null
+++ b/javascript/src/components/Users/registeredUsersMap.js
@@ -0,0 +1,79 @@
+import React, {useEffect} from "react";
+import {useQuery, useQueryClient} from "react-query";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import 'jquery-mapael';
+import 'jquery-mapael/js/maps/world_countries_mercator.js';
+import {getRegisteredUsersByCountry} from "../../utils/queries";
+import {registeredUsersByCountryKey} from "../../utils/queryKeys";
+import EarthMap from "../Common/earthMap";
+import Spinner from "../Common/spinner";
+import {format} from "date-fns";
+
+const RegisteredUsersMap = ({
+ startDate,
+ endDate,
+ tenenvId
+ }) => {
+ const queryClient = useQueryClient();
+ const controller = new AbortController
+
+ let params = {
+ params: {
+ 'startDate': !startDate ? null : format(startDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'endDate': !endDate ? null : format(endDate, "yyyy-MM-dd'T'HH:mm:ss'Z'"),
+ 'tenenv_id': tenenvId,
+ },
+ signal: controller.signal
+ }
+
+ const registeredUsersByCountry = useQuery(
+ [registeredUsersByCountryKey, params],
+ getRegisteredUsersByCountry,
+ {
+ enabled: false,
+ refetchOnWindowFocus: false
+ }
+ )
+
+ useEffect(() => {
+ try {
+ var response = queryClient.refetchQueries([registeredUsersByCountryKey, params])
+ } catch (error) {
+ console.log(error)
+ }
+
+ return () => {
+ controller.abort()
+ }
+ }, [startDate, endDate, tenenvId])
+
+ if (registeredUsersByCountry.isLoading
+ || registeredUsersByCountry.isFetching
+ || registeredUsersByCountry.isRefetching) {
+ return ( )
+ }
+
+ if (registeredUsersByCountry.isIdle) {
+ return null
+ }
+
+ return (
+
+
+
+
Users Per Country
+
+
+
+
+
+
+
+
+ )
+}
+
+export default RegisteredUsersMap;
\ No newline at end of file
diff --git a/javascript/src/components/Users/registeredUsersTiles.js b/javascript/src/components/Users/registeredUsersTiles.js
new file mode 100644
index 0000000..1d2829e
--- /dev/null
+++ b/javascript/src/components/Users/registeredUsersTiles.js
@@ -0,0 +1,117 @@
+import React, { useState, useEffect } from "react";
+import { useQuery } from "react-query";
+import Row from 'react-bootstrap/Row';
+import Col from 'react-bootstrap/Col';
+import { registeredUsersCountByKey } from "../../utils/queryKeys";
+import { getRegisteredUsersCountby } from "../../utils/queries";
+
+
+
+const RegisteredUsersTiles = (parameters) => {
+ const [tiles, setTiles] = useState({});
+
+ const generateQueryKey = (params) => {
+ return [registeredUsersCountByKey, { params }];
+ };
+
+ const { refetch: getAllRegisteredUsersCount } = useQuery(
+ generateQueryKey({ tenenv_id: parameters['tenenvId'] }),
+ getRegisteredUsersCountby,
+ { enabled: false, refetchOnWindowFocus: false }
+ );
+ const { refetch: getLastYearRegisteredUsersCount } = useQuery(
+ generateQueryKey({ interval: 'year', count_interval: '1', tenenv_id: parameters['tenenvId'] }),
+ getRegisteredUsersCountby,
+ { enabled: false, refetchOnWindowFocus: false }
+ );
+ const { refetch: getLastMonthRegisteredUsersCount } = useQuery(
+ generateQueryKey({ interval: 'days', count_interval: '30', tenenv_id: parameters['tenenvId'] }),
+ getRegisteredUsersCountby,
+ { enabled: false, refetchOnWindowFocus: false }
+ );
+ const { refetch: getLastWeekRegisteredUsersCount } = useQuery(
+ generateQueryKey({ interval: 'days', count_interval: '7', tenenv_id: parameters['tenenvId'] }),
+ getRegisteredUsersCountby,
+ { enabled: false, refetchOnWindowFocus: false }
+ );
+
+ useEffect(() => {
+ const handleRefetch = async () => {
+
+ const results = await Promise.all([
+ getAllRegisteredUsersCount()
+ .then((response) =>
+ ({ response, params: { tenenv_id: parameters['tenenvId'] } })),
+ getLastYearRegisteredUsersCount()
+ .then((response) =>
+ ({ response, params: { interval: 'year', count_interval: '1', tenenv_id: parameters['tenenvId'] } })),
+ getLastMonthRegisteredUsersCount()
+ .then((response) =>
+ ({ response, params: { interval: 'days', count_interval: '30', tenenv_id: parameters['tenenvId'] } })),
+ getLastWeekRegisteredUsersCount()
+ .then((response) =>
+ ({ response, params: { interval: 'days', count_interval: '7', tenenv_id: parameters['tenenvId'] } })),
+ ])
+
+ var tilesArray = {}
+ results.forEach(({ response, params }, index) => {
+ const { data } = response;
+ data.forEach(element => {
+ if (params["interval"]) {
+ var name = params["interval"] + "_" + params["count_interval"]
+ tilesArray[[name]] = element["count"]
+ }
+ else {
+ tilesArray["overall"] = element["count"]
+ }
+
+ })
+ });
+
+ setTiles(tilesArray)
+ }
+ handleRefetch();
+ }, [])
+
+ return (
+
+
+
+
+
+
+
{tiles["overall"]}
+
Total Registered Users
+
+
+
+
+
+
+
{tiles["year_1"]}
+
Last Year Registered Users
+
+
+
+
+
+
+
{tiles["days_30"]}
+
Last 30 days Registered Users
+
+
+
+
+
+
+
{tiles["days_7"]}
+
Last 7 days Registered Users
+
+
+
+
+
+ )
+}
+
+export default RegisteredUsersTiles
\ No newline at end of file
diff --git a/javascript/src/components/datatable.js b/javascript/src/components/datatable.js
new file mode 100644
index 0000000..c32cbb4
--- /dev/null
+++ b/javascript/src/components/datatable.js
@@ -0,0 +1,235 @@
+import React, { Component } from "react";
+import { renderToString } from 'react-dom/server';
+import "jquery/dist/jquery.min.js";
+import "datatables.net-dt/js/dataTables.dataTables";
+import "datatables.net-dt/css/jquery.dataTables.min.css";
+import "datatables.net-buttons-dt/css/buttons.dataTables.min.css"
+import "datatables.net-buttons/js/dataTables.buttons.js";
+import "datatables.net-buttons/js/buttons.colVis.js";
+import "datatables.net-buttons/js/buttons.flash.js";
+import "datatables.net-buttons/js/buttons.html5.js";
+import "datatables.net-buttons/js/buttons.print.js";
+import pdfMake from "pdfmake/build/pdfmake";
+import pdfFonts from "pdfmake/build/vfs_fonts";
+import $ from "jquery";
+
+pdfMake.vfs = pdfFonts.pdfMake.vfs;
+
+
+var table;
+const title = '';
+
+class Datatable extends Component {
+
+ componentDidUpdate(prevProps, prevState) {
+ var dataTableId = this.props.dataTableId
+ if (prevProps.items !== this.props.items) {
+ this.setState({
+ items: this.props.items,
+ dataTableId: this.props.dataTableId
+ })
+ if (!$.fn.DataTable.isDataTable("#myTable")) {
+ setTimeout(function () {
+ table = $("#" + dataTableId).DataTable({
+ responsive: true,
+ pagingType: "full_numbers",
+ pageLength: 10,
+ //processing: true,
+ dom: "Bfrtip",
+ // select: {
+ // style: "single",
+ // },
+
+ buttons: [
+ {
+ extend: 'collection',
+ text: 'Export',
+ buttons: [
+ {
+ extend: 'copy',
+ exportOptions: {
+ stripHtml: false,
+ format: {
+ body: function (data, row, column, node) {
+ if (column === 3)
+ return data.replace(//g, "").replace(/<\/li>/g, ",").replace(//g, "").replace(/<\/ul>/g, "")
+ else
+ return data.replace(/(<([^>]+)>)/ig, "");
+ }
+ }
+ }
+ },
+ {
+ extend: 'excel',
+ title: title,
+ exportOptions: {
+ stripHtml: false,
+ format: {
+ body: function (data, row, column, node) {
+ if (column === 3)
+ return data.replace("", "").replace(/ /g, ", ").replace(/<\/li>/g, "").replace(//g, "").replace(/<\/ul>/g, "")
+ else
+ return data.replace(/(<([^>]+)>)/ig, "");
+ }
+ }
+ }
+ },
+ {
+ extend: 'csv',
+ title: title,
+ exportOptions: {
+ stripHtml: false,
+ format: {
+ body: function (data, row, column, node) {
+ if (column === 3)
+ return data.replace("", "").replace(/ /g, ", ").replace(/<\/li>/g, "").replace(//g, "").replace(/<\/ul>/g, "")
+ else
+ return data.replace(/(<([^>]+)>)/ig, "");
+ }
+ }
+ }
+ },
+ {
+ extend: 'pdfHtml5',
+ title: title,
+ exportOptions: {
+ stripHtml: false,
+ format: {
+ body: function (data, row, column, node) {
+ if (column === 3)
+ return data.replace(//g, "• ").replace(/<\/li>/g, "\n").replace(/