diff --git a/.github/workflows/chart-release-dispatcher.yaml b/.github/workflows/chart-release-dispatcher.yaml
index 8e325cd327..8562625e13 100644
--- a/.github/workflows/chart-release-dispatcher.yaml
+++ b/.github/workflows/chart-release-dispatcher.yaml
@@ -8,6 +8,7 @@ on:
       - master
       - staging
       - beta
+      - staging
     types:
       - completed
@@ -25,6 +26,20 @@ jobs:
         id: extract_branch
         run: |
           echo "branch=${{ github.event.workflow_run.head_branch }}" >> $GITHUB_OUTPUT
+      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
+      - uses: actions/checkout@v3
+        with:
+          token: ${{ secrets.my_pat }}
+          ref: ${{ steps.extract_branch.outputs.branch }}
+          fetch-depth: 0
+
+      - name: Get SHA of the branch
+        id: get_sha
+        run: |
+          branch_name=${{ steps.extract_branch.outputs.branch }}
+          sha=$(git rev-parse "refs/heads/$branch_name")
+          echo "GIT_SHA: $sha"
+          echo "sha=${sha}" >> $GITHUB_OUTPUT
 
       - name: Extract Repository Name
        id: extract_repo_name
@@ -32,6 +47,20 @@
          repo_name=$(basename $GITHUB_REPOSITORY)
          echo "repo_name=${repo_name}" >> $GITHUB_OUTPUT
 
+      - name: Get PR url and PR User
+        id: get_pr_url_user
+        run: |
+          head_sha=$(curl -s -H "Authorization: Bearer ${{ secrets.my_pat }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }}/jobs" | jq -r '.jobs[0].head_sha')
+          echo "Head SHA: $head_sha"
+          pr_url=$(curl -s -H "Authorization: Bearer ${{ secrets.my_pat }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/search/issues?q=sha:$head_sha+type:pr" | jq -r '.items[0].html_url')
+          pr_user=$(curl -s -H "Authorization: Bearer ${{ secrets.my_pat }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/search/issues?q=sha:$head_sha+type:pr" | jq -r '.items[0].user.login')
+          echo "pr_url=$pr_url" >> $GITHUB_OUTPUT
+          echo "pr_user=$pr_user" >> $GITHUB_OUTPUT
+
+      - name: echo PR_URL and PR_USER
+        run: |
+          echo "${{ steps.get_pr_url_user.outputs.pr_url }}"
+          echo "${{ steps.get_pr_url_user.outputs.pr_user }}"
       - name: Repository Dispatch
        uses: peter-evans/repository-dispatch@v2
        with:
@@ -42,6 +71,8 @@
            {
              "repo": {
                "name": "${{ steps.extract_repo_name.outputs.repo_name }}",
-               "branch": "${{ steps.extract_branch.outputs.branch }}"
+               "branch": "${{ steps.extract_branch.outputs.branch }}",
+               "pr_url": "${{ steps.get_pr_url_user.outputs.pr_url }}",
+               "pr_user": "${{ steps.get_pr_url_user.outputs.pr_user }}"
              }
-           }
+           }
\ No newline at end of file
diff --git a/.github/workflows/github-actions-pr-jira.yaml b/.github/workflows/github-actions-pr-jira.yaml
new file mode 100644
index 0000000000..76cd01ab38
--- /dev/null
+++ b/.github/workflows/github-actions-pr-jira.yaml
@@ -0,0 +1,14 @@
+name: GitHub-Jira Link Action
+run-name: ${{ github.actor }} is ensuring Jira ID is present in PR title
+on:
+  pull_request:
+    types: [opened, edited, synchronize, reopened]
+    branches: [main, staging, master, beta, develop, prod, development]
+
+jobs:
+  Enforce-GitHub-Jira-Link-Action:
+    runs-on: ubuntu-latest
+    if: ${{ !contains(fromJson('["main", "staging", "master", "beta", "develop", "prod", "development"]'), github.event.pull_request.head.ref) }}
+    steps:
+      - name: Enforce Pull Request Title includes Jira Issue Key
+        uses: ryanvade/enforce-pr-title-style-action@v2.1.1
\ No newline at end of file
diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml
index 0977cb36a2..05950fed3a 100644
--- a/.github/workflows/maven.yml
+++ b/.github/workflows/maven.yml
@@ -25,6 +25,7 @@ on:
       - beta
       - development
       - master
+      - staging
       - lineageondemand
 
 jobs:
@@ -51,16 +52,6 @@ jobs:
         run: echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})"
         id: get_branch
 
-      - name: Create Maven Settings
-        uses: s4u/maven-settings-action@v2.8.0
-        with:
-          servers: |
-            [{
-              "id": "github",
-              "username": "atlan-ci",
-              "password": "${{ secrets.my_pat }}"
-            }]
-
       - name: Build with Maven
         run: |
           branch_name=${{ steps.get_branch.outputs.branch }}
@@ -77,7 +68,7 @@
         shell: bash
 
       - name: Get version tag
-        run: echo "##[set-output name=version;]$(echo `git ls-remote https://${{ secrets.my_pat }}@github.com/atlanhq/${REPOSITORY_NAME}.git ${{ steps.get_branch.outputs.branch }} | awk '{ print $1}' | cut -c1-7`)abcd"
+        run: echo "##[set-output name=version;]$(echo `git ls-remote https://${{ secrets.ORG_PAT_GITHUB }}@github.com/atlanhq/${REPOSITORY_NAME}.git refs/heads/${{ steps.get_branch.outputs.branch }} | awk '{ print $1}' | cut -c1-7`)abcd"
         id: get_version
 
       - name: Set up Buildx
@@ -89,7 +80,7 @@
         with:
           registry: ghcr.io
           username: $GITHUB_ACTOR
-          password: ${{ secrets.my_pat }}
+          password: ${{ secrets.ORG_PAT_GITHUB }}
 
       - name: Build and push
         id: docker_build
@@ -104,7 +95,6 @@
           tags: |
             ghcr.io/atlanhq/${{ github.event.repository.name }}-${{ steps.get_branch.outputs.branch }}:latest
             ghcr.io/atlanhq/${{ github.event.repository.name }}-${{ steps.get_branch.outputs.branch }}:${{ steps.get_version.outputs.version }}
-
       - name: Scan Image
         uses: aquasecurity/trivy-action@master
         with:
diff --git a/.github/workflows/trivy-docker-scan.yml b/.github/workflows/trivy-docker-scan.yml
index 6be78e7552..f910348903 100644
--- a/.github/workflows/trivy-docker-scan.yml
+++ b/.github/workflows/trivy-docker-scan.yml
@@ -29,7 +29,7 @@ jobs:
           output: 'trivy-results-docker.sarif'
           exit-code: '1'
           #ignore-unfixed: true
-          severity: 'CRITICAL,HIGH,MEDIUM'
+          severity: 'CRITICAL,HIGH'
 
       - name: Upload Trivy Docker Scan Results To GitHub Security tab
         uses: github/codeql-action/upload-sarif@v2
diff --git a/README.txt b/README.txt
index bbe742e465..eaff11d621 100755
--- a/README.txt
+++ b/README.txt
@@ -71,4 +71,4 @@ Build Process
      distro/target/apache-atlas--storm-hook.tar.gz
      distro/target/apache-atlas--falcon-hook.tar.gz
 
-4. For more details on installing and running Apache Atlas, please refer to https://atlas.apache.org/#/Installation.
+4. For more details on installing and running Apache Atlas, please refer to https://atlas.apache.org/#/Installation
diff --git a/addons/models/0000-Area0/0010-base_model.json b/addons/models/0000-Area0/0010-base_model.json
index 4bfd6db963..46cadae18a 100644
--- a/addons/models/0000-Area0/0010-base_model.json
+++ b/addons/models/0000-Area0/0010-base_model.json
@@ -616,7 +616,7 @@
       ],
       "description": "Model to store auth service in Atlas",
       "serviceType": "atlan",
-      "typeVersion": "1.1",
+      "typeVersion": "1.2",
       "attributeDefs": [
         {
           "name": "authServiceType",
@@ -640,6 +640,17 @@
           "skipScrubbing": true,
           "includeInNotification": true
         },
+        {
+          "name": "abacService",
+          "typeName": "string",
+          "indexType": "STRING",
+          "cardinality": "SINGLE",
+          "isIndexable": false,
+          "isOptional": true,
+          "isUnique": false,
+          "skipScrubbing": true,
+          "includeInNotification": true
+        },
         {
           "name": "authServiceIsEnabled",
           "typeName": "boolean",
@@ -681,9 +692,19 @@
         "Asset"
       ],
       "serviceType": "atlan",
-      "typeVersion": "1.1",
+      "typeVersion": "1.2",
       "attributeDefs": [
+        {
+          "name": "policyFilterCriteria",
+          "typeName": "string",
+          "cardinality": "SINGLE",
+          "isIndexable": false,
+          "isOptional": true,
+          "isUnique": false,
+          "skipScrubbing": true,
+          "includeInNotification": true
+        },
         {
           "name": "policyType",
           "typeName": "AuthPolicyType",
@@ -1041,6 +1062,29 @@
           "includeInNotification": false
         }
       ]
+    },
+    {
+      "name": "StakeholderTitle",
+      "description": "Instance of a stakeholder title for Domains in Atlan",
+      "superTypes": [
+        "Asset"
+      ],
+      "serviceType": "atlan",
+      "typeVersion": "1.0",
+      "attributeDefs": [
+        {
+          "name": "stakeholderTitleDomainQualifiedNames",
+          "description": "qualified name array representing the Domains for which this StakeholderTitle is applicable",
+          "typeName": "array<string>",
+          "indexType": "STRING",
+          "isOptional": true,
+          "cardinality": "SET",
+          "isUnique": false,
+          "isIndexable": false,
+          "skipScrubbing": true,
+          "includeInNotification": false
+        }
+      ]
+    }
   ],
   "relationshipDefs": [
diff --git a/addons/override-policies/glossary_policies.json b/addons/override-policies/glossary_policies.json
new file mode 100644
index 0000000000..ed2eec66b6
--- /dev/null
+++ b/addons/override-policies/glossary_policies.json
@@ -0,0 +1,113 @@
+{
+    "entities":
+    [
+        {
+            "typeName": "AuthPolicy",
+            "customAttributes": {
+                "internalId": 28
+            },
+            "attributes":
+            {
+                "name": "READ_GLOSSARY",
+                "qualifiedName": "READ_GLOSSARY",
+                "policyCategory": "bootstrap",
+                "policySubCategory": "default",
+                "policyServiceName": "atlas",
+                "policyType": "allow",
+                "policyPriority": 1,
+                "policyUsers":
+                [],
+                "policyGroups":
+                [],
+                "policyRoles":
+                [
+                    "$admin",
+                    "$api-token-default-access"
+                ],
+                "policyResourceCategory": "ENTITY",
+                "policyResources":
+                [
+                    "entity-type:AtlasGlossary",
+                    "entity-classification:*",
+                    "entity:*"
+                ],
+                "policyActions":
+                [
+                    "entity-read"
+                ]
+            }
+        },
+        {
+            "typeName": "AuthPolicy",
+            "customAttributes": {
+                "internalId": 29
+            },
+            "attributes":
+            {
+                "name": "READ_TERM",
+                "qualifiedName": "READ_TERM",
+                "policyCategory": "bootstrap",
+                "policySubCategory": "default",
+                "policyServiceName": "atlas",
+                "policyType": "allow",
+                "policyPriority": 1,
+                "policyUsers":
+                [],
+                "policyGroups":
+                [],
+                "policyRoles":
+                [
+                    "$admin",
+                    "$api-token-default-access"
+                ],
+                "policyResourceCategory": "ENTITY",
+                "policyResources":
+                [
+                    "entity-type:AtlasGlossaryTerm",
+                    "entity-classification:*",
+                    "entity:*"
+                ],
+                "policyActions":
+                [
+                    "entity-read"
+                ]
+            }
+        },
+        {
+            "typeName": "AuthPolicy",
+            "customAttributes": {
"internalId": 30 + }, + "attributes": + { + "name": "READ_CATEGORY", + "qualifiedName": "READ_CATEGORY", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 1, + "policyUsers": + [], + "policyGroups": + [], + "policyRoles": + [ + "$admin", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": + [ + "entity-type:AtlasGlossaryCategory", + "entity-classification:*", + "entity:*" + ], + "policyActions": + [ + "entity-read" + ] + } + } + ] +} \ No newline at end of file diff --git a/addons/policies/bootstrap_admin_policies.json b/addons/policies/bootstrap_admin_policies.json index 3cd9d7a62f..5f301ad0af 100644 --- a/addons/policies/bootstrap_admin_policies.json +++ b/addons/policies/bootstrap_admin_policies.json @@ -49,6 +49,31 @@ "admin-task-cud" ] } + }, + { + "typeName": "AuthPolicy", + "attributes": { + "name": "ADMIN_ALLOW_FEATURE_FLAG_CUD", + "qualifiedName": "ADMIN_ALLOW_FEATURE_FLAG_CUD", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 1, + "policyUsers": [ + "service-account-atlan-argo", + "service-account-atlan-backend" + ], + "policyGroups": [], + "policyRoles": [], + "policyResourceCategory": "ADMIN", + "policyResources": [ + "atlas-service:*" + ], + "policyActions": [ + "admin-featureFlag-cud" + ] + } } ] } \ No newline at end of file diff --git a/addons/policies/bootstrap_entity_policies.json b/addons/policies/bootstrap_entity_policies.json index 24ecee3421..2f9d214288 100644 --- a/addons/policies/bootstrap_entity_policies.json +++ b/addons/policies/bootstrap_entity_policies.json @@ -2886,7 +2886,7 @@ [ "entity-type:DataDomain", "entity-classification:*", - "entity:*" + "entity:*/super" ], "policyActions": [ @@ -2897,6 +2897,42 @@ ] } }, + { + "typeName": "AuthPolicy", + "attributes": + { + "name": "RD_DATA_MESH_ENTITIES", + "qualifiedName": "RD_DATA_MESH_ENTITIES", + "description": "Allows admins to perform delete operation on data mesh assets", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 0, + "policyUsers": + [], + "policyGroups": + [], + "policyRoles": + [ + "$admin", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": + [ + + "entity-type:DataDomain", + "entity-classification:*", + "entity:*" + ], + "policyActions": + [ + "entity-read", + "entity-delete" + ] + } + }, { "typeName": "AuthPolicy", "attributes": @@ -2959,6 +2995,7 @@ "policyResourceCategory": "ENTITY", "policyResources": [ + "entity-type:DataDomain", "entity-classification:*", "entity:*" @@ -3005,6 +3042,291 @@ "entity-delete" ] } + }, + { + "typeName": "AuthPolicy", + "attributes": + { + "name": "READ_DATA_CONTRACT", + "qualifiedName": "READ_DATA_CONTRACT", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 1, + "policyUsers": + [], + "policyGroups": + [], + "policyRoles": + [ + "$admin", + "$member", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": + [ + "entity-type:DataContract", + "entity-classification:*", + "entity:*" + ], + "policyActions": + [ + "entity-read" + ] + } + }, + { + "typeName": "AuthPolicy", + "attributes": + { + "name": "CU_DATA_CONTRACT", + "qualifiedName": "CU_DATA_CONTRACT", + "description": 
"cu allow for data contract", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 1, + "policyUsers": + [], + "policyGroups": + [], + "policyRoles": + [ + "$admin", + "$member", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": + [ + "entity-type:DataContract", + "entity-classification:*", + "entity:*" + ], + "policyActions": + [ + "entity-create", + "entity-update" + ] + } + }, + { + "typeName": "AuthPolicy", + "attributes": + { + "name": "READ_DATA_MESH_STAKEHOLDER_TITLE", + "qualifiedName": "READ_DATA_MESH_STAKEHOLDER_TITLE", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyUsers": + [], + "policyGroups": + [], + "policyRoles": + [ + "$admin", + "$member", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": + [ + "entity-type:StakeholderTitle", + "entity-classification:*", + "entity:*" + ], + "policyActions": + [ + "entity-read" + ] + } + }, + + { + "typeName": "AuthPolicy", + "attributes": { + "name": "CUD_BUSINESS_POLICY", + "qualifiedName": "CUD_BUSINESS_POLICY", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 0, + "policyUsers": [], + "policyGroups": [], + "policyRoles": [ + "$admin", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": [ + "entity-type:BusinessPolicy", + "entity-type:BusinessPolicyException", + "entity-classification:*", + "entity:*" + ], + "policyActions": [ + "entity-create", + "entity-update", + "entity-delete" + ] + } + }, + { + "typeName": "AuthPolicy", + "attributes": { + "name": "READ_BUSINESS_POLICY", + "qualifiedName": "READ_BUSINESS_POLICY", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 0, + "policyUsers": [], + "policyGroups": [], + "policyRoles": [ + "$admin", + "$guest", + "$member", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": [ + "entity-type:BusinessPolicy", + "entity-type:BusinessPolicyException", + "entity-classification:*", + "entity:*" + ], + "policyActions": [ + "entity-read" + ] + } + }, + + { + "typeName": "AuthPolicy", + "attributes": { + "name": "CUD_INCIDENT", + "qualifiedName": "CUD_INCIDENT", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 0, + "policyUsers": [], + "policyGroups": [], + "policyRoles": [ + "$admin", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": [ + "entity-type:Incident", + "entity-classification:*", + "entity:*" + ], + "policyActions": [ + "entity-create", + "entity-update", + "entity-delete" + ] + } + }, + { + "typeName": "AuthPolicy", + "attributes": { + "name": "READ_INCIDENT" , + "qualifiedName": "READ_INCIDENT", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 0, + "policyUsers": [], + "policyGroups": [], + "policyRoles": [ + "$admin", + "$guest", + "$member", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": [ + "entity-type:Incident", + "entity-classification:*", + "entity:*" + ], + "policyActions": [ 
+ "entity-read" + ] + } + }, + { + "typeName": "AuthPolicy", + "attributes": { + "name": "CRUD_BUSINESS_POLICY_LOG", + "qualifiedName": "CRUD_BUSINESS_POLICY_LOG", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 0, + "policyUsers": [], + "policyGroups": [], + "policyRoles": [ + "$admin", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": [ + "entity-type:BusinessPolicyLog", + "entity-classification:*", + "entity:*" + ], + "policyActions": [ + "entity-create", + "entity-read", + "entity-update", + "entity-delete" + ] + } + }, + { + "typeName": "AuthPolicy", + "attributes": { + "name": "CRUD_TASK", + "qualifiedName": "CRUD_TASK", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 0, + "policyUsers": [], + "policyGroups": [], + "policyRoles": [ + "$admin", + "$guest", + "$member", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": [ + "entity-type:Task", + "entity-classification:*", + "entity:*" + ], + "policyActions": [ + "entity-create", + "entity-read", + "entity-update", + "entity-delete" + ] + } } ] } diff --git a/addons/policies/bootstrap_heka_policies.json b/addons/policies/bootstrap_heka_policies.json index d092186d78..4a9aeb3935 100644 --- a/addons/policies/bootstrap_heka_policies.json +++ b/addons/policies/bootstrap_heka_policies.json @@ -13,6 +13,7 @@ "policyServiceName": "heka", "policyType": "deny", "policyPriority": 1, + "isPolicyEnabled": false, "policyUsers": [], "policyGroups": [], "policyRoles": @@ -32,4 +33,4 @@ } } ] -} \ No newline at end of file +} diff --git a/addons/policies/bootstrap_relationship_policies.json b/addons/policies/bootstrap_relationship_policies.json index 6c44567b87..86a9efa016 100644 --- a/addons/policies/bootstrap_relationship_policies.json +++ b/addons/policies/bootstrap_relationship_policies.json @@ -646,6 +646,7 @@ "typeName": "AuthPolicy", "attributes": { + "name": "LINK_MESH_DATA_DOMAIN_TO_DATA_PRODUCT", "qualifiedName": "LINK_MESH_DATA_DOMAIN_TO_DATA_PRODUCT", "policyCategory": "bootstrap", @@ -760,6 +761,82 @@ "remove-relationship" ] } + }, + { + "typeName": "AuthPolicy", + "attributes": + { + "name": "LINK_BUSINESS_POLICY_BUSINESS_POLICY", + "qualifiedName": "LINK_BUSINESS_POLICY_BUSINESS_POLICY", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyUsers": + [], + "policyGroups": + [], + "policyRoles": + [ + "$admin", + "$api-token-default-access" + ], + "policyResourceCategory": "RELATIONSHIP", + "policyResources": + [ + "end-one-entity-classification:*", + "end-two-entity-classification:*", + "end-one-entity:*", + "end-two-entity:*", + "end-one-entity-type:BusinessPolicy", + "end-two-entity-type:BusinessPolicy", + "relationship-type:RelatedBusinessPolicy" + ], + "policyActions": + [ + "add-relationship", + "update-relationship", + "remove-relationship" + ] + } + }, + { + "typeName": "AuthPolicy", + "attributes": + { + "name": "LINK_BUSINESS_POLICY_BUSINESS_POLICY_EXCEPTION", + "qualifiedName": "LINK_BUSINESS_POLICY_BUSINESS_POLICY_EXCEPTION", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyUsers": + [], + "policyGroups": + [], + "policyRoles": + [ + "$admin", + "$api-token-default-access" + ], + 
"policyResourceCategory": "RELATIONSHIP", + "policyResources": + [ + "end-one-entity-classification:*", + "end-two-entity-classification:*", + "end-one-entity:*", + "end-two-entity:*", + "end-one-entity-type:BusinessPolicy", + "end-two-entity-type:BusinessPolicyException", + "relationship-type:BusinessPolicy_BusinessPolicyException" + ], + "policyActions": + [ + "add-relationship", + "update-relationship", + "remove-relationship" + ] + } } ] } \ No newline at end of file diff --git a/addons/policies/global_stakeholder-titles.json b/addons/policies/global_stakeholder-titles.json new file mode 100644 index 0000000000..a32088f11e --- /dev/null +++ b/addons/policies/global_stakeholder-titles.json @@ -0,0 +1,41 @@ +{ + "entities": [ + { + "typeName": "StakeholderTitle", + "attributes": + { + "qualifiedName": "stakeholderTitle/default/DOMAIN_OWNER", + "name": "Domain Owner", + "stakeholderTitleDomainQualifiedNames": ["*/super"] + } + }, + { + "typeName": "StakeholderTitle", + "attributes": + { + "qualifiedName": "stakeholderTitle/default/DATA_PRODUCT_OWNER", + "name": "Data Product Owner", + "stakeholderTitleDomainQualifiedNames": ["*/super"] + } + }, + { + "typeName": "StakeholderTitle", + "attributes": + { + "qualifiedName": "stakeholderTitle/default/DATA_ENGINEER", + "name": "Data Engineer", + "stakeholderTitleDomainQualifiedNames": ["*/super"] + } + }, + { + "typeName": "StakeholderTitle", + "attributes": + { + "qualifiedName": "stakeholderTitle/default/ARCHITECT", + "name": "Architect", + "stakeholderTitleDomainQualifiedNames": ["*/super"] + } + } + ] +} + diff --git a/addons/static/templates/policy_cache_transformer_persona.json b/addons/static/templates/policy_cache_transformer_persona.json index a61b671090..b9541abf5b 100644 --- a/addons/static/templates/policy_cache_transformer_persona.json +++ b/addons/static/templates/policy_cache_transformer_persona.json @@ -404,6 +404,58 @@ "entity-remove-classification" ] }, + { + "policyResourceCategory": "ENTITY", + "description": "Create Stakeholder for this Domain", + "policyType": "ACCESS", + "resources": [ + "entity:default/*/{entity}", + "entity-type:Stakeholder", + "entity-classification:*" + ], + "actions": [ + "entity-read", + "entity-create", + "entity-update", + "entity-delete" + ] + }, + { + "policyResourceCategory": "RELATIONSHIP", + "policyType": "ACCESS", + "description": "Link/unlink Stakeholder to this Domain", + + "resources": [ + "relationship-type:*", + + "end-one-entity-type:DataDomain", + "end-one-entity-classification:*", + "end-one-entity:{entity}", + + "end-two-entity-type:Stakeholder", + "end-two-entity-classification:*", + "end-two-entity:default/*/{entity}" + ], + "actions": ["add-relationship", "update-relationship", "remove-relationship"] + }, + { + "policyResourceCategory": "RELATIONSHIP", + "policyType": "ACCESS", + "description": "Link/unlink any Stakeholder Title to this Domain's Stakeholder", + + "resources": [ + "relationship-type:*", + + "end-one-entity-type:StakeholderTitle", + "end-one-entity-classification:*", + "end-one-entity:*", + + "end-two-entity-type:Stakeholder", + "end-two-entity-classification:*", + "end-two-entity:default/*/{entity}" + ], + "actions": ["add-relationship", "update-relationship", "remove-relationship"] + }, { "policyResourceCategory": "RELATIONSHIP", "policyType": "ACCESS", @@ -480,7 +532,19 @@ "actions": ["add-relationship", "update-relationship", "remove-relationship"] } ], - + "persona-domain-business-update-metadata": [ + { + "policyType": "ACCESS", + "policyResourceCategory": 
"ENTITY", + "resources": [ + "entity:{entity}", + "entity-type:DataDomain", + "entity-classification:*", + "entity-business-metadata:*" + ], + "actions": ["entity-update-business-metadata"] + } + ], "persona-domain-sub-domain-read": [ { @@ -542,6 +606,59 @@ "entity-remove-classification" ] }, + { + "policyResourceCategory": "ENTITY", + "policyType": "ACCESS", + "description": "Create Stakeholder for Sub Domains", + + "resources": [ + "entity:default/*/{entity}/*", + "entity-type:Stakeholder", + "entity-classification:*" + ], + "actions": [ + "entity-read", + "entity-create", + "entity-update", + "entity-delete" + ] + }, + { + "policyResourceCategory": "RELATIONSHIP", + "policyType": "ACCESS", + "description": "Link/unlink Stakeholder to Sub Domains", + + "resources": [ + "relationship-type:*", + + "end-one-entity-type:DataDomain", + "end-one-entity-classification:*", + "end-one-entity:{entity}/*domain/*", + + "end-two-entity-type:Stakeholder", + "end-two-entity-classification:*", + "end-two-entity:default/*/{entity}/*" + ], + "actions": ["add-relationship", "update-relationship", "remove-relationship"] + }, + { + "policyResourceCategory": "RELATIONSHIP", + "policyType": "ACCESS", + "description": "Link/unlink any Stakeholder Title to sub-domains's Stakeholder", + + "resources": [ + "relationship-type:*", + + "end-one-entity-type:StakeholderTitle", + "end-one-entity-classification:*", + "end-one-entity:*", + + "end-two-entity-type:Stakeholder", + "end-two-entity-classification:*", + "end-two-entity:default/*/{entity}/*" + ], + "actions": ["add-relationship", "update-relationship", "remove-relationship"] + }, { "policyResourceCategory": "RELATIONSHIP", "policyType": "ACCESS", @@ -589,6 +706,19 @@ "actions": ["entity-delete"] } ], + "persona-domain-sub-domain-business-update-metadata": [ + { + "policyType": "ACCESS", + "policyResourceCategory": "ENTITY", + "resources": [ + "entity:{entity}/*domain/*", + "entity-type:DataDomain", + "entity-classification:*", + "entity-business-metadata:*" + ], + "actions": ["entity-update-business-metadata"] + } + ], "persona-domain-product-read": [ { @@ -719,8 +849,19 @@ "actions": ["entity-delete"] } ], - - + "persona-domain-product-business-update-metadata": [ + { + "policyType": "ACCESS", + "policyResourceCategory": "ENTITY", + "resources": [ + "entity:{entity}/*product/*", + "entity-type:DataProduct", + "entity-classification:*", + "entity-business-metadata:*" + ], + "actions": ["entity-update-business-metadata"] + } + ], "select": [ { diff --git a/auth-agents-common/pom.xml b/auth-agents-common/pom.xml index aa37156d5f..c7900dd2d8 100644 --- a/auth-agents-common/pom.xml +++ b/auth-agents-common/pom.xml @@ -55,6 +55,19 @@ ${project.version} + + + org.apache.atlas + auth-common + ${project.version} + + + + org.apache.atlas + auth-common + ${project.version} + + org.apache.atlas auth-audits @@ -71,12 +84,14 @@ jersey-client + - org.codehaus.jackson - jackson-jaxrs - ${jackson-jaxrs.version} + com.fasterxml.jackson.jaxrs + jackson-jaxrs-json-provider + 2.15.0 + com.google.guava guava @@ -104,7 +119,7 @@ shade - false + true true @@ -114,12 +129,14 @@ **/*.txt **/*.yml **/*.properties + META-INF/maven/** org.slf4j:* + org.apache.logging.log4j:* com.google.api.grpc:* org.janusgraph:* com.google.protobuf:* @@ -129,6 +146,16 @@ com.datastax.oss:* org.apache.tinkerpop org.keycloak:* + org.apache.cassandra:* + org.elasticsearch:* + org.elasticsearch.client:* + org.apache.hadoop:hadoop-hdfs-client + org.apache.kerby:* + com.fasterxml.jackson.core:* + org.redisson:* 
+ org.codehaus:* + io.netty:* + io.netty.netty-all:* diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerBaseModelObject.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerBaseModelObject.java deleted file mode 100644 index aba6661426..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerBaseModelObject.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.model; - -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlRootElement; -import java.util.Date; - -@JsonInclude(JsonInclude.Include.NON_NULL) -@XmlRootElement -@XmlAccessorType(XmlAccessType.FIELD) -public class RangerBaseModelObject implements java.io.Serializable { - private static final long serialVersionUID = 1L; - - private Long id; - private String guid; - private Boolean isEnabled; - private String createdBy; - private String updatedBy; - private Date createTime; - private Date updateTime; - private Long version; - - public RangerBaseModelObject() { - setIsEnabled(null); - } - - public void updateFrom(RangerBaseModelObject other) { - setIsEnabled(other.getIsEnabled()); - } - - /** - * @return the id - */ - public Long getId() { - return id; - } - /** - * @param id the id to set - */ - public void setId(Long id) { - this.id = id; - } - /** - * @return the guid - */ - public String getGuid() { - return guid; - } - /** - * @param guid the guid to set - */ - public void setGuid(String guid) { - this.guid = guid; - } - /** - * @return the isEnabled - */ - public Boolean getIsEnabled() { - return isEnabled; - } - /** - * @param isEnabled the isEnabled to set - */ - public void setIsEnabled(Boolean isEnabled) { - this.isEnabled = isEnabled == null ? 
Boolean.TRUE : isEnabled; - } - /** - * @return the createdBy - */ - public String getCreatedBy() { - return createdBy; - } - /** - * @param createdBy the createdBy to set - */ - public void setCreatedBy(String createdBy) { - this.createdBy = createdBy; - } - /** - * @return the updatedBy - */ - public String getUpdatedBy() { - return updatedBy; - } - /** - * @param updatedBy the updatedBy to set - */ - public void setUpdatedBy(String updatedBy) { - this.updatedBy = updatedBy; - } - /** - * @return the createTime - */ - public Date getCreateTime() { - return createTime; - } - /** - * @param createTime the createTime to set - */ - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - /** - * @return the updateTime - */ - public Date getUpdateTime() { - return updateTime; - } - /** - * @param updateTime the updateTime to set - */ - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - /** - * @return the version - */ - public Long getVersion() { - return version; - } - /** - * @param version the version to set - */ - public void setVersion(Long version) { - this.version = version; - } - - @Override - public String toString( ) { - StringBuilder sb = new StringBuilder(); - - toString(sb); - - return sb.toString(); - } - - public StringBuilder toString(StringBuilder sb) { - sb.append("id={").append(id).append("} "); - sb.append("guid={").append(guid).append("} "); - sb.append("isEnabled={").append(isEnabled).append("} "); - sb.append("createdBy={").append(createdBy).append("} "); - sb.append("updatedBy={").append(updatedBy).append("} "); - sb.append("createTime={").append(createTime).append("} "); - sb.append("updateTime={").append(updateTime).append("} "); - sb.append("version={").append(version).append("} "); - - return sb; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicy.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicy.java deleted file mode 100644 index f81f8e2854..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicy.java +++ /dev/null @@ -1,1703 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.plugin.model; - -import org.apache.commons.collections.CollectionUtils; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlRootElement; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - - -@JsonInclude(JsonInclude.Include.NON_NULL) -@XmlRootElement -@XmlAccessorType(XmlAccessType.FIELD) -public class RangerPolicy extends RangerBaseModelObject implements java.io.Serializable { - public static final String POLICY_TYPE_ACCESS = "ACCESS"; - public static final String POLICY_TYPE_DATAMASK = "DATA_MASK"; - public static final String POLICY_TYPE_ROWFILTER = "ROW_FILTER"; - public static final String POLICY_TYPE_AUDIT = "AUDIT"; - - public static final String[] POLICY_TYPES = new String[] { - POLICY_TYPE_ACCESS, - POLICY_TYPE_DATAMASK, - POLICY_TYPE_ROWFILTER - }; - - public static final String MASK_TYPE_NULL = "MASK_NULL"; - public static final String MASK_TYPE_NONE = "MASK_NONE"; - public static final String MASK_TYPE_CUSTOM = "CUSTOM"; - - public static final int POLICY_PRIORITY_NORMAL = 0; - public static final int POLICY_PRIORITY_OVERRIDE = 1; - - public static final String POLICY_PRIORITY_NAME_NORMAL = "NORMAL"; - public static final String POLICY_PRIORITY_NAME_OVERRIDE = "OVERRIDE"; - - public static final Comparator POLICY_ID_COMPARATOR = new PolicyIdComparator(); - - // For future use - private static final long serialVersionUID = 1L; - - private String service; - private String name; - private String policyType; - private Integer policyPriority; - private String description; - private String resourceSignature; - private Boolean isAuditEnabled; - private Map resources; - private List conditions; - private List policyItems; - private List denyPolicyItems; - private List allowExceptions; - private List denyExceptions; - private List dataMaskPolicyItems; - private List rowFilterPolicyItems; - private String serviceType; - private Map options; - private List validitySchedules; - private List policyLabels; - private String zoneName; - private Boolean isDenyAllElse; - private Map attributes; - - public RangerPolicy() { - this(null, null, null, null, null, null, null, null, null, null, null); - } - - public RangerPolicy(String service, String name, String policyType, Integer policyPriority, String description, Map resources, List policyItems, String resourceSignature, Map options, List validitySchedules, List policyLables) { - this(service, name, policyType, policyPriority, description, resources, policyItems, resourceSignature, options, validitySchedules, policyLables, null); - } - - public RangerPolicy(String service, String name, String policyType, Integer policyPriority, String description, Map resources, List policyItems, String resourceSignature, Map options, List validitySchedules, List policyLables, String zoneName) { - this(service, name, policyType, policyPriority, description, resources, policyItems, resourceSignature, options, validitySchedules, policyLables, zoneName, null); - } - - public RangerPolicy(String service, String name, String policyType, Integer policyPriority, String description, Map resources, List policyItems, String resourceSignature, Map options, List validitySchedules, List policyLables, String zoneName, List conditions) { - this(service, name, policyType, policyPriority, description, 
resources, policyItems, resourceSignature, options, validitySchedules, policyLables, zoneName, conditions, null); - } - - /** - * @param service - * @param name - * @param policyType - * @param description - * @param resources - * @param policyItems - * @param resourceSignature TODO - */ - public RangerPolicy(String service, String name, String policyType, Integer policyPriority, String description, Map resources, List policyItems, String resourceSignature, Map options, List validitySchedules, List policyLables, String zoneName, List conditions, Boolean isDenyAllElse) { - super(); - - setService(service); - setName(name); - setPolicyType(policyType); - setPolicyPriority(policyPriority); - setDescription(description); - setResourceSignature(resourceSignature); - setIsAuditEnabled(null); - setResources(resources); - setPolicyItems(policyItems); - setDenyPolicyItems(null); - setAllowExceptions(null); - setDenyExceptions(null); - setDataMaskPolicyItems(null); - setRowFilterPolicyItems(null); - setOptions(options); - setValiditySchedules(validitySchedules); - setPolicyLabels(policyLables); - setZoneName(zoneName); - setConditions(conditions); - setIsDenyAllElse(isDenyAllElse); - - } - - /** - * @param other - */ - public void updateFrom(RangerPolicy other) { - super.updateFrom(other); - - setService(other.getService()); - setName(other.getName()); - setPolicyType(other.getPolicyType()); - setPolicyPriority(other.getPolicyPriority()); - setDescription(other.getDescription()); - setResourceSignature(other.getResourceSignature()); - setIsAuditEnabled(other.getIsAuditEnabled()); - setResources(other.getResources()); - setConditions(other.getConditions()); - setPolicyItems(other.getPolicyItems()); - setDenyPolicyItems(other.getDenyPolicyItems()); - setAllowExceptions(other.getAllowExceptions()); - setDenyExceptions(other.getDenyExceptions()); - setDataMaskPolicyItems(other.getDataMaskPolicyItems()); - setRowFilterPolicyItems(other.getRowFilterPolicyItems()); - setServiceType(other.getServiceType()); - setOptions(other.getOptions()); - setValiditySchedules(other.getValiditySchedules()); - setPolicyLabels(other.getPolicyLabels()); - setZoneName(other.getZoneName()); - setIsDenyAllElse(other.getIsDenyAllElse()); - } - - public Map getAttributes() { - return attributes; - } - - public void setAttributes(Map attributes) { - this.attributes = attributes; - } - - /** - * @return the type - */ - public String getService() { - return service; - } - - /** - * @param service the type to set - */ - public void setService(String service) { - this.service = service; - } - - /** - * @return the name - */ - public String getName() { - return name; - } - - /** - * @param name the name to set - */ - public void setName(String name) { - this.name = name; - } - - /** - * @return the policyType - */ - public String getPolicyType() { - return policyType; - } - - /** - * @param policyType the policyType to set - */ - public void setPolicyType(String policyType) { - this.policyType = policyType; - } - - /** - * @return the policyPriority - */ - public Integer getPolicyPriority() { - return policyPriority; - } - - /** - * @param policyPriority the policyPriority to set - */ - public void setPolicyPriority(Integer policyPriority) { - this.policyPriority = policyPriority == null ? 
RangerPolicy.POLICY_PRIORITY_NORMAL : policyPriority; - } - - /** - * @return the description - */ - public String getDescription() { - return description; - } - - /** - * @param description the description to set - */ - public void setDescription(String description) { - this.description = description; - } - - /** - * @return the resourceSignature - */ - public String getResourceSignature() { - return resourceSignature; - } - - /** - * @param resourceSignature the resourceSignature to set - */ - public void setResourceSignature(String resourceSignature) { - this.resourceSignature = resourceSignature; - } - - /** - * @return the isAuditEnabled - */ - public Boolean getIsAuditEnabled() { - return isAuditEnabled; - } - - /** - * @param isAuditEnabled the isEnabled to set - */ - public void setIsAuditEnabled(Boolean isAuditEnabled) { - this.isAuditEnabled = isAuditEnabled == null ? Boolean.TRUE : isAuditEnabled; - } - - public String getServiceType() { - return serviceType; - } - - public void setServiceType(String serviceType) { - this.serviceType = serviceType; - } - - public List getPolicyLabels() { - return policyLabels; - } - - public void setPolicyLabels(List policyLabels) { - if (this.policyLabels == null) { - this.policyLabels = new ArrayList<>(); - } - - if (this.policyLabels == policyLabels) { - return; - } - - this.policyLabels.clear(); - - if (policyLabels != null) { - this.policyLabels.addAll(policyLabels); - } - } - - /** - * @return the resources - */ - public Map getResources() { - return resources; - } - - /** - * @param resources the resources to set - */ - public void setResources(Map resources) { - if(this.resources == null) { - this.resources = new HashMap<>(); - } - - if(this.resources == resources) { - return; - } - - this.resources.clear(); - - if(resources != null) { - for(Map.Entry e : resources.entrySet()) { - this.resources.put(e.getKey(), e.getValue()); - } - } - } - - /** - * @return the policyItems - */ - public List getPolicyItems() { - return policyItems; - } - - /** - * @param policyItems the policyItems to set - */ - public void setPolicyItems(List policyItems) { - if(this.policyItems == null) { - this.policyItems = new ArrayList<>(); - } - - if(this.policyItems == policyItems) { - return; - } - - this.policyItems.clear(); - - if(policyItems != null) { - this.policyItems.addAll(policyItems); - } - } - - /** - * @return the denyPolicyItems - */ - public List getDenyPolicyItems() { - return denyPolicyItems; - } - - /** - * @param denyPolicyItems the denyPolicyItems to set - */ - public void setDenyPolicyItems(List denyPolicyItems) { - if(this.denyPolicyItems == null) { - this.denyPolicyItems = new ArrayList<>(); - } - - if(this.denyPolicyItems == denyPolicyItems) { - return; - } - - this.denyPolicyItems.clear(); - - if(denyPolicyItems != null) { - this.denyPolicyItems.addAll(denyPolicyItems); - } - } - - /** - * @return the allowExceptions - */ - public List getAllowExceptions() { - return allowExceptions; - } - - /** - * @param allowExceptions the allowExceptions to set - */ - public void setAllowExceptions(List allowExceptions) { - if(this.allowExceptions == null) { - this.allowExceptions = new ArrayList<>(); - } - - if(this.allowExceptions == allowExceptions) { - return; - } - - this.allowExceptions.clear(); - - if(allowExceptions != null) { - this.allowExceptions.addAll(allowExceptions); - } - } - - /** - * @return the denyExceptions - */ - public List getDenyExceptions() { - return denyExceptions; - } - - /** - * @param denyExceptions the denyExceptions to 
set - */ - public void setDenyExceptions(List denyExceptions) { - if(this.denyExceptions == null) { - this.denyExceptions = new ArrayList<>(); - } - - if(this.denyExceptions == denyExceptions) { - return; - } - - this.denyExceptions.clear(); - - if(denyExceptions != null) { - this.denyExceptions.addAll(denyExceptions); - } - } - - public List getDataMaskPolicyItems() { - return dataMaskPolicyItems; - } - - public void setDataMaskPolicyItems(List dataMaskPolicyItems) { - if(this.dataMaskPolicyItems == null) { - this.dataMaskPolicyItems = new ArrayList<>(); - } - - if(this.dataMaskPolicyItems == dataMaskPolicyItems) { - return; - } - - this.dataMaskPolicyItems.clear(); - - if(dataMaskPolicyItems != null) { - this.dataMaskPolicyItems.addAll(dataMaskPolicyItems); - } - } - - public List getRowFilterPolicyItems() { - return rowFilterPolicyItems; - } - - public void setRowFilterPolicyItems(List rowFilterPolicyItems) { - if(this.rowFilterPolicyItems == null) { - this.rowFilterPolicyItems = new ArrayList<>(); - } - - if(this.rowFilterPolicyItems == rowFilterPolicyItems) { - return; - } - - this.rowFilterPolicyItems.clear(); - - if(rowFilterPolicyItems != null) { - this.rowFilterPolicyItems.addAll(rowFilterPolicyItems); - } - } - - public Map getOptions() { return options; } - - public void setOptions(Map options) { - if (this.options == null) { - this.options = new HashMap<>(); - } - if (this.options == options) { - return; - } - this.options.clear(); - - if(options != null) { - for(Map.Entry e : options.entrySet()) { - this.options.put(e.getKey(), e.getValue()); - } - } - } - - public List getValiditySchedules() { return validitySchedules; } - - public void setValiditySchedules(List validitySchedules) { - if (this.validitySchedules == null) { - this.validitySchedules = new ArrayList<>(); - } - if (this.validitySchedules == validitySchedules) { - return; - } - this.validitySchedules.clear(); - - if(validitySchedules != null) { - this.validitySchedules.addAll(validitySchedules); - } - } - public String getZoneName() { return zoneName; } - - public void setZoneName(String zoneName) { - this.zoneName = zoneName; - } - - /** - * @return the conditions - */ - public List getConditions() { return conditions; } - /** - * @param conditions the conditions to set - */ - public void setConditions(List conditions) { - this.conditions = conditions; - } - - public Boolean getIsDenyAllElse() { - return isDenyAllElse; - } - - public void setIsDenyAllElse(Boolean isDenyAllElse) { - this.isDenyAllElse = isDenyAllElse == null ? 
Boolean.FALSE : isDenyAllElse; - } - - @Override - public String toString( ) { - StringBuilder sb = new StringBuilder(); - - toString(sb); - - return sb.toString(); - } - - public StringBuilder toString(StringBuilder sb) { - sb.append("RangerPolicy={"); - - super.toString(sb); - - sb.append("service={").append(service).append("} "); - sb.append("name={").append(name).append("} "); - sb.append("policyType={").append(policyType).append("} "); - sb.append("policyPriority={").append(policyPriority).append("} "); - sb.append("description={").append(description).append("} "); - sb.append("resourceSignature={").append(resourceSignature).append("} "); - sb.append("isAuditEnabled={").append(isAuditEnabled).append("} "); - sb.append("serviceType={").append(serviceType).append("} "); - - sb.append("resources={"); - if(resources != null) { - for(Map.Entry e : resources.entrySet()) { - sb.append(e.getKey()).append("={"); - e.getValue().toString(sb); - sb.append("} "); - } - } - sb.append("} "); - sb.append("policyLabels={"); - if(policyLabels != null) { - for(String policyLabel : policyLabels) { - if(policyLabel != null) { - sb.append(policyLabel).append(" "); - } - } - } - sb.append("} "); - - sb.append("policyConditions={"); - if(conditions != null) { - for(RangerPolicyItemCondition condition : conditions) { - if(condition != null) { - condition.toString(sb); - } - } - } - sb.append("} "); - - sb.append("policyItems={"); - if(policyItems != null) { - for(RangerPolicyItem policyItem : policyItems) { - if(policyItem != null) { - policyItem.toString(sb); - } - } - } - sb.append("} "); - - sb.append("denyPolicyItems={"); - if(denyPolicyItems != null) { - for(RangerPolicyItem policyItem : denyPolicyItems) { - if(policyItem != null) { - policyItem.toString(sb); - } - } - } - sb.append("} "); - - sb.append("allowExceptions={"); - if(allowExceptions != null) { - for(RangerPolicyItem policyItem : allowExceptions) { - if(policyItem != null) { - policyItem.toString(sb); - } - } - } - sb.append("} "); - - sb.append("denyExceptions={"); - if(denyExceptions != null) { - for(RangerPolicyItem policyItem : denyExceptions) { - if(policyItem != null) { - policyItem.toString(sb); - } - } - } - sb.append("} "); - - sb.append("dataMaskPolicyItems={"); - if(dataMaskPolicyItems != null) { - for(RangerDataMaskPolicyItem dataMaskPolicyItem : dataMaskPolicyItems) { - if(dataMaskPolicyItem != null) { - dataMaskPolicyItem.toString(sb); - } - } - } - sb.append("} "); - - sb.append("rowFilterPolicyItems={"); - if(rowFilterPolicyItems != null) { - for(RangerRowFilterPolicyItem rowFilterPolicyItem : rowFilterPolicyItems) { - if(rowFilterPolicyItem != null) { - rowFilterPolicyItem.toString(sb); - } - } - } - sb.append("} "); - - sb.append("options={"); - if(options != null) { - for(Map.Entry e : options.entrySet()) { - sb.append(e.getKey()).append("={"); - sb.append(e.getValue().toString()); - sb.append("} "); - } - } - sb.append("} "); - - //sb.append("validitySchedules={").append(validitySchedules).append("} "); - sb.append("validitySchedules={"); - if (CollectionUtils.isNotEmpty(validitySchedules)) { - for (RangerValiditySchedule schedule : validitySchedules) { - if (schedule != null) { - sb.append("schedule={").append(schedule).append("}"); - } - } - } - sb.append(", zoneName=").append(zoneName); - - sb.append(", isDenyAllElse={").append(isDenyAllElse).append("} "); - - sb.append("}"); - - sb.append("}"); - - return sb; - } - - static class PolicyIdComparator implements Comparator, java.io.Serializable { - @Override - public int 
compare(RangerPolicy me, RangerPolicy other) { - return Long.compare(me.getId(), other.getId()); - } - } - - @JsonInclude(JsonInclude.Include.NON_NULL) - @XmlRootElement - @XmlAccessorType(XmlAccessType.FIELD) - public static class RangerPolicyResource implements java.io.Serializable { - private static final long serialVersionUID = 1L; - - private List values; - private Boolean isExcludes; - private Boolean isRecursive; - - public RangerPolicyResource() { - this((List)null, null, null); - } - - public RangerPolicyResource(String value) { - setValue(value); - setIsExcludes(null); - setIsRecursive(null); - } - - public RangerPolicyResource(String value, Boolean isExcludes, Boolean isRecursive) { - setValue(value); - setIsExcludes(isExcludes); - setIsRecursive(isRecursive); - } - - public RangerPolicyResource(List values, Boolean isExcludes, Boolean isRecursive) { - setValues(values); - setIsExcludes(isExcludes); - setIsRecursive(isRecursive); - } - - /** - * @return the values - */ - public List getValues() { - return values; - } - - /** - * @param values the values to set - */ - public void setValues(List values) { - if(this.values == null) { - this.values = new ArrayList<>(); - } - - if(this.values == values) { - return; - } - - this.values.clear(); - - if(values != null) { - this.values.addAll(values); - } - } - - /** - * @param value the value to set - */ - public void setValue(String value) { - if(this.values == null) { - this.values = new ArrayList<>(); - } - - this.values.clear(); - - this.values.add(value); - } - - /** - * @return the isExcludes - */ - public Boolean getIsExcludes() { - return isExcludes; - } - - /** - * @param isExcludes the isExcludes to set - */ - public void setIsExcludes(Boolean isExcludes) { - this.isExcludes = isExcludes == null ? Boolean.FALSE : isExcludes; - } - - /** - * @return the isRecursive - */ - public Boolean getIsRecursive() { - return isRecursive; - } - - /** - * @param isRecursive the isRecursive to set - */ - public void setIsRecursive(Boolean isRecursive) { - this.isRecursive = isRecursive == null ? Boolean.FALSE : isRecursive; - } - - @Override - public String toString( ) { - StringBuilder sb = new StringBuilder(); - - toString(sb); - - return sb.toString(); - } - - public StringBuilder toString(StringBuilder sb) { - sb.append("RangerPolicyResource={"); - sb.append("values={"); - if(values != null) { - for(String value : values) { - sb.append(value).append(" "); - } - } - sb.append("} "); - sb.append("isExcludes={").append(isExcludes).append("} "); - sb.append("isRecursive={").append(isRecursive).append("} "); - sb.append("}"); - - return sb; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result - + ((isExcludes == null) ? 0 : isExcludes.hashCode()); - result = prime * result - + ((isRecursive == null) ? 0 : isRecursive.hashCode()); - result = prime * result - + ((values == null) ? 
0 : values.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - RangerPolicyResource other = (RangerPolicyResource) obj; - if (isExcludes == null) { - if (other.isExcludes != null) - return false; - } else if (!isExcludes.equals(other.isExcludes)) - return false; - if (isRecursive == null) { - if (other.isRecursive != null) - return false; - } else if (!isRecursive.equals(other.isRecursive)) - return false; - if (values == null) { - if (other.values != null) - return false; - } else if (!values.equals(other.values)) - return false; - return true; - } - - } - - @JsonInclude(JsonInclude.Include.NON_NULL) - @XmlRootElement - @XmlAccessorType(XmlAccessType.FIELD) - public static class RangerPolicyItem implements java.io.Serializable { - private static final long serialVersionUID = 1L; - - private List accesses; - private List users; - private List groups; - private List roles; - private List conditions; - private Boolean delegateAdmin; - - public RangerPolicyItem() { - this(null, null, null, null, null, null); - } - - public RangerPolicyItem(List accessTypes, List users, List groups, List roles, List conditions, Boolean delegateAdmin) { - setAccesses(accessTypes); - setUsers(users); - setGroups(groups); - setRoles(roles); - setConditions(conditions); - setDelegateAdmin(delegateAdmin); - } - - /** - * @return the accesses - */ - public List getAccesses() { - return accesses; - } - /** - * @param accesses the accesses to set - */ - public void setAccesses(List accesses) { - if(this.accesses == null) { - this.accesses = new ArrayList<>(); - } - - if(this.accesses == accesses) { - return; - } - - this.accesses.clear(); - - if(accesses != null) { - this.accesses.addAll(accesses); - } - } - /** - * @return the users - */ - public List getUsers() { - return users; - } - /** - * @param users the users to set - */ - public void setUsers(List users) { - if(this.users == null) { - this.users = new ArrayList<>(); - } - - if(this.users == users) { - return; - } - - this.users.clear(); - - if(users != null) { - this.users.addAll(users); - } - } - /** - * @return the groups - */ - public List getGroups() { - return groups; - } - /** - * @param groups the groups to set - */ - public void setGroups(List groups) { - if(this.groups == null) { - this.groups = new ArrayList<>(); - } - - if(this.groups == groups) { - return; - } - - this.groups.clear(); - - if(groups != null) { - this.groups.addAll(groups); - } - } - /** - * @return the roles - */ - public List getRoles() { - return roles; - } - /** - * @param roles the roles to set - */ - public void setRoles(List roles) { - if(this.roles == null) { - this.roles = new ArrayList<>(); - } - - if(this.roles == roles) { - return; - } - - this.roles.clear(); - - if(roles != null) { - this.roles.addAll(roles); - } - } - /** - * @return the conditions - */ - public List getConditions() { - return conditions; - } - /** - * @param conditions the conditions to set - */ - public void setConditions(List conditions) { - if(this.conditions == null) { - this.conditions = new ArrayList<>(); - } - - if(this.conditions == conditions) { - return; - } - - this.conditions.clear(); - - if(conditions != null) { - this.conditions.addAll(conditions); - } - } - - /** - * @return the delegateAdmin - */ - public Boolean getDelegateAdmin() { - return delegateAdmin; - } - - /** - * @param delegateAdmin the delegateAdmin to set - */ - public 
void setDelegateAdmin(Boolean delegateAdmin) { - this.delegateAdmin = delegateAdmin == null ? Boolean.FALSE : delegateAdmin; - } - - @Override - public String toString( ) { - StringBuilder sb = new StringBuilder(); - - toString(sb); - - return sb.toString(); - } - - public StringBuilder toString(StringBuilder sb) { - sb.append("RangerPolicyItem={"); - - sb.append("accessTypes={"); - if(accesses != null) { - for(RangerPolicyItemAccess access : accesses) { - if(access != null) { - access.toString(sb); - } - } - } - sb.append("} "); - - sb.append("users={"); - if(users != null) { - for(String user : users) { - if(user != null) { - sb.append(user).append(" "); - } - } - } - sb.append("} "); - - sb.append("groups={"); - if(groups != null) { - for(String group : groups) { - if(group != null) { - sb.append(group).append(" "); - } - } - } - sb.append("} "); - - sb.append("roles={"); - if(roles != null) { - for(String role : roles) { - if(role != null) { - sb.append(role).append(" "); - } - } - } - sb.append("} "); - - sb.append("conditions={"); - if(conditions != null) { - for(RangerPolicyItemCondition condition : conditions) { - if(condition != null) { - condition.toString(sb); - } - } - } - sb.append("} "); - - sb.append("delegateAdmin={").append(delegateAdmin).append("} "); - sb.append("}"); - - return sb; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result - + ((accesses == null) ? 0 : accesses.hashCode()); - result = prime * result - + ((conditions == null) ? 0 : conditions.hashCode()); - result = prime * result - + ((delegateAdmin == null) ? 0 : delegateAdmin.hashCode()); - result = prime * result - + ((roles == null) ? 0 : roles.hashCode()); - result = prime * result - + ((groups == null) ? 0 : groups.hashCode()); - result = prime * result + ((users == null) ? 
0 : users.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - RangerPolicyItem other = (RangerPolicyItem) obj; - if (accesses == null) { - if (other.accesses != null) - return false; - } else if (!accesses.equals(other.accesses)) - return false; - if (conditions == null) { - if (other.conditions != null) - return false; - } else if (!conditions.equals(other.conditions)) - return false; - if (delegateAdmin == null) { - if (other.delegateAdmin != null) - return false; - } else if (!delegateAdmin.equals(other.delegateAdmin)) - return false; - if (roles == null) { - if (other.roles != null) - return false; - } else if (!roles.equals(other.roles)) - return false; - if (groups == null) { - if (other.groups != null) - return false; - } else if (!groups.equals(other.groups)) - return false; - if (users == null) { - if (other.users != null) - return false; - } else if (!users.equals(other.users)) - return false; - return true; - - } - } - - @JsonInclude(JsonInclude.Include.NON_NULL) - @XmlRootElement - @XmlAccessorType(XmlAccessType.FIELD) - public static class RangerDataMaskPolicyItem extends RangerPolicyItem implements java.io.Serializable { - private static final long serialVersionUID = 1L; - - private RangerPolicyItemDataMaskInfo dataMaskInfo; - - public RangerDataMaskPolicyItem() { - this(null, null, null, null, null, null, null); - } - - public RangerDataMaskPolicyItem(List accesses, RangerPolicyItemDataMaskInfo dataMaskDetail, List users, List groups, List roles, List conditions, Boolean delegateAdmin) { - super(accesses, users, groups, roles, conditions, delegateAdmin); - - setDataMaskInfo(dataMaskDetail); - } - - /** - * @return the dataMaskInfo - */ - public RangerPolicyItemDataMaskInfo getDataMaskInfo() { - return dataMaskInfo; - } - - /** - * @param dataMaskInfo the dataMaskInfo to set - */ - public void setDataMaskInfo(RangerPolicyItemDataMaskInfo dataMaskInfo) { - this.dataMaskInfo = dataMaskInfo == null ? new RangerPolicyItemDataMaskInfo() : dataMaskInfo; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = super.hashCode(); - result = prime * result + ((dataMaskInfo == null) ? 0 : dataMaskInfo.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if(! 
super.equals(obj)) - return false; - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - RangerDataMaskPolicyItem other = (RangerDataMaskPolicyItem) obj; - if (dataMaskInfo == null) { - if (other.dataMaskInfo != null) - return false; - } else if (!dataMaskInfo.equals(other.dataMaskInfo)) - return false; - return true; - } - - @Override - public String toString( ) { - StringBuilder sb = new StringBuilder(); - - toString(sb); - - return sb.toString(); - } - - public StringBuilder toString(StringBuilder sb) { - sb.append("RangerDataMaskPolicyItem={"); - - super.toString(sb); - - sb.append("dataMaskInfo={"); - if(dataMaskInfo != null) { - dataMaskInfo.toString(sb); - } - sb.append("} "); - - sb.append("}"); - - return sb; - } - } - - @JsonInclude(JsonInclude.Include.NON_NULL) - @XmlRootElement - @XmlAccessorType(XmlAccessType.FIELD) - public static class RangerRowFilterPolicyItem extends RangerPolicyItem implements java.io.Serializable { - private static final long serialVersionUID = 1L; - - private RangerPolicyItemRowFilterInfo rowFilterInfo; - - public RangerRowFilterPolicyItem() { - this(null, null, null, null, null, null, null); - } - - public RangerRowFilterPolicyItem(RangerPolicyItemRowFilterInfo rowFilterInfo, List accesses, List users, List groups, List roles, List conditions, Boolean delegateAdmin) { - super(accesses, users, groups, roles, conditions, delegateAdmin); - - setRowFilterInfo(rowFilterInfo); - } - - /** - * @return the rowFilterInfo - */ - public RangerPolicyItemRowFilterInfo getRowFilterInfo() { - return rowFilterInfo; - } - - /** - * @param rowFilterInfo the rowFilterInfo to set - */ - public void setRowFilterInfo(RangerPolicyItemRowFilterInfo rowFilterInfo) { - this.rowFilterInfo = rowFilterInfo == null ? new RangerPolicyItemRowFilterInfo() : rowFilterInfo; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = super.hashCode(); - result = prime * result + ((rowFilterInfo == null) ? 0 : rowFilterInfo.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if(! 
super.equals(obj)) - return false; - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - RangerRowFilterPolicyItem other = (RangerRowFilterPolicyItem) obj; - if (rowFilterInfo == null) { - if (other.rowFilterInfo != null) - return false; - } else if (!rowFilterInfo.equals(other.rowFilterInfo)) - return false; - return true; - } - - @Override - public String toString( ) { - StringBuilder sb = new StringBuilder(); - - toString(sb); - - return sb.toString(); - } - - public StringBuilder toString(StringBuilder sb) { - sb.append("RangerRowFilterPolicyItem={"); - - super.toString(sb); - - sb.append("rowFilterInfo={"); - if(rowFilterInfo != null) { - rowFilterInfo.toString(sb); - } - sb.append("} "); - - sb.append("}"); - - return sb; - } - } - - @JsonInclude(JsonInclude.Include.NON_NULL) - @XmlRootElement - @XmlAccessorType(XmlAccessType.FIELD) - public static class RangerPolicyItemAccess implements java.io.Serializable { - private static final long serialVersionUID = 1L; - - private String type; - private Boolean isAllowed; - - public RangerPolicyItemAccess() { - this(null, null); - } - - public RangerPolicyItemAccess(String type) { - this(type, null); - } - - public RangerPolicyItemAccess(String type, Boolean isAllowed) { - setType(type); - setIsAllowed(isAllowed); - } - - /** - * @return the type - */ - public String getType() { - return type; - } - - /** - * @param type the type to set - */ - public void setType(String type) { - this.type = type; - } - - /** - * @return the isAllowed - */ - public Boolean getIsAllowed() { - return isAllowed; - } - - /** - * @param isAllowed the isAllowed to set - */ - public void setIsAllowed(Boolean isAllowed) { - this.isAllowed = isAllowed == null ? Boolean.TRUE : isAllowed; - } - - @Override - public String toString( ) { - StringBuilder sb = new StringBuilder(); - - toString(sb); - - return sb.toString(); - } - - public StringBuilder toString(StringBuilder sb) { - sb.append("RangerPolicyItemAccess={"); - sb.append("type={").append(type).append("} "); - sb.append("isAllowed={").append(isAllowed).append("} "); - sb.append("}"); - - return sb; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result - + ((isAllowed == null) ? 0 : isAllowed.hashCode()); - result = prime * result + ((type == null) ? 
0 : type.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - RangerPolicyItemAccess other = (RangerPolicyItemAccess) obj; - if (isAllowed == null) { - if (other.isAllowed != null) - return false; - } else if (!isAllowed.equals(other.isAllowed)) - return false; - if (type == null) { - if (other.type != null) - return false; - } else if (!type.equals(other.type)) - return false; - return true; - } - - } - - @JsonInclude(JsonInclude.Include.NON_NULL) - @XmlRootElement - @XmlAccessorType(XmlAccessType.FIELD) - public static class RangerPolicyItemCondition implements java.io.Serializable { - private static final long serialVersionUID = 1L; - - private String type; - private List values; - - public RangerPolicyItemCondition() { - this(null, null); - } - - public RangerPolicyItemCondition(String type, List values) { - setType(type); - setValues(values); - } - - /** - * @return the type - */ - public String getType() { - return type; - } - - /** - * @param type the type to set - */ - public void setType(String type) { - this.type = type; - } - - /** - * @return the value - */ - public List getValues() { - return values; - } - - /** - * @param values the value to set - */ - public void setValues(List values) { - if (this.values == null) { - this.values = new ArrayList<>(); - } - - if(this.values == values) { - return; - } - - this.values.clear(); - - if(values != null) { - this.values.addAll(values); - } - } - - @Override - public String toString( ) { - StringBuilder sb = new StringBuilder(); - - toString(sb); - - return sb.toString(); - } - - public StringBuilder toString(StringBuilder sb) { - sb.append("RangerPolicyCondition={"); - sb.append("type={").append(type).append("} "); - sb.append("values={"); - if(values != null) { - for(String value : values) { - sb.append(value).append(" "); - } - } - sb.append("} "); - sb.append("}"); - - return sb; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((type == null) ? 0 : type.hashCode()); - result = prime * result - + ((values == null) ? 
0 : values.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - RangerPolicyItemCondition other = (RangerPolicyItemCondition) obj; - if (type == null) { - if (other.type != null) - return false; - } else if (!type.equals(other.type)) - return false; - if (values == null) { - if (other.values != null) - return false; - } else if (!values.equals(other.values)) - return false; - return true; - } - - } - - @JsonInclude(JsonInclude.Include.NON_NULL) - @XmlRootElement - @XmlAccessorType(XmlAccessType.FIELD) - public static class RangerPolicyItemDataMaskInfo implements java.io.Serializable { - private static final long serialVersionUID = 1L; - - private String dataMaskType; - private String conditionExpr; - private String valueExpr; - - public RangerPolicyItemDataMaskInfo() { } - - public RangerPolicyItemDataMaskInfo(String dataMaskType, String conditionExpr, String valueExpr) { - setDataMaskType(dataMaskType); - setConditionExpr(conditionExpr); - setValueExpr(valueExpr); - } - - public RangerPolicyItemDataMaskInfo(RangerPolicyItemDataMaskInfo that) { - this.dataMaskType = that.dataMaskType; - this.conditionExpr = that.conditionExpr; - this.valueExpr = that.valueExpr; - } - - public String getDataMaskType() { - return dataMaskType; - } - - public void setDataMaskType(String dataMaskType) { - this.dataMaskType = dataMaskType; - } - - public String getConditionExpr() { - return conditionExpr; - } - - public void setConditionExpr(String conditionExpr) { - this.conditionExpr = conditionExpr; - } - - public String getValueExpr() { - return valueExpr; - } - - public void setValueExpr(String valueExpr) { - this.valueExpr = valueExpr; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = super.hashCode(); - result = prime * result + ((dataMaskType == null) ? 0 : dataMaskType.hashCode()); - result = prime * result + ((conditionExpr == null) ? 0 : conditionExpr.hashCode()); - result = prime * result + ((valueExpr == null) ? 
0 : valueExpr.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - RangerPolicyItemDataMaskInfo other = (RangerPolicyItemDataMaskInfo) obj; - if (dataMaskType == null) { - if (other.dataMaskType != null) - return false; - } else if (!dataMaskType.equals(other.dataMaskType)) - return false; - if (conditionExpr == null) { - if (other.conditionExpr != null) - return false; - } else if (!conditionExpr.equals(other.conditionExpr)) - return false; - if (valueExpr == null) { - if (other.valueExpr != null) - return false; - } else if (!valueExpr.equals(other.valueExpr)) - return false; - return true; - } - - @Override - public String toString( ) { - StringBuilder sb = new StringBuilder(); - - toString(sb); - - return sb.toString(); - } - - public StringBuilder toString(StringBuilder sb) { - sb.append("RangerPolicyItemDataMaskInfo={"); - - sb.append("dataMaskType={").append(dataMaskType).append("} "); - sb.append("conditionExpr={").append(conditionExpr).append("} "); - sb.append("valueExpr={").append(valueExpr).append("} "); - - sb.append("}"); - - return sb; - } - } - - @JsonInclude(JsonInclude.Include.NON_NULL) - @XmlRootElement - @XmlAccessorType(XmlAccessType.FIELD) - public static class RangerPolicyItemRowFilterInfo implements java.io.Serializable { - private static final long serialVersionUID = 1L; - - private String filterExpr; - - public RangerPolicyItemRowFilterInfo() { } - - public RangerPolicyItemRowFilterInfo(String filterExpr) { - setFilterExpr(filterExpr); - } - - public RangerPolicyItemRowFilterInfo(RangerPolicyItemRowFilterInfo that) { - this.filterExpr = that.filterExpr; - } - - public String getFilterExpr() { - return filterExpr; - } - - public void setFilterExpr(String filterExpr) { - this.filterExpr = filterExpr; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = super.hashCode(); - result = prime * result + ((filterExpr == null) ? 
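Editor's note: one detail worth flagging in the removed code is that RangerPolicyItemDataMaskInfo and RangerPolicyItemRowFilterInfo seed hashCode() with super.hashCode() even though their superclass is Object, so two field-equal instances can return different hash codes while equals() returns true, which breaks the equals/hashCode contract for hash-based collections. A minimal null-safe alternative using java.util.Objects (the MaskInfo class below is illustrative, not part of the patch):

import java.util.Objects;

public class MaskInfo {
    private String dataMaskType;
    private String conditionExpr;
    private String valueExpr;

    // Value-based equality over the three fields only; no identity component,
    // so equal instances are guaranteed to produce equal hash codes.
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        MaskInfo other = (MaskInfo) obj;
        return Objects.equals(dataMaskType, other.dataMaskType)
                && Objects.equals(conditionExpr, other.conditionExpr)
                && Objects.equals(valueExpr, other.valueExpr);
    }

    @Override
    public int hashCode() {
        return Objects.hash(dataMaskType, conditionExpr, valueExpr);
    }
}
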
0 : filterExpr.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - RangerPolicyItemRowFilterInfo other = (RangerPolicyItemRowFilterInfo) obj; - if (filterExpr == null) { - if (other.filterExpr != null) - return false; - } else if (!filterExpr.equals(other.filterExpr)) - return false; - return true; - } - - @Override - public String toString( ) { - StringBuilder sb = new StringBuilder(); - - toString(sb); - - return sb.toString(); - } - - public StringBuilder toString(StringBuilder sb) { - sb.append("RangerPolicyItemRowFilterInfo={"); - - sb.append("filterExpr={").append(filterExpr).append("} "); - - sb.append("}"); - - return sb; - } - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPolicyEngine.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPolicyEngine.java index 98852f910d..332f2fec29 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPolicyEngine.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPolicyEngine.java @@ -62,7 +62,7 @@ public interface RangerPolicyEngine { void setRoles(RangerRoles roles); - RangerAccessResult evaluatePolicies(RangerAccessRequest request, String policyType, RangerAccessResultProcessor resultProcessor); + RangerAccessResult evaluatePolicies(RangerAccessRequest request, String policyType, RangerAccessResultProcessor resultProcessor, String uuid); Collection evaluatePolicies(Collection requests, String policyType, RangerAccessResultProcessor resultProcessor); diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPolicyEngineImpl.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPolicyEngineImpl.java index 90d18bf752..b7edea8b74 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPolicyEngineImpl.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPolicyEngineImpl.java @@ -112,10 +112,11 @@ public String toString() { } @Override - public RangerAccessResult evaluatePolicies(RangerAccessRequest request, String policyType, RangerAccessResultProcessor resultProcessor) { + public RangerAccessResult evaluatePolicies(RangerAccessRequest request, String policyType, RangerAccessResultProcessor resultProcessor, String uuid) { if (LOG.isDebugEnabled()) { LOG.debug("==> RangerPolicyEngineImpl.evaluatePolicies(" + request + ", policyType=" + policyType + ")"); } + long t0 = System.currentTimeMillis(); RangerPerfTracer perf = null; @@ -124,6 +125,7 @@ public RangerAccessResult evaluatePolicies(RangerAccessRequest request, String p perf = RangerPerfTracer.getPerfTracer(PERF_POLICYENGINE_REQUEST_LOG, "RangerPolicyEngine.evaluatePolicies(requestHashCode=" + requestHashCode + ")"); } + LOG.info("evaluatePolicies : perf-trace " + (System.currentTimeMillis() - t0)); RangerAccessResult ret; @@ -135,8 +137,10 @@ public RangerAccessResult evaluatePolicies(RangerAccessRequest request, String p } requestProcessor.preProcess(request); + LOG.info("evaluatePolicies : pre-process " + (System.currentTimeMillis() - t0) + "uuid: " + uuid); ret = zoneAwareAccessEvaluationWithNoAudit(request, policyType); + LOG.info("evaluatePolicies: zoneAwareAccessEval " + (System.currentTimeMillis() - t0)+ "uuid: " + uuid); if (resultProcessor != null) { 
RangerPerfTracer perfAuditTracer = null; @@ -146,8 +150,10 @@ public RangerAccessResult evaluatePolicies(RangerAccessRequest request, String p perfAuditTracer = RangerPerfTracer.getPerfTracer(PERF_POLICYENGINE_AUDIT_LOG, "RangerPolicyEngine.processAudit(requestHashCode=" + requestHashCode + ")"); } + LOG.info("evaluatePolicies : perf-tracer " + (System.currentTimeMillis() - t0)+ "uuid: " + uuid); resultProcessor.processResult(ret); + LOG.info("evaluatePolicies : pre-process result " + (System.currentTimeMillis() - t0)+ "uuid: " + uuid); RangerPerfTracer.log(perfAuditTracer); } @@ -158,6 +164,7 @@ public RangerAccessResult evaluatePolicies(RangerAccessRequest request, String p if (LOG.isDebugEnabled()) { LOG.debug("<== RangerPolicyEngineImpl.evaluatePolicies(" + request + ", policyType=" + policyType + "): " + ret); } + LOG.info("evaluatePolicies _ final return" + (System.currentTimeMillis() - t0)+ "uuid: " + uuid); return ret; } diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerBasePlugin.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerBasePlugin.java index b224cccc7e..9cbbe74b7d 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerBasePlugin.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerBasePlugin.java @@ -19,6 +19,8 @@ package org.apache.atlas.plugin.service; +import org.apache.atlas.authorizer.store.PoliciesStore; +import org.apache.atlas.authorizer.store.UsersStore; import org.apache.atlas.type.AtlasTypeRegistry; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.MapUtils; @@ -176,6 +178,7 @@ public RangerRoles getRoles() { public void setRoles(RangerRoles roles) { this.roles = roles; + UsersStore.setAllRoles(roles); RangerPolicyEngine policyEngine = this.policyEngine; @@ -192,6 +195,7 @@ public RangerUserStore getUserStore() { public void setUserStore(RangerUserStore userStore) { this.userStore = userStore; + UsersStore.setUserStore(userStore); // RangerPolicyEngine policyEngine = this.policyEngine; @@ -296,6 +300,14 @@ public void setPolicies(ServicePolicies policies) { LOG.debug("==> setPolicies(" + policies + ")"); } + if (policies != null) { + List resourcePolicies = policies.getPolicies(); + List tagPolicies = policies.getTagPolicies().getPolicies(); + + PoliciesStore.setResourcePolicies(resourcePolicies); + PoliciesStore.setTagPolicies(tagPolicies); + } + // guard against catastrophic failure during policy engine Initialization or try { RangerPolicyEngine oldPolicyEngine = this.policyEngine; @@ -461,38 +473,47 @@ public RangerAccessResultProcessor getResultProcessor() { } public RangerAccessResult isAccessAllowed(RangerAccessRequest request) { - return isAccessAllowed(request, resultProcessor); + return isAccessAllowed(request, resultProcessor, ""); } public Collection isAccessAllowed(Collection requests) { return isAccessAllowed(requests, resultProcessor); } - public RangerAccessResult isAccessAllowed(RangerAccessRequest request, RangerAccessResultProcessor resultProcessor) { + public RangerAccessResult isAccessAllowed(RangerAccessRequest request, RangerAccessResultProcessor resultProcessor, String uuid) { + RangerAccessResult ret = null; RangerPolicyEngine policyEngine = this.policyEngine; if (policyEngine != null) { - ret = policyEngine.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_ACCESS, null); + long startTime = System.currentTimeMillis(); + ret = policyEngine.evaluatePolicies(request, 
RangerPolicy.POLICY_TYPE_ACCESS, null, uuid); + LOG.info("policyEngine.evaluatePolicies ended in "+(System.currentTimeMillis() - startTime)+ " uuid: " + uuid); } if (ret != null) { for (RangerChainedPlugin chainedPlugin : chainedPlugins) { + long startTime = System.currentTimeMillis(); RangerAccessResult chainedResult = chainedPlugin.isAccessAllowed(request); - + LOG.info("chainedPlugin.isAccessAllowed ended in " + (System.currentTimeMillis() - startTime) + "uuid: " + uuid); if (chainedResult != null) { updateResultFromChainedResult(ret, chainedResult); } + LOG.info("chainedPlugin.isAccessAllowed : " + chainedPlugin.getClass().getName() + " ended in " + (System.currentTimeMillis() - startTime) + " uuid: "+ uuid); } } if (policyEngine != null) { + long startTime = System.currentTimeMillis(); policyEngine.evaluateAuditPolicies(ret); + LOG.info("policyEngine.evaluateAuditPolicies ended in " + (System.currentTimeMillis()-startTime) + "uuid: " + uuid); } if (resultProcessor != null) { + long startTime = System.currentTimeMillis(); resultProcessor.processResult(ret); + LOG.info("resultProcessor.processResult ended in " + (System.currentTimeMillis()-startTime) + "uuid: " + uuid); } return ret; @@ -544,7 +565,7 @@ public RangerAccessResult getAssetAccessors(RangerAccessRequest request) { RangerPolicyEngine policyEngine = this.policyEngine; if (policyEngine != null) { - ret = policyEngine.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_ACCESS, null); + ret = policyEngine.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_ACCESS, null, ""); } return ret; @@ -555,7 +576,7 @@ public RangerAccessResult evalDataMaskPolicies(RangerAccessRequest request, Rang RangerAccessResult ret = null; if(policyEngine != null) { - ret = policyEngine.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_DATAMASK, resultProcessor); + ret = policyEngine.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_DATAMASK, resultProcessor, ""); policyEngine.evaluateAuditPolicies(ret); } @@ -568,7 +589,7 @@ public RangerAccessResult evalRowFilterPolicies(RangerAccessRequest request, Ran RangerAccessResult ret = null; if(policyEngine != null) { - ret = policyEngine.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_ROWFILTER, resultProcessor); + ret = policyEngine.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_ROWFILTER, resultProcessor, ""); policyEngine.evaluateAuditPolicies(ret); } @@ -948,7 +969,7 @@ private void auditGrantRevoke(GrantRevokeRequest request, String action, boolean accessRequest.setSessionId(request.getSessionId()); // call isAccessAllowed() to determine if audit is enabled or not - RangerAccessResult accessResult = isAccessAllowed(accessRequest, null); + RangerAccessResult accessResult = isAccessAllowed(accessRequest, null, ""); if(accessResult != null && accessResult.getIsAudited()) { accessRequest.setAccessType(action); diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/KeycloakUserStore.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/KeycloakUserStore.java index b322458543..108e4b6c58 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/KeycloakUserStore.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/KeycloakUserStore.java @@ -119,7 +119,7 @@ public boolean isKeycloakSubjectsStoreUpdated(long cacheLastUpdatedTime) throws if (CollectionUtils.isEmpty(events) || cacheLastUpdatedTime > events.get(0).getTime()) { break; } - + Optional event = events.stream().filter(this::isUpdateProfileEvent).findFirst(); if 
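Editor's note: the uuid parameter threaded through evaluatePolicies() and isAccessAllowed() above exists only so the per-stage elapsed-time LOG.info lines can be correlated back to a single request; note that several of the added messages concatenate the elapsed millis directly against "uuid:" without a separating space. A self-contained sketch of the same correlation pattern, assuming an SLF4J logger rather than the plugin's own logging setup (class and method names are illustrative):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.UUID;

public class TimedEvaluation {
    private static final Logger LOG = LoggerFactory.getLogger(TimedEvaluation.class);

    // Log elapsed time at each checkpoint, tagged with a caller-supplied request id.
    public void evaluate(String uuid) {
        long t0 = System.currentTimeMillis();

        preProcess();
        LOG.info("evaluate : pre-process took {} ms, uuid: {}", System.currentTimeMillis() - t0, uuid);

        doEvaluate();
        LOG.info("evaluate : evaluation took {} ms, uuid: {}", System.currentTimeMillis() - t0, uuid);
    }

    private void preProcess() { /* placeholder */ }
    private void doEvaluate() { /* placeholder */ }

    public static void main(String[] args) {
        new TimedEvaluation().evaluate(UUID.randomUUID().toString());
    }
}
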
(event.isPresent()) { diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRESTClient.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRESTClient.java index 94af3510e4..4a265a8430 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRESTClient.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRESTClient.java @@ -19,6 +19,7 @@ package org.apache.atlas.plugin.util; +import com.fasterxml.jackson.jaxrs.json.JacksonJsonProvider; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.sun.jersey.api.client.Client; @@ -36,7 +37,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.atlas.authorization.hadoop.utils.RangerCredentialProvider; import org.apache.atlas.authorization.utils.StringUtil; -import org.codehaus.jackson.jaxrs.JacksonJsonProvider; import javax.net.ssl.HostnameVerifier; import javax.net.ssl.KeyManager; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/ServicePolicies.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/ServicePolicies.java index 547349c8f5..9713e85034 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/ServicePolicies.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/ServicePolicies.java @@ -20,6 +20,7 @@ package org.apache.atlas.plugin.util; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import org.apache.commons.collections.MapUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -40,6 +41,7 @@ import java.util.List; import java.util.Map; +@JsonIgnoreProperties(ignoreUnknown = true) @JsonInclude(JsonInclude.Include.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) diff --git a/auth-agents-common/src/main/java/org/apache/atlas/policytransformer/AbstractCachePolicyTransformer.java b/auth-agents-common/src/main/java/org/apache/atlas/policytransformer/AbstractCachePolicyTransformer.java index 5c0a16b0b6..cd5dec9e36 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/policytransformer/AbstractCachePolicyTransformer.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/policytransformer/AbstractCachePolicyTransformer.java @@ -30,6 +30,7 @@ public abstract class AbstractCachePolicyTransformer implements CachePolicyTrans public static final String PLACEHOLDER_ENTITY = "{entity}"; public static final String PLACEHOLDER_ENTITY_TYPE = "{entity-type}"; + public static final String PLACEHOLDER_FILTER_CRITERIA = "{criteria}"; public static final String PLACEHOLDER_TAG = "{tag}"; private static Map TEMPLATES = new HashMap<>(); diff --git a/auth-agents-common/src/main/java/org/apache/atlas/policytransformer/CachePolicyTransformerImpl.java b/auth-agents-common/src/main/java/org/apache/atlas/policytransformer/CachePolicyTransformerImpl.java index 6f27c49983..a64dd1738d 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/policytransformer/CachePolicyTransformerImpl.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/policytransformer/CachePolicyTransformerImpl.java @@ -480,6 +480,7 @@ private List getAtlasPolicies(String serviceName, int batchSi List> mustClauseList = new ArrayList<>(); mustClauseList.add(getMap("term", getMap(ATTR_POLICY_SERVICE_NAME, serviceName))); + mustClauseList.add(getMap("term", getMap(ATTR_POLICY_IS_ENABLED, true))); mustClauseList.add(getMap("match", getMap("__state", Id.EntityState.ACTIVE))); dsl.put("query", 
getMap("bool", getMap("must", mustClauseList))); diff --git a/auth-agents-common/src/main/java/org/apache/atlas/policytransformer/PersonaCachePolicyTransformer.java b/auth-agents-common/src/main/java/org/apache/atlas/policytransformer/PersonaCachePolicyTransformer.java index 6a6d2d3cd9..dc070e2e31 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/policytransformer/PersonaCachePolicyTransformer.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/policytransformer/PersonaCachePolicyTransformer.java @@ -17,6 +17,9 @@ */ package org.apache.atlas.policytransformer; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.atlas.RequestContext; import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.model.instance.AtlasEntity; @@ -33,20 +36,7 @@ import static org.apache.atlas.policytransformer.CachePolicyTransformerImpl.ATTR_NAME; import static org.apache.atlas.policytransformer.CachePolicyTransformerImpl.ATTR_POLICY_RESOURCES; -import static org.apache.atlas.repository.util.AccessControlUtils.ATTR_POLICY_ACTIONS; -import static org.apache.atlas.repository.util.AccessControlUtils.ATTR_POLICY_IS_ENABLED; -import static org.apache.atlas.repository.util.AccessControlUtils.ATTR_POLICY_RESOURCES_CATEGORY; -import static org.apache.atlas.repository.util.AccessControlUtils.POLICY_SUB_CATEGORY_DATA; -import static org.apache.atlas.repository.util.AccessControlUtils.POLICY_SUB_CATEGORY_METADATA; -import static org.apache.atlas.repository.util.AccessControlUtils.RESOURCES_ENTITY; -import static org.apache.atlas.repository.util.AccessControlUtils.RESOURCES_ENTITY_TYPE; -import static org.apache.atlas.repository.util.AccessControlUtils.getEntityByQualifiedName; -import static org.apache.atlas.repository.util.AccessControlUtils.getFilteredPolicyResources; -import static org.apache.atlas.repository.util.AccessControlUtils.getIsPolicyEnabled; -import static org.apache.atlas.repository.util.AccessControlUtils.getPolicyActions; -import static org.apache.atlas.repository.util.AccessControlUtils.getPolicyConnectionQN; -import static org.apache.atlas.repository.util.AccessControlUtils.getPolicyResources; -import static org.apache.atlas.repository.util.AccessControlUtils.getPolicySubCategory; +import static org.apache.atlas.repository.util.AccessControlUtils.*; public class PersonaCachePolicyTransformer extends AbstractCachePolicyTransformer { private static final Logger LOG = LoggerFactory.getLogger(PersonaCachePolicyTransformer.class); @@ -64,12 +54,14 @@ public PersonaCachePolicyTransformer(EntityGraphRetriever entityRetriever) throw public List transform(AtlasEntityHeader atlasPolicy) { AtlasPerfMetrics.MetricRecorder recorder = RequestContext.get().startMetricRecord("PersonaCachePolicyTransformer.transform"); List ret = new ArrayList<>(); - List atlasActions = getPolicyActions(atlasPolicy); List atlasResources = getPolicyResources(atlasPolicy); List entityResources = getFilteredPolicyResources(atlasResources, RESOURCES_ENTITY); List typeResources = getFilteredPolicyResources(atlasResources, RESOURCES_ENTITY_TYPE); + String policyServiceName = getPolicyServiceName(atlasPolicy); + String policyFilterCriteria = getPolicyFilterCriteria(atlasPolicy); + int index = 0; for (String atlasAction : atlasActions) { List currentTemplates = personaTemplate.getTemplate(atlasAction); @@ -83,47 +75,61 @@ public List transform(AtlasEntityHeader atlasPolicy) { 
AtlasEntityHeader header = new AtlasEntityHeader(atlasPolicy); header.setGuid(atlasPolicy.getGuid() + "-" + index++); - header.setAttribute(ATTR_POLICY_ACTIONS, templatePolicy.getActions()); header.setAttribute(ATTR_POLICY_RESOURCES_CATEGORY, templatePolicy.getPolicyResourceCategory()); header.setAttribute(ATTR_POLICY_IS_ENABLED, getIsPolicyEnabled(atlasPolicy)); + header.setAttribute(ATTR_NAME, "transformed_policy_persona"); - String subCategory = getPolicySubCategory(atlasPolicy); - - List finalResources = new ArrayList<>(); - - for (String templateResource : templatePolicy.getResources()) { - if (templateResource.contains(PLACEHOLDER_ENTITY)) { - for (String entityResource : entityResources) { - finalResources.add(templateResource.replace(PLACEHOLDER_ENTITY, entityResource)); + if (policyServiceName.equals(POLICY_SERVICE_NAME_ABAC)) { + if (policyFilterCriteria != null && !policyFilterCriteria.isEmpty()) { + ObjectMapper mapper = new ObjectMapper(); + try { + JsonNode filterCriteriaNode = mapper.readTree(policyFilterCriteria); + if (filterCriteriaNode != null && filterCriteriaNode.get("entity") != null) { + JsonNode entityFilterCriteriaNode = filterCriteriaNode.get("entity"); + policyFilterCriteria = entityFilterCriteriaNode.toString(); + } + } catch (JsonProcessingException e) { + e.printStackTrace(); } + } + header.setAttribute(ATTR_POLICY_FILTER_CRITERIA, + templatePolicy.getPolicyFilterCriteria().replace(PLACEHOLDER_FILTER_CRITERIA, policyFilterCriteria)); + } else { + String subCategory = getPolicySubCategory(atlasPolicy); - } else if (templateResource.contains(PLACEHOLDER_ENTITY_TYPE)) { - - if (CollectionUtils.isNotEmpty(typeResources)) { - typeResources.forEach(x -> finalResources.add(templateResource.replace(PLACEHOLDER_ENTITY_TYPE, x))); - } else { - boolean isConnection = false; + List finalResources = new ArrayList<>(); - if (POLICY_SUB_CATEGORY_METADATA.equals(subCategory) || POLICY_SUB_CATEGORY_DATA.equals(subCategory)) { - isConnection = isConnectionPolicy(entityResources, atlasPolicy); + for (String templateResource : templatePolicy.getResources()) { + if (templateResource.contains(PLACEHOLDER_ENTITY)) { + for (String entityResource : entityResources) { + finalResources.add(templateResource.replace(PLACEHOLDER_ENTITY, entityResource)); } - if (isConnection) { - finalResources.add(templateResource.replace(PLACEHOLDER_ENTITY_TYPE, "*")); + } else if (templateResource.contains(PLACEHOLDER_ENTITY_TYPE)) { + + if (CollectionUtils.isNotEmpty(typeResources)) { + typeResources.forEach(x -> finalResources.add(templateResource.replace(PLACEHOLDER_ENTITY_TYPE, x))); } else { - finalResources.add(templateResource.replace(PLACEHOLDER_ENTITY_TYPE, "Process")); - finalResources.add(templateResource.replace(PLACEHOLDER_ENTITY_TYPE, "Catalog")); + boolean isConnection = false; + + if (POLICY_SUB_CATEGORY_METADATA.equals(subCategory) || POLICY_SUB_CATEGORY_DATA.equals(subCategory)) { + isConnection = isConnectionPolicy(entityResources, atlasPolicy); + } + + if (isConnection) { + finalResources.add(templateResource.replace(PLACEHOLDER_ENTITY_TYPE, "*")); + } else { + finalResources.add(templateResource.replace(PLACEHOLDER_ENTITY_TYPE, "Process")); + finalResources.add(templateResource.replace(PLACEHOLDER_ENTITY_TYPE, "Catalog")); + } } + } else { + finalResources.add(templateResource); } - } else { - finalResources.add(templateResource); } + header.setAttribute(ATTR_POLICY_RESOURCES, finalResources); } - header.setAttribute(ATTR_POLICY_RESOURCES, finalResources); - - 
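Editor's note: for ABAC policies the transformer above parses the policy's filter-criteria JSON and, when an "entity" node is present, substitutes only that sub-tree into the template's {criteria} placeholder; on a parse failure it just prints the stack trace and falls through with the original string. A hedged standalone sketch of that extraction (class and method names are illustrative; the JSON shape is inferred from the code above):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class FilterCriteriaExtractor {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    // Returns the "entity" sub-criteria if present, otherwise the original JSON string.
    static String extractEntityCriteria(String filterCriteriaJson) {
        if (filterCriteriaJson == null || filterCriteriaJson.isEmpty()) {
            return filterCriteriaJson;
        }
        try {
            JsonNode root = MAPPER.readTree(filterCriteriaJson);
            JsonNode entity = (root == null) ? null : root.get("entity");
            return (entity != null) ? entity.toString() : filterCriteriaJson;
        } catch (Exception e) {          // JsonProcessingException in the patch
            return filterCriteriaJson;   // fall back to the raw criteria on parse failure
        }
    }

    public static void main(String[] args) {
        String sample = "{\"entity\":{\"condition\":\"AND\",\"criterion\":[]}}";
        System.out.println(extractEntityCriteria(sample));
    }
}
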
header.setAttribute(ATTR_NAME, "transformed_policy_persona"); - ret.add(header); } } diff --git a/auth-agents-common/src/main/java/org/apache/atlas/policytransformer/PolicyTransformerTemplate.java b/auth-agents-common/src/main/java/org/apache/atlas/policytransformer/PolicyTransformerTemplate.java index 3bde95eef1..13ada447c5 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/policytransformer/PolicyTransformerTemplate.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/policytransformer/PolicyTransformerTemplate.java @@ -63,6 +63,11 @@ public void fromJsonString(String json) { templatePolicy.setPolicyResourceCategory((String) policy.get("policyResourceCategory")); templatePolicy.setPolicyServiceName((String) policy.get("policyServiceName")); + Object filterCriteria = policy.get("policyFilterCriteria"); + if (filterCriteria != null) { + templatePolicy.setPolicyFilterCriteria((String) filterCriteria); + } + policies.add(templatePolicy); } @@ -76,6 +81,7 @@ class TemplatePolicy { private List resources; private List actions; private String policyResourceCategory; + private String policyFilterCriteria; public String getPolicyServiceName() { return policyServiceName; @@ -116,5 +122,13 @@ public List getActions() { public void setActions(List actions) { this.actions = actions; } + + public String getPolicyFilterCriteria() { + return policyFilterCriteria; + } + + public void setPolicyFilterCriteria(String policyFilterCriteria) { + this.policyFilterCriteria = policyFilterCriteria; + } } } diff --git a/auth-agents-common/src/main/resources/service-defs/atlas-servicedef-atlas.json b/auth-agents-common/src/main/resources/service-defs/atlas-servicedef-atlas.json index 0539a562b9..dc8a72c20d 100644 --- a/auth-agents-common/src/main/resources/service-defs/atlas-servicedef-atlas.json +++ b/auth-agents-common/src/main/resources/service-defs/atlas-servicedef-atlas.json @@ -450,6 +450,11 @@ "itemId": 24, "name": "admin-task-cud", "label": "Admin task CUD API" + }, + { + "itemId": 25, + "name": "admin-featureFlag-cud", + "label": "Admin featureflag CUD API" } ], diff --git a/auth-common/pom.xml b/auth-common/pom.xml new file mode 100644 index 0000000000..d816422e69 --- /dev/null +++ b/auth-common/pom.xml @@ -0,0 +1,31 @@ + + + + apache-atlas + org.apache.atlas + 3.0.0-SNAPSHOT + + 4.0.0 + + auth-common + + + + org.apache.htrace + htrace-core4 + 4.1.0-incubating + compile + + + commons-collections + commons-collections + + + commons-lang + commons-lang + + + + \ No newline at end of file diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/GroupInfo.java b/auth-common/src/main/java/org/apache/atlas/plugin/model/GroupInfo.java similarity index 100% rename from auth-agents-common/src/main/java/org/apache/atlas/plugin/model/GroupInfo.java rename to auth-common/src/main/java/org/apache/atlas/plugin/model/GroupInfo.java index ea5630744e..bab8db509d 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/GroupInfo.java +++ b/auth-common/src/main/java/org/apache/atlas/plugin/model/GroupInfo.java @@ -19,8 +19,8 @@ package org.apache.atlas.plugin.model; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; import org.apache.atlas.plugin.util.RangerUserStoreUtil; +import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; diff --git a/auth-common/src/main/java/org/apache/atlas/plugin/model/NewAccessResourceImpl.java 
b/auth-common/src/main/java/org/apache/atlas/plugin/model/NewAccessResourceImpl.java new file mode 100644 index 0000000000..0841060387 --- /dev/null +++ b/auth-common/src/main/java/org/apache/atlas/plugin/model/NewAccessResourceImpl.java @@ -0,0 +1,253 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.atlas.plugin.model; + +import org.apache.atlas.plugin.model.RangerServiceDef.RangerResourceDef; +import org.apache.commons.lang.ObjectUtils; + +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class NewAccessResourceImpl { + + private String RESOURCE_SEP = "/"; + private String RESOURCE_NAME_VAL_SEP = "="; + + private String ownerUser; + private Map elements; + private String stringifiedValue; + private String stringifiedCacheKeyValue; + private String leafName; + private RangerServiceDef serviceDef; + + public NewAccessResourceImpl() { + this(null, null); + } + + public NewAccessResourceImpl(Map elements) { + this(elements, null); + } + + public NewAccessResourceImpl(Map elements, String ownerUser) { + this.elements = elements; + this.ownerUser = ownerUser; + } + + public String getOwnerUser() { + return ownerUser; + } + + public boolean exists(String name) { + return elements != null && elements.containsKey(name); + } + + public Object getValue(String name) { + Object ret = null; + + if(elements != null && elements.containsKey(name)) { + ret = elements.get(name); + } + + return ret; + } + + public Set getKeys() { + Set ret = null; + + if(elements != null) { + ret = elements.keySet(); + } + + return ret; + } + + public void setOwnerUser(String ownerUser) { + this.ownerUser = ownerUser; + } + + public void setValue(String name, Object value) { + if(value == null) { + if(elements != null) { + elements.remove(name); + + if(elements.isEmpty()) { + elements = null; + } + } + } else { + if(elements == null) { + elements = new HashMap<>(); + } + elements.put(name, value); + } + + // reset, so that these will be computed again with updated elements + stringifiedValue = stringifiedCacheKeyValue = leafName = null; + } + + public void setServiceDef(final RangerServiceDef serviceDef) { + this.serviceDef = serviceDef; + this.stringifiedValue = this.stringifiedCacheKeyValue = this.leafName = null; + } + + public RangerServiceDef getServiceDef() { + return this.serviceDef; + } + + public String getLeafName() { + String ret = leafName; + + if(ret == null) { + if(serviceDef != null && serviceDef.getResources() != null) { + List resourceDefs = serviceDef.getResources(); + + for(int idx = resourceDefs.size() - 1; idx >= 0; idx--) { + RangerResourceDef resourceDef = resourceDefs.get(idx); + + if(resourceDef != null && 
exists(resourceDef.getName())) { + ret = leafName = resourceDef.getName(); + break; + } + } + } + } + + return ret; + } + + public String getAsString() { + String ret = stringifiedValue; + + if(ret == null) { + if(serviceDef != null && serviceDef.getResources() != null) { + StringBuilder sb = new StringBuilder(); + + for(RangerResourceDef resourceDef : serviceDef.getResources()) { + if(resourceDef == null || !exists(resourceDef.getName())) { + continue; + } + + if(sb.length() > 0) { + sb.append(RESOURCE_SEP); + } + + sb.append(getValue(resourceDef.getName())); + } + + if(sb.length() > 0) { + ret = stringifiedValue = sb.toString(); + } + } + } + + return ret; + } + + public String getCacheKey() { + String ret = stringifiedCacheKeyValue; + + if(ret == null) { + if(serviceDef != null && serviceDef.getResources() != null) { + StringBuilder sb = new StringBuilder(); + + for(RangerResourceDef resourceDef : serviceDef.getResources()) { + if(resourceDef == null || !exists(resourceDef.getName())) { + continue; + } + + if(sb.length() > 0) { + sb.append(RESOURCE_SEP); + } + + sb.append(resourceDef.getName()).append(RESOURCE_NAME_VAL_SEP).append(getValue(resourceDef.getName())); + } + + if(sb.length() > 0) { + ret = stringifiedCacheKeyValue = sb.toString(); + } + } + } + + return ret; + } + + public Map getAsMap() { + return elements == null ? Collections.EMPTY_MAP : Collections.unmodifiableMap(elements); + } + + public boolean equals(Object obj) { + if(obj == null || !(obj instanceof NewAccessResourceImpl)) { + return false; + } + + if(this == obj) { + return true; + } + + NewAccessResourceImpl other = (NewAccessResourceImpl) obj; + + return ObjectUtils.equals(ownerUser, other.ownerUser) && + ObjectUtils.equals(elements, other.elements); + } + + @Override + public int hashCode() { + int ret = 7; + + ret = 31 * ret + ObjectUtils.hashCode(ownerUser); + ret = 31 * ret + ObjectUtils.hashCode(elements); + + return ret; + } + + @Override + public String toString( ) { + StringBuilder sb = new StringBuilder(); + + toString(sb); + + return sb.toString(); + } + + public StringBuilder toString(StringBuilder sb) { + sb.append("RangerResourceImpl={"); + + sb.append("ownerUser={").append(ownerUser).append("} "); + + sb.append("elements={"); + if(elements != null) { + for(Map.Entry e : elements.entrySet()) { + sb.append(e.getKey()).append("=").append(e.getValue()).append("; "); + } + } + sb.append("} "); + + sb.append("}"); + + return sb; + } + + protected String getStringifiedValue() { return stringifiedValue; } + + protected void setStringifiedValue(String val) { this.stringifiedValue = val; } +} diff --git a/auth-common/src/main/java/org/apache/atlas/plugin/model/RangerBaseModelObject.java b/auth-common/src/main/java/org/apache/atlas/plugin/model/RangerBaseModelObject.java new file mode 100644 index 0000000000..aa8532da15 --- /dev/null +++ b/auth-common/src/main/java/org/apache/atlas/plugin/model/RangerBaseModelObject.java @@ -0,0 +1,170 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.atlas.plugin.model; + +import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlRootElement; +import java.util.Date; + +@JsonInclude(JsonInclude.Include.NON_NULL) +@XmlRootElement +@XmlAccessorType(XmlAccessType.FIELD) +public class RangerBaseModelObject implements java.io.Serializable { + private static final long serialVersionUID = 1L; + + private Long id; + private String guid; + private Boolean isEnabled; + private String createdBy; + private String updatedBy; + private Date createTime; + private Date updateTime; + private Long version; + + public RangerBaseModelObject() { + setIsEnabled(null); + } + + public void updateFrom(RangerBaseModelObject other) { + setIsEnabled(other.getIsEnabled()); + } + + /** + * @return the id + */ + public Long getId() { + return id; + } + /** + * @param id the id to set + */ + public void setId(Long id) { + this.id = id; + } + /** + * @return the guid + */ + public String getGuid() { + return guid; + } + /** + * @param guid the guid to set + */ + public void setGuid(String guid) { + this.guid = guid; + } + /** + * @return the isEnabled + */ + public Boolean getIsEnabled() { + return isEnabled; + } + /** + * @param isEnabled the isEnabled to set + */ + public void setIsEnabled(Boolean isEnabled) { + this.isEnabled = isEnabled == null ? 
Boolean.TRUE : isEnabled; + } + /** + * @return the createdBy + */ + public String getCreatedBy() { + return createdBy; + } + /** + * @param createdBy the createdBy to set + */ + public void setCreatedBy(String createdBy) { + this.createdBy = createdBy; + } + /** + * @return the updatedBy + */ + public String getUpdatedBy() { + return updatedBy; + } + /** + * @param updatedBy the updatedBy to set + */ + public void setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + } + /** + * @return the createTime + */ + public Date getCreateTime() { + return createTime; + } + /** + * @param createTime the createTime to set + */ + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + /** + * @return the updateTime + */ + public Date getUpdateTime() { + return updateTime; + } + /** + * @param updateTime the updateTime to set + */ + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + /** + * @return the version + */ + public Long getVersion() { + return version; + } + /** + * @param version the version to set + */ + public void setVersion(Long version) { + this.version = version; + } + + @Override + public String toString( ) { + StringBuilder sb = new StringBuilder(); + + toString(sb); + + return sb.toString(); + } + + public StringBuilder toString(StringBuilder sb) { + sb.append("id={").append(id).append("} "); + sb.append("guid={").append(guid).append("} "); + sb.append("isEnabled={").append(isEnabled).append("} "); + sb.append("createdBy={").append(createdBy).append("} "); + sb.append("updatedBy={").append(updatedBy).append("} "); + sb.append("createTime={").append(createTime).append("} "); + sb.append("updateTime={").append(updateTime).append("} "); + sb.append("version={").append(version).append("} "); + + return sb; + } +} diff --git a/auth-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicy.java b/auth-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicy.java new file mode 100644 index 0000000000..be93b492e9 --- /dev/null +++ b/auth-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicy.java @@ -0,0 +1,1708 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.atlas.plugin.model; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonIgnoreProperties; +import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlRootElement; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +@JsonIgnoreProperties(ignoreUnknown = true) +@JsonInclude(JsonInclude.Include.NON_NULL) +@XmlRootElement +@XmlAccessorType(XmlAccessType.FIELD) +public class RangerPolicy extends RangerBaseModelObject implements java.io.Serializable { + public static final String POLICY_TYPE_ACCESS = "ACCESS"; + public static final String POLICY_TYPE_DATAMASK = "DATA_MASK"; + public static final String POLICY_TYPE_ROWFILTER = "ROW_FILTER"; + public static final String POLICY_TYPE_AUDIT = "AUDIT"; + + public static final String[] POLICY_TYPES = new String[] { + POLICY_TYPE_ACCESS, + POLICY_TYPE_DATAMASK, + POLICY_TYPE_ROWFILTER + }; + + public static final String MASK_TYPE_NULL = "MASK_NULL"; + public static final String MASK_TYPE_NONE = "MASK_NONE"; + public static final String MASK_TYPE_CUSTOM = "CUSTOM"; + + public static final int POLICY_PRIORITY_NORMAL = 0; + public static final int POLICY_PRIORITY_OVERRIDE = 1; + + public static final String POLICY_PRIORITY_NAME_NORMAL = "NORMAL"; + public static final String POLICY_PRIORITY_NAME_OVERRIDE = "OVERRIDE"; + + public static final Comparator POLICY_ID_COMPARATOR = new PolicyIdComparator(); + + // For future use + private static final long serialVersionUID = 1L; + + private String service; + private String name; + private String policyType; + private Integer policyPriority; + private String description; + private String resourceSignature; + private Boolean isAuditEnabled; + private Map resources; + private List conditions; + private List policyItems; + private List denyPolicyItems; + private List allowExceptions; + private List denyExceptions; + private List dataMaskPolicyItems; + private List rowFilterPolicyItems; + private String serviceType; + private Map options; + private List validitySchedules; + private List policyLabels; + private String zoneName; + private Boolean isDenyAllElse; + private Map attributes; + + public RangerPolicy() { + this(null, null, null, null, null, null, null, null, null, null, null); + } + + public RangerPolicy(String service, String name, String policyType, Integer policyPriority, String description, Map resources, List policyItems, String resourceSignature, Map options, List validitySchedules, List policyLables) { + this(service, name, policyType, policyPriority, description, resources, policyItems, resourceSignature, options, validitySchedules, policyLables, null); + } + + public RangerPolicy(String service, String name, String policyType, Integer policyPriority, String description, Map resources, List policyItems, String resourceSignature, Map options, List validitySchedules, List policyLables, String zoneName) { + this(service, name, policyType, policyPriority, description, resources, policyItems, resourceSignature, options, validitySchedules, policyLables, zoneName, null); + } + + public RangerPolicy(String service, String name, String policyType, Integer policyPriority, String description, Map resources, List policyItems, String resourceSignature, Map options, List 
validitySchedules, List policyLables, String zoneName, List conditions) { + this(service, name, policyType, policyPriority, description, resources, policyItems, resourceSignature, options, validitySchedules, policyLables, zoneName, conditions, null); + } + + public RangerPolicy(String service, String name, String policyType, Integer policyPriority, String description, Map resources, List policyItems, String resourceSignature, Map options, List validitySchedules, List policyLables, String zoneName, List conditions, Boolean isDenyAllElse) { + this(service, name, policyType, policyPriority, description, resources, policyItems, resourceSignature, options, validitySchedules, policyLables, zoneName, conditions, null, null, null); + } + + /** + * @param service + * @param name + * @param policyType + * @param description + * @param resources + * @param policyItems + * @param resourceSignature TODO + */ + public RangerPolicy(String service, String name, String policyType, Integer policyPriority, String description, Map resources, List policyItems, String resourceSignature, Map options, List validitySchedules, List policyLables, String zoneName, List conditions, Boolean isDenyAllElse, String policyFilterCriteria, String policyResourceCategory) { + super(); + + setService(service); + setName(name); + setPolicyType(policyType); + setPolicyPriority(policyPriority); + setDescription(description); + setResourceSignature(resourceSignature); + setIsAuditEnabled(null); + setResources(resources); + setPolicyItems(policyItems); + setDenyPolicyItems(null); + setAllowExceptions(null); + setDenyExceptions(null); + setDataMaskPolicyItems(null); + setRowFilterPolicyItems(null); + setOptions(options); + setValiditySchedules(validitySchedules); + setPolicyLabels(policyLables); + setZoneName(zoneName); + setConditions(conditions); + setIsDenyAllElse(isDenyAllElse); + + } + + /** + * @param other + */ + public void updateFrom(RangerPolicy other) { + super.updateFrom(other); + + setService(other.getService()); + setName(other.getName()); + setPolicyType(other.getPolicyType()); + setPolicyPriority(other.getPolicyPriority()); + setDescription(other.getDescription()); + setResourceSignature(other.getResourceSignature()); + setIsAuditEnabled(other.getIsAuditEnabled()); + setResources(other.getResources()); + setConditions(other.getConditions()); + setPolicyItems(other.getPolicyItems()); + setDenyPolicyItems(other.getDenyPolicyItems()); + setAllowExceptions(other.getAllowExceptions()); + setDenyExceptions(other.getDenyExceptions()); + setDataMaskPolicyItems(other.getDataMaskPolicyItems()); + setRowFilterPolicyItems(other.getRowFilterPolicyItems()); + setServiceType(other.getServiceType()); + setOptions(other.getOptions()); + setValiditySchedules(other.getValiditySchedules()); + setPolicyLabels(other.getPolicyLabels()); + setZoneName(other.getZoneName()); + setIsDenyAllElse(other.getIsDenyAllElse()); + } + + public Map getAttributes() { + return attributes; + } + + public void setAttributes(Map attributes) { + this.attributes = attributes; + } + + /** + * @return the type + */ + public String getService() { + return service; + } + + /** + * @param service the type to set + */ + public void setService(String service) { + this.service = service; + } + + /** + * @return the name + */ + public String getName() { + return name; + } + + /** + * @param name the name to set + */ + public void setName(String name) { + this.name = name; + } + + /** + * @return the policyType + */ + public String getPolicyType() { + return policyType; 
+ } + + /** + * @param policyType the policyType to set + */ + public void setPolicyType(String policyType) { + this.policyType = policyType; + } + + /** + * @return the policyPriority + */ + public Integer getPolicyPriority() { + return policyPriority; + } + + /** + * @param policyPriority the policyPriority to set + */ + public void setPolicyPriority(Integer policyPriority) { + this.policyPriority = policyPriority == null ? RangerPolicy.POLICY_PRIORITY_NORMAL : policyPriority; + } + + /** + * @return the description + */ + public String getDescription() { + return description; + } + + /** + * @param description the description to set + */ + public void setDescription(String description) { + this.description = description; + } + + /** + * @return the resourceSignature + */ + public String getResourceSignature() { + return resourceSignature; + } + + /** + * @param resourceSignature the resourceSignature to set + */ + public void setResourceSignature(String resourceSignature) { + this.resourceSignature = resourceSignature; + } + + /** + * @return the isAuditEnabled + */ + public Boolean getIsAuditEnabled() { + return isAuditEnabled; + } + + /** + * @param isAuditEnabled the isEnabled to set + */ + public void setIsAuditEnabled(Boolean isAuditEnabled) { + this.isAuditEnabled = isAuditEnabled == null ? Boolean.TRUE : isAuditEnabled; + } + + public String getServiceType() { + return serviceType; + } + + public void setServiceType(String serviceType) { + this.serviceType = serviceType; + } + + public List getPolicyLabels() { + return policyLabels; + } + + public void setPolicyLabels(List policyLabels) { + if (this.policyLabels == null) { + this.policyLabels = new ArrayList<>(); + } + + if (this.policyLabels == policyLabels) { + return; + } + + this.policyLabels.clear(); + + if (policyLabels != null) { + this.policyLabels.addAll(policyLabels); + } + } + + /** + * @return the resources + */ + public Map getResources() { + return resources; + } + + /** + * @param resources the resources to set + */ + public void setResources(Map resources) { + if(this.resources == null) { + this.resources = new HashMap<>(); + } + + if(this.resources == resources) { + return; + } + + this.resources.clear(); + + if(resources != null) { + for(Map.Entry e : resources.entrySet()) { + this.resources.put(e.getKey(), e.getValue()); + } + } + } + + /** + * @return the policyItems + */ + public List getPolicyItems() { + return policyItems; + } + + /** + * @param policyItems the policyItems to set + */ + public void setPolicyItems(List policyItems) { + if(this.policyItems == null) { + this.policyItems = new ArrayList<>(); + } + + if(this.policyItems == policyItems) { + return; + } + + this.policyItems.clear(); + + if(policyItems != null) { + this.policyItems.addAll(policyItems); + } + } + + /** + * @return the denyPolicyItems + */ + public List getDenyPolicyItems() { + return denyPolicyItems; + } + + /** + * @param denyPolicyItems the denyPolicyItems to set + */ + public void setDenyPolicyItems(List denyPolicyItems) { + if(this.denyPolicyItems == null) { + this.denyPolicyItems = new ArrayList<>(); + } + + if(this.denyPolicyItems == denyPolicyItems) { + return; + } + + this.denyPolicyItems.clear(); + + if(denyPolicyItems != null) { + this.denyPolicyItems.addAll(denyPolicyItems); + } + } + + /** + * @return the allowExceptions + */ + public List getAllowExceptions() { + return allowExceptions; + } + + /** + * @param allowExceptions the allowExceptions to set + */ + public void setAllowExceptions(List allowExceptions) { + 
if(this.allowExceptions == null) { + this.allowExceptions = new ArrayList<>(); + } + + if(this.allowExceptions == allowExceptions) { + return; + } + + this.allowExceptions.clear(); + + if(allowExceptions != null) { + this.allowExceptions.addAll(allowExceptions); + } + } + + /** + * @return the denyExceptions + */ + public List getDenyExceptions() { + return denyExceptions; + } + + /** + * @param denyExceptions the denyExceptions to set + */ + public void setDenyExceptions(List denyExceptions) { + if(this.denyExceptions == null) { + this.denyExceptions = new ArrayList<>(); + } + + if(this.denyExceptions == denyExceptions) { + return; + } + + this.denyExceptions.clear(); + + if(denyExceptions != null) { + this.denyExceptions.addAll(denyExceptions); + } + } + + public List getDataMaskPolicyItems() { + return dataMaskPolicyItems; + } + + public void setDataMaskPolicyItems(List dataMaskPolicyItems) { + if(this.dataMaskPolicyItems == null) { + this.dataMaskPolicyItems = new ArrayList<>(); + } + + if(this.dataMaskPolicyItems == dataMaskPolicyItems) { + return; + } + + this.dataMaskPolicyItems.clear(); + + if(dataMaskPolicyItems != null) { + this.dataMaskPolicyItems.addAll(dataMaskPolicyItems); + } + } + + public List getRowFilterPolicyItems() { + return rowFilterPolicyItems; + } + + public void setRowFilterPolicyItems(List rowFilterPolicyItems) { + if(this.rowFilterPolicyItems == null) { + this.rowFilterPolicyItems = new ArrayList<>(); + } + + if(this.rowFilterPolicyItems == rowFilterPolicyItems) { + return; + } + + this.rowFilterPolicyItems.clear(); + + if(rowFilterPolicyItems != null) { + this.rowFilterPolicyItems.addAll(rowFilterPolicyItems); + } + } + + public Map getOptions() { return options; } + + public void setOptions(Map options) { + if (this.options == null) { + this.options = new HashMap<>(); + } + if (this.options == options) { + return; + } + this.options.clear(); + + if(options != null) { + for(Map.Entry e : options.entrySet()) { + this.options.put(e.getKey(), e.getValue()); + } + } + } + + public List getValiditySchedules() { return validitySchedules; } + + public void setValiditySchedules(List validitySchedules) { + if (this.validitySchedules == null) { + this.validitySchedules = new ArrayList<>(); + } + if (this.validitySchedules == validitySchedules) { + return; + } + this.validitySchedules.clear(); + + if(validitySchedules != null) { + this.validitySchedules.addAll(validitySchedules); + } + } + public String getZoneName() { return zoneName; } + + public void setZoneName(String zoneName) { + this.zoneName = zoneName; + } + + /** + * @return the conditions + */ + public List getConditions() { return conditions; } + /** + * @param conditions the conditions to set + */ + public void setConditions(List conditions) { + this.conditions = conditions; + } + + public Boolean getIsDenyAllElse() { + return isDenyAllElse; + } + + public void setIsDenyAllElse(Boolean isDenyAllElse) { + this.isDenyAllElse = isDenyAllElse == null ? 
Boolean.FALSE : isDenyAllElse; + } + + @Override + public String toString( ) { + StringBuilder sb = new StringBuilder(); + + toString(sb); + + return sb.toString(); + } + + public StringBuilder toString(StringBuilder sb) { + sb.append("RangerPolicy={"); + + super.toString(sb); + + sb.append("service={").append(service).append("} "); + sb.append("name={").append(name).append("} "); + sb.append("policyType={").append(policyType).append("} "); + sb.append("policyPriority={").append(policyPriority).append("} "); + sb.append("description={").append(description).append("} "); + sb.append("resourceSignature={").append(resourceSignature).append("} "); + sb.append("isAuditEnabled={").append(isAuditEnabled).append("} "); + sb.append("serviceType={").append(serviceType).append("} "); + + sb.append("resources={"); + if(resources != null) { + for(Map.Entry e : resources.entrySet()) { + sb.append(e.getKey()).append("={"); + e.getValue().toString(sb); + sb.append("} "); + } + } + sb.append("} "); + sb.append("policyLabels={"); + if(policyLabels != null) { + for(String policyLabel : policyLabels) { + if(policyLabel != null) { + sb.append(policyLabel).append(" "); + } + } + } + sb.append("} "); + + sb.append("policyConditions={"); + if(conditions != null) { + for(RangerPolicyItemCondition condition : conditions) { + if(condition != null) { + condition.toString(sb); + } + } + } + sb.append("} "); + + sb.append("policyItems={"); + if(policyItems != null) { + for(RangerPolicyItem policyItem : policyItems) { + if(policyItem != null) { + policyItem.toString(sb); + } + } + } + sb.append("} "); + + sb.append("denyPolicyItems={"); + if(denyPolicyItems != null) { + for(RangerPolicyItem policyItem : denyPolicyItems) { + if(policyItem != null) { + policyItem.toString(sb); + } + } + } + sb.append("} "); + + sb.append("allowExceptions={"); + if(allowExceptions != null) { + for(RangerPolicyItem policyItem : allowExceptions) { + if(policyItem != null) { + policyItem.toString(sb); + } + } + } + sb.append("} "); + + sb.append("denyExceptions={"); + if(denyExceptions != null) { + for(RangerPolicyItem policyItem : denyExceptions) { + if(policyItem != null) { + policyItem.toString(sb); + } + } + } + sb.append("} "); + + sb.append("dataMaskPolicyItems={"); + if(dataMaskPolicyItems != null) { + for(RangerDataMaskPolicyItem dataMaskPolicyItem : dataMaskPolicyItems) { + if(dataMaskPolicyItem != null) { + dataMaskPolicyItem.toString(sb); + } + } + } + sb.append("} "); + + sb.append("rowFilterPolicyItems={"); + if(rowFilterPolicyItems != null) { + for(RangerRowFilterPolicyItem rowFilterPolicyItem : rowFilterPolicyItems) { + if(rowFilterPolicyItem != null) { + rowFilterPolicyItem.toString(sb); + } + } + } + sb.append("} "); + + sb.append("options={"); + if(options != null) { + for(Map.Entry e : options.entrySet()) { + sb.append(e.getKey()).append("={"); + sb.append(e.getValue().toString()); + sb.append("} "); + } + } + sb.append("} "); + + //sb.append("validitySchedules={").append(validitySchedules).append("} "); + sb.append("validitySchedules={"); + if (CollectionUtils.isNotEmpty(validitySchedules)) { + for (RangerValiditySchedule schedule : validitySchedules) { + if (schedule != null) { + sb.append("schedule={").append(schedule).append("}"); + } + } + } + sb.append(", zoneName=").append(zoneName); + + sb.append(", isDenyAllElse={").append(isDenyAllElse).append("} "); + + sb.append("}"); + + sb.append("}"); + + return sb; + } + + static class PolicyIdComparator implements Comparator, java.io.Serializable { + @Override + public int 
compare(RangerPolicy me, RangerPolicy other) { + return Long.compare(me.getId(), other.getId()); + } + } + + @JsonInclude(JsonInclude.Include.NON_NULL) + @XmlRootElement + @XmlAccessorType(XmlAccessType.FIELD) + public static class RangerPolicyResource implements java.io.Serializable { + private static final long serialVersionUID = 1L; + + private List values; + private Boolean isExcludes; + private Boolean isRecursive; + + public RangerPolicyResource() { + this((List)null, null, null); + } + + public RangerPolicyResource(String value) { + setValue(value); + setIsExcludes(null); + setIsRecursive(null); + } + + public RangerPolicyResource(String value, Boolean isExcludes, Boolean isRecursive) { + setValue(value); + setIsExcludes(isExcludes); + setIsRecursive(isRecursive); + } + + public RangerPolicyResource(List values, Boolean isExcludes, Boolean isRecursive) { + setValues(values); + setIsExcludes(isExcludes); + setIsRecursive(isRecursive); + } + + /** + * @return the values + */ + public List getValues() { + return values; + } + + /** + * @param values the values to set + */ + public void setValues(List values) { + if(this.values == null) { + this.values = new ArrayList<>(); + } + + if(this.values == values) { + return; + } + + this.values.clear(); + + if(values != null) { + this.values.addAll(values); + } + } + + /** + * @param value the value to set + */ + public void setValue(String value) { + if(this.values == null) { + this.values = new ArrayList<>(); + } + + this.values.clear(); + + this.values.add(value); + } + + /** + * @return the isExcludes + */ + public Boolean getIsExcludes() { + return isExcludes; + } + + /** + * @param isExcludes the isExcludes to set + */ + public void setIsExcludes(Boolean isExcludes) { + this.isExcludes = isExcludes == null ? Boolean.FALSE : isExcludes; + } + + /** + * @return the isRecursive + */ + public Boolean getIsRecursive() { + return isRecursive; + } + + /** + * @param isRecursive the isRecursive to set + */ + public void setIsRecursive(Boolean isRecursive) { + this.isRecursive = isRecursive == null ? Boolean.FALSE : isRecursive; + } + + @Override + public String toString( ) { + StringBuilder sb = new StringBuilder(); + + toString(sb); + + return sb.toString(); + } + + public StringBuilder toString(StringBuilder sb) { + sb.append("RangerPolicyResource={"); + sb.append("values={"); + if(values != null) { + for(String value : values) { + sb.append(value).append(" "); + } + } + sb.append("} "); + sb.append("isExcludes={").append(isExcludes).append("} "); + sb.append("isRecursive={").append(isRecursive).append("} "); + sb.append("}"); + + return sb; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + + ((isExcludes == null) ? 0 : isExcludes.hashCode()); + result = prime * result + + ((isRecursive == null) ? 0 : isRecursive.hashCode()); + result = prime * result + + ((values == null) ? 
0 : values.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + RangerPolicyResource other = (RangerPolicyResource) obj; + if (isExcludes == null) { + if (other.isExcludes != null) + return false; + } else if (!isExcludes.equals(other.isExcludes)) + return false; + if (isRecursive == null) { + if (other.isRecursive != null) + return false; + } else if (!isRecursive.equals(other.isRecursive)) + return false; + if (values == null) { + if (other.values != null) + return false; + } else if (!values.equals(other.values)) + return false; + return true; + } + + } + + @JsonInclude(JsonInclude.Include.NON_NULL) + @XmlRootElement + @XmlAccessorType(XmlAccessType.FIELD) + public static class RangerPolicyItem implements java.io.Serializable { + private static final long serialVersionUID = 1L; + + private List accesses; + private List users; + private List groups; + private List roles; + private List conditions; + private Boolean delegateAdmin; + + public RangerPolicyItem() { + this(null, null, null, null, null, null); + } + + public RangerPolicyItem(List accessTypes, List users, List groups, List roles, List conditions, Boolean delegateAdmin) { + setAccesses(accessTypes); + setUsers(users); + setGroups(groups); + setRoles(roles); + setConditions(conditions); + setDelegateAdmin(delegateAdmin); + } + + /** + * @return the accesses + */ + public List getAccesses() { + return accesses; + } + /** + * @param accesses the accesses to set + */ + public void setAccesses(List accesses) { + if(this.accesses == null) { + this.accesses = new ArrayList<>(); + } + + if(this.accesses == accesses) { + return; + } + + this.accesses.clear(); + + if(accesses != null) { + this.accesses.addAll(accesses); + } + } + /** + * @return the users + */ + public List getUsers() { + return users; + } + /** + * @param users the users to set + */ + public void setUsers(List users) { + if(this.users == null) { + this.users = new ArrayList<>(); + } + + if(this.users == users) { + return; + } + + this.users.clear(); + + if(users != null) { + this.users.addAll(users); + } + } + /** + * @return the groups + */ + public List getGroups() { + return groups; + } + /** + * @param groups the groups to set + */ + public void setGroups(List groups) { + if(this.groups == null) { + this.groups = new ArrayList<>(); + } + + if(this.groups == groups) { + return; + } + + this.groups.clear(); + + if(groups != null) { + this.groups.addAll(groups); + } + } + /** + * @return the roles + */ + public List getRoles() { + return roles; + } + /** + * @param roles the roles to set + */ + public void setRoles(List roles) { + if(this.roles == null) { + this.roles = new ArrayList<>(); + } + + if(this.roles == roles) { + return; + } + + this.roles.clear(); + + if(roles != null) { + this.roles.addAll(roles); + } + } + /** + * @return the conditions + */ + public List getConditions() { + return conditions; + } + /** + * @param conditions the conditions to set + */ + public void setConditions(List conditions) { + if(this.conditions == null) { + this.conditions = new ArrayList<>(); + } + + if(this.conditions == conditions) { + return; + } + + this.conditions.clear(); + + if(conditions != null) { + this.conditions.addAll(conditions); + } + } + + /** + * @return the delegateAdmin + */ + public Boolean getDelegateAdmin() { + return delegateAdmin; + } + + /** + * @param delegateAdmin the delegateAdmin to set + */ + public 
void setDelegateAdmin(Boolean delegateAdmin) { + this.delegateAdmin = delegateAdmin == null ? Boolean.FALSE : delegateAdmin; + } + + @Override + public String toString( ) { + StringBuilder sb = new StringBuilder(); + + toString(sb); + + return sb.toString(); + } + + public StringBuilder toString(StringBuilder sb) { + sb.append("RangerPolicyItem={"); + + sb.append("accessTypes={"); + if(accesses != null) { + for(RangerPolicyItemAccess access : accesses) { + if(access != null) { + access.toString(sb); + } + } + } + sb.append("} "); + + sb.append("users={"); + if(users != null) { + for(String user : users) { + if(user != null) { + sb.append(user).append(" "); + } + } + } + sb.append("} "); + + sb.append("groups={"); + if(groups != null) { + for(String group : groups) { + if(group != null) { + sb.append(group).append(" "); + } + } + } + sb.append("} "); + + sb.append("roles={"); + if(roles != null) { + for(String role : roles) { + if(role != null) { + sb.append(role).append(" "); + } + } + } + sb.append("} "); + + sb.append("conditions={"); + if(conditions != null) { + for(RangerPolicyItemCondition condition : conditions) { + if(condition != null) { + condition.toString(sb); + } + } + } + sb.append("} "); + + sb.append("delegateAdmin={").append(delegateAdmin).append("} "); + sb.append("}"); + + return sb; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + + ((accesses == null) ? 0 : accesses.hashCode()); + result = prime * result + + ((conditions == null) ? 0 : conditions.hashCode()); + result = prime * result + + ((delegateAdmin == null) ? 0 : delegateAdmin.hashCode()); + result = prime * result + + ((roles == null) ? 0 : roles.hashCode()); + result = prime * result + + ((groups == null) ? 0 : groups.hashCode()); + result = prime * result + ((users == null) ? 
0 : users.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + RangerPolicyItem other = (RangerPolicyItem) obj; + if (accesses == null) { + if (other.accesses != null) + return false; + } else if (!accesses.equals(other.accesses)) + return false; + if (conditions == null) { + if (other.conditions != null) + return false; + } else if (!conditions.equals(other.conditions)) + return false; + if (delegateAdmin == null) { + if (other.delegateAdmin != null) + return false; + } else if (!delegateAdmin.equals(other.delegateAdmin)) + return false; + if (roles == null) { + if (other.roles != null) + return false; + } else if (!roles.equals(other.roles)) + return false; + if (groups == null) { + if (other.groups != null) + return false; + } else if (!groups.equals(other.groups)) + return false; + if (users == null) { + if (other.users != null) + return false; + } else if (!users.equals(other.users)) + return false; + return true; + + } + } + + @JsonInclude(JsonInclude.Include.NON_NULL) + @XmlRootElement + @XmlAccessorType(XmlAccessType.FIELD) + public static class RangerDataMaskPolicyItem extends RangerPolicyItem implements java.io.Serializable { + private static final long serialVersionUID = 1L; + + private RangerPolicyItemDataMaskInfo dataMaskInfo; + + public RangerDataMaskPolicyItem() { + this(null, null, null, null, null, null, null); + } + + public RangerDataMaskPolicyItem(List accesses, RangerPolicyItemDataMaskInfo dataMaskDetail, List users, List groups, List roles, List conditions, Boolean delegateAdmin) { + super(accesses, users, groups, roles, conditions, delegateAdmin); + + setDataMaskInfo(dataMaskDetail); + } + + /** + * @return the dataMaskInfo + */ + public RangerPolicyItemDataMaskInfo getDataMaskInfo() { + return dataMaskInfo; + } + + /** + * @param dataMaskInfo the dataMaskInfo to set + */ + public void setDataMaskInfo(RangerPolicyItemDataMaskInfo dataMaskInfo) { + this.dataMaskInfo = dataMaskInfo == null ? new RangerPolicyItemDataMaskInfo() : dataMaskInfo; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + ((dataMaskInfo == null) ? 0 : dataMaskInfo.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if(! 
super.equals(obj)) + return false; + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + RangerDataMaskPolicyItem other = (RangerDataMaskPolicyItem) obj; + if (dataMaskInfo == null) { + if (other.dataMaskInfo != null) + return false; + } else if (!dataMaskInfo.equals(other.dataMaskInfo)) + return false; + return true; + } + + @Override + public String toString( ) { + StringBuilder sb = new StringBuilder(); + + toString(sb); + + return sb.toString(); + } + + public StringBuilder toString(StringBuilder sb) { + sb.append("RangerDataMaskPolicyItem={"); + + super.toString(sb); + + sb.append("dataMaskInfo={"); + if(dataMaskInfo != null) { + dataMaskInfo.toString(sb); + } + sb.append("} "); + + sb.append("}"); + + return sb; + } + } + + @JsonInclude(JsonInclude.Include.NON_NULL) + @XmlRootElement + @XmlAccessorType(XmlAccessType.FIELD) + public static class RangerRowFilterPolicyItem extends RangerPolicyItem implements java.io.Serializable { + private static final long serialVersionUID = 1L; + + private RangerPolicyItemRowFilterInfo rowFilterInfo; + + public RangerRowFilterPolicyItem() { + this(null, null, null, null, null, null, null); + } + + public RangerRowFilterPolicyItem(RangerPolicyItemRowFilterInfo rowFilterInfo, List accesses, List users, List groups, List roles, List conditions, Boolean delegateAdmin) { + super(accesses, users, groups, roles, conditions, delegateAdmin); + + setRowFilterInfo(rowFilterInfo); + } + + /** + * @return the rowFilterInfo + */ + public RangerPolicyItemRowFilterInfo getRowFilterInfo() { + return rowFilterInfo; + } + + /** + * @param rowFilterInfo the rowFilterInfo to set + */ + public void setRowFilterInfo(RangerPolicyItemRowFilterInfo rowFilterInfo) { + this.rowFilterInfo = rowFilterInfo == null ? new RangerPolicyItemRowFilterInfo() : rowFilterInfo; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + ((rowFilterInfo == null) ? 0 : rowFilterInfo.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if(! 
super.equals(obj)) + return false; + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + RangerRowFilterPolicyItem other = (RangerRowFilterPolicyItem) obj; + if (rowFilterInfo == null) { + if (other.rowFilterInfo != null) + return false; + } else if (!rowFilterInfo.equals(other.rowFilterInfo)) + return false; + return true; + } + + @Override + public String toString( ) { + StringBuilder sb = new StringBuilder(); + + toString(sb); + + return sb.toString(); + } + + public StringBuilder toString(StringBuilder sb) { + sb.append("RangerRowFilterPolicyItem={"); + + super.toString(sb); + + sb.append("rowFilterInfo={"); + if(rowFilterInfo != null) { + rowFilterInfo.toString(sb); + } + sb.append("} "); + + sb.append("}"); + + return sb; + } + } + + @JsonInclude(JsonInclude.Include.NON_NULL) + @XmlRootElement + @XmlAccessorType(XmlAccessType.FIELD) + public static class RangerPolicyItemAccess implements java.io.Serializable { + private static final long serialVersionUID = 1L; + + private String type; + private Boolean isAllowed; + + public RangerPolicyItemAccess() { + this(null, null); + } + + public RangerPolicyItemAccess(String type) { + this(type, null); + } + + public RangerPolicyItemAccess(String type, Boolean isAllowed) { + setType(type); + setIsAllowed(isAllowed); + } + + /** + * @return the type + */ + public String getType() { + return type; + } + + /** + * @param type the type to set + */ + public void setType(String type) { + this.type = type; + } + + /** + * @return the isAllowed + */ + public Boolean getIsAllowed() { + return isAllowed; + } + + /** + * @param isAllowed the isAllowed to set + */ + public void setIsAllowed(Boolean isAllowed) { + this.isAllowed = isAllowed == null ? Boolean.TRUE : isAllowed; + } + + @Override + public String toString( ) { + StringBuilder sb = new StringBuilder(); + + toString(sb); + + return sb.toString(); + } + + public StringBuilder toString(StringBuilder sb) { + sb.append("RangerPolicyItemAccess={"); + sb.append("type={").append(type).append("} "); + sb.append("isAllowed={").append(isAllowed).append("} "); + sb.append("}"); + + return sb; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + + ((isAllowed == null) ? 0 : isAllowed.hashCode()); + result = prime * result + ((type == null) ? 
0 : type.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + RangerPolicyItemAccess other = (RangerPolicyItemAccess) obj; + if (isAllowed == null) { + if (other.isAllowed != null) + return false; + } else if (!isAllowed.equals(other.isAllowed)) + return false; + if (type == null) { + if (other.type != null) + return false; + } else if (!type.equals(other.type)) + return false; + return true; + } + + } + + @JsonInclude(JsonInclude.Include.NON_NULL) + @XmlRootElement + @XmlAccessorType(XmlAccessType.FIELD) + public static class RangerPolicyItemCondition implements java.io.Serializable { + private static final long serialVersionUID = 1L; + + private String type; + private List values; + + public RangerPolicyItemCondition() { + this(null, null); + } + + public RangerPolicyItemCondition(String type, List values) { + setType(type); + setValues(values); + } + + /** + * @return the type + */ + public String getType() { + return type; + } + + /** + * @param type the type to set + */ + public void setType(String type) { + this.type = type; + } + + /** + * @return the value + */ + public List getValues() { + return values; + } + + /** + * @param values the value to set + */ + public void setValues(List values) { + if (this.values == null) { + this.values = new ArrayList<>(); + } + + if(this.values == values) { + return; + } + + this.values.clear(); + + if(values != null) { + this.values.addAll(values); + } + } + + @Override + public String toString( ) { + StringBuilder sb = new StringBuilder(); + + toString(sb); + + return sb.toString(); + } + + public StringBuilder toString(StringBuilder sb) { + sb.append("RangerPolicyCondition={"); + sb.append("type={").append(type).append("} "); + sb.append("values={"); + if(values != null) { + for(String value : values) { + sb.append(value).append(" "); + } + } + sb.append("} "); + sb.append("}"); + + return sb; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((type == null) ? 0 : type.hashCode()); + result = prime * result + + ((values == null) ? 
0 : values.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + RangerPolicyItemCondition other = (RangerPolicyItemCondition) obj; + if (type == null) { + if (other.type != null) + return false; + } else if (!type.equals(other.type)) + return false; + if (values == null) { + if (other.values != null) + return false; + } else if (!values.equals(other.values)) + return false; + return true; + } + + } + + @JsonInclude(JsonInclude.Include.NON_NULL) + @XmlRootElement + @XmlAccessorType(XmlAccessType.FIELD) + public static class RangerPolicyItemDataMaskInfo implements java.io.Serializable { + private static final long serialVersionUID = 1L; + + private String dataMaskType; + private String conditionExpr; + private String valueExpr; + + public RangerPolicyItemDataMaskInfo() { } + + public RangerPolicyItemDataMaskInfo(String dataMaskType, String conditionExpr, String valueExpr) { + setDataMaskType(dataMaskType); + setConditionExpr(conditionExpr); + setValueExpr(valueExpr); + } + + public RangerPolicyItemDataMaskInfo(RangerPolicyItemDataMaskInfo that) { + this.dataMaskType = that.dataMaskType; + this.conditionExpr = that.conditionExpr; + this.valueExpr = that.valueExpr; + } + + public String getDataMaskType() { + return dataMaskType; + } + + public void setDataMaskType(String dataMaskType) { + this.dataMaskType = dataMaskType; + } + + public String getConditionExpr() { + return conditionExpr; + } + + public void setConditionExpr(String conditionExpr) { + this.conditionExpr = conditionExpr; + } + + public String getValueExpr() { + return valueExpr; + } + + public void setValueExpr(String valueExpr) { + this.valueExpr = valueExpr; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + ((dataMaskType == null) ? 0 : dataMaskType.hashCode()); + result = prime * result + ((conditionExpr == null) ? 0 : conditionExpr.hashCode()); + result = prime * result + ((valueExpr == null) ? 
0 : valueExpr.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + RangerPolicyItemDataMaskInfo other = (RangerPolicyItemDataMaskInfo) obj; + if (dataMaskType == null) { + if (other.dataMaskType != null) + return false; + } else if (!dataMaskType.equals(other.dataMaskType)) + return false; + if (conditionExpr == null) { + if (other.conditionExpr != null) + return false; + } else if (!conditionExpr.equals(other.conditionExpr)) + return false; + if (valueExpr == null) { + if (other.valueExpr != null) + return false; + } else if (!valueExpr.equals(other.valueExpr)) + return false; + return true; + } + + @Override + public String toString( ) { + StringBuilder sb = new StringBuilder(); + + toString(sb); + + return sb.toString(); + } + + public StringBuilder toString(StringBuilder sb) { + sb.append("RangerPolicyItemDataMaskInfo={"); + + sb.append("dataMaskType={").append(dataMaskType).append("} "); + sb.append("conditionExpr={").append(conditionExpr).append("} "); + sb.append("valueExpr={").append(valueExpr).append("} "); + + sb.append("}"); + + return sb; + } + } + + @JsonInclude(JsonInclude.Include.NON_NULL) + @XmlRootElement + @XmlAccessorType(XmlAccessType.FIELD) + public static class RangerPolicyItemRowFilterInfo implements java.io.Serializable { + private static final long serialVersionUID = 1L; + + private String filterExpr; + + public RangerPolicyItemRowFilterInfo() { } + + public RangerPolicyItemRowFilterInfo(String filterExpr) { + setFilterExpr(filterExpr); + } + + public RangerPolicyItemRowFilterInfo(RangerPolicyItemRowFilterInfo that) { + this.filterExpr = that.filterExpr; + } + + public String getFilterExpr() { + return filterExpr; + } + + public void setFilterExpr(String filterExpr) { + this.filterExpr = filterExpr; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + ((filterExpr == null) ? 
0 : filterExpr.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + RangerPolicyItemRowFilterInfo other = (RangerPolicyItemRowFilterInfo) obj; + if (filterExpr == null) { + if (other.filterExpr != null) + return false; + } else if (!filterExpr.equals(other.filterExpr)) + return false; + return true; + } + + @Override + public String toString( ) { + StringBuilder sb = new StringBuilder(); + + toString(sb); + + return sb.toString(); + } + + public StringBuilder toString(StringBuilder sb) { + sb.append("RangerPolicyItemRowFilterInfo={"); + + sb.append("filterExpr={").append(filterExpr).append("} "); + + sb.append("}"); + + return sb; + } + } +} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerRole.java b/auth-common/src/main/java/org/apache/atlas/plugin/model/RangerRole.java similarity index 100% rename from auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerRole.java rename to auth-common/src/main/java/org/apache/atlas/plugin/model/RangerRole.java diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerServiceDef.java b/auth-common/src/main/java/org/apache/atlas/plugin/model/RangerServiceDef.java similarity index 99% rename from auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerServiceDef.java rename to auth-common/src/main/java/org/apache/atlas/plugin/model/RangerServiceDef.java index 4c5d8988d0..994e6a7117 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerServiceDef.java +++ b/auth-common/src/main/java/org/apache/atlas/plugin/model/RangerServiceDef.java @@ -99,10 +99,10 @@ public RangerServiceDef(String name, String implClass, String label, String desc } public RangerServiceDef(String name, String displayName, String implClass, String label, String description, - Map options, List configs, - List modifiedResourceDefs, List accessTypes, - List policyConditions, List contextEnrichers, - List enums) { + Map options, List configs, + List modifiedResourceDefs, List accessTypes, + List policyConditions, List contextEnrichers, + List enums) { this(name, implClass, label, description, options, configs, modifiedResourceDefs, accessTypes, policyConditions, contextEnrichers, enums); this.setDisplayName(displayName); } diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerValidityRecurrence.java b/auth-common/src/main/java/org/apache/atlas/plugin/model/RangerValidityRecurrence.java similarity index 100% rename from auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerValidityRecurrence.java rename to auth-common/src/main/java/org/apache/atlas/plugin/model/RangerValidityRecurrence.java diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerValiditySchedule.java b/auth-common/src/main/java/org/apache/atlas/plugin/model/RangerValiditySchedule.java similarity index 90% rename from auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerValiditySchedule.java rename to auth-common/src/main/java/org/apache/atlas/plugin/model/RangerValiditySchedule.java index 3bb3f078b5..f166598e28 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerValiditySchedule.java +++ b/auth-common/src/main/java/org/apache/atlas/plugin/model/RangerValiditySchedule.java @@ -19,11 +19,9 @@ package org.apache.atlas.plugin.model; - -import 
org.codehaus.jackson.annotate.JsonAutoDetect; -import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility; -import org.codehaus.jackson.annotate.JsonIgnoreProperties; -import org.codehaus.jackson.map.annotate.JsonSerialize; +import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonAutoDetect; +import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonIgnoreProperties; +import org.apache.htrace.shaded.fasterxml.jackson.databind.annotation.JsonSerialize; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; @@ -33,7 +31,7 @@ import java.util.Arrays; import java.util.List; -@JsonAutoDetect(fieldVisibility=Visibility.ANY) +@JsonAutoDetect(fieldVisibility= JsonAutoDetect.Visibility.ANY) @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @JsonIgnoreProperties(ignoreUnknown=true) @XmlRootElement diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/UserInfo.java b/auth-common/src/main/java/org/apache/atlas/plugin/model/UserInfo.java similarity index 100% rename from auth-agents-common/src/main/java/org/apache/atlas/plugin/model/UserInfo.java rename to auth-common/src/main/java/org/apache/atlas/plugin/model/UserInfo.java index 4a690a0a29..43b5fe1682 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/UserInfo.java +++ b/auth-common/src/main/java/org/apache/atlas/plugin/model/UserInfo.java @@ -19,8 +19,8 @@ package org.apache.atlas.plugin.model; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; import org.apache.atlas.plugin.util.RangerUserStoreUtil; +import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRoles.java b/auth-common/src/main/java/org/apache/atlas/plugin/util/RangerRoles.java similarity index 100% rename from auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRoles.java rename to auth-common/src/main/java/org/apache/atlas/plugin/util/RangerRoles.java index 083871f1e2..ddcb6dea58 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRoles.java +++ b/auth-common/src/main/java/org/apache/atlas/plugin/util/RangerRoles.java @@ -19,8 +19,8 @@ package org.apache.atlas.plugin.util; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; import org.apache.atlas.plugin.model.RangerRole; +import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerUserStore.java b/auth-common/src/main/java/org/apache/atlas/plugin/util/RangerUserStore.java similarity index 99% rename from auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerUserStore.java rename to auth-common/src/main/java/org/apache/atlas/plugin/util/RangerUserStore.java index 7318648cf8..dc1115fa4c 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerUserStore.java +++ b/auth-common/src/main/java/org/apache/atlas/plugin/util/RangerUserStore.java @@ -59,7 +59,7 @@ public RangerUserStore(Long userStoreVersion, Set users, Set isAccessAllowed(" + request + ")"); } boolean ret = false; + long startTime = System.currentTimeMillis(); + final String uuid = UUID.randomUUID().toString(); try { final String action = 
request.getAction() != null ? request.getAction().getType() : null; @@ -702,19 +710,21 @@ private boolean isAccessAllowed(AtlasEntityAccessRequest request, RangerAtlasAud } // check authorization for each classification + LOG.info("classification level authorization started: " + (System.currentTimeMillis()-startTime) + "uuid: "+uuid); for (AtlasClassification classificationToAuthorize : request.getEntityClassifications()) { rangerResource.setValue(RESOURCE_ENTITY_CLASSIFICATION, request.getClassificationTypeAndAllSuperTypes(classificationToAuthorize.getTypeName())); - ret = checkAccess(rangerRequest, auditHandler); + ret = checkAccess(rangerRequest, auditHandler, uuid); if (!ret) { break; } } + LOG.info("classification level authorization ended: " + (System.currentTimeMillis()-startTime) + "uuid: "+uuid); } else { rangerResource.setValue(RESOURCE_ENTITY_CLASSIFICATION, ENTITY_NOT_CLASSIFIED ); - ret = checkAccess(rangerRequest, auditHandler); + ret = checkAccess(rangerRequest, auditHandler, uuid); } } finally { @@ -726,7 +736,7 @@ private boolean isAccessAllowed(AtlasEntityAccessRequest request, RangerAtlasAud if (LOG.isDebugEnabled()) { LOG.debug("<== isAccessAllowed(" + request + "): " + ret); } - + LOG.info("isAccessAllowed ended: " + (System.currentTimeMillis()-startTime) + "uuid: "+uuid); return ret; } @@ -793,9 +803,10 @@ private boolean checkAccess(RangerAccessRequestImpl request) { return ret; } - private boolean checkAccess(RangerAccessRequestImpl request, RangerAtlasAuditHandler auditHandler) { + private boolean checkAccess(RangerAccessRequestImpl request, RangerAtlasAuditHandler auditHandler, String uuid) { boolean ret = false; - + long startTime = System.currentTimeMillis(); + LOG.info("checkAccess started at: " + startTime + " uuid: " + uuid); RangerBasePlugin plugin = atlasPlugin; String userName = request.getUser(); @@ -809,7 +820,7 @@ private boolean checkAccess(RangerAccessRequestImpl request, RangerAtlasAuditHan LOG.debug("Setting UserGroup for user :" + userName + " Groups: " + groupUtil.getContainedGroups(userName)); } - RangerAccessResult result = plugin.isAccessAllowed(request, auditHandler); + RangerAccessResult result = plugin.isAccessAllowed(request, auditHandler, uuid); ret = result != null && result.getIsAllowed(); @@ -817,6 +828,7 @@ private boolean checkAccess(RangerAccessRequestImpl request, RangerAtlasAuditHan LOG.warn("RangerAtlasPlugin not initialized. Access blocked!!!"); } + LOG.info("checkAccess ended at: " + (System.currentTimeMillis()-startTime) + " uuid: " + uuid); return ret; } @@ -859,17 +871,20 @@ private void checkAccessAndScrub(AtlasEntityHeader entity, AtlasSearchResultScru } private void checkAccessAndScrub(AtlasEntityHeader entity, AtlasSearchResultScrubRequest request, boolean isScrubAuditEnabled) throws AtlasAuthorizationException { + long t0 = System.currentTimeMillis(); if (entity != null && request != null) { final AtlasEntityAccessRequest entityAccessRequest = new AtlasEntityAccessRequest(request.getTypeRegistry(), AtlasPrivilege.ENTITY_READ, entity, request.getUser(), request.getUserGroups()); entityAccessRequest.setClientIPAddress(request.getClientIPAddress()); entityAccessRequest.setForwardedAddresses(request.getForwardedAddresses()); entityAccessRequest.setRemoteIPAddress(request.getRemoteIPAddress()); - + LOG.info("isEntityAccessAllowed started in: " + (System.currentTimeMillis() - t0)); boolean isEntityAccessAllowed = isScrubAuditEnabled ? 
isAccessAllowed(entityAccessRequest) : isAccessAllowed(entityAccessRequest, null); + LOG.info("isEntityAccessAllowed ended in: " + (System.currentTimeMillis() - t0)); if (!isEntityAccessAllowed) { scrubEntityHeader(entity, request.getTypeRegistry()); } + LOG.info("scrubEntityHeader ended in: " + (System.currentTimeMillis() - t0)); } } diff --git a/authorization/src/main/java/org/apache/atlas/authorize/AtlasAuthorizer.java b/authorization/src/main/java/org/apache/atlas/authorize/AtlasAuthorizer.java index 22aea9ea6c..ade9ae79c7 100644 --- a/authorization/src/main/java/org/apache/atlas/authorize/AtlasAuthorizer.java +++ b/authorization/src/main/java/org/apache/atlas/authorize/AtlasAuthorizer.java @@ -19,6 +19,7 @@ package org.apache.atlas.authorize; +import com.esotericsoftware.minlog.Log; import org.apache.atlas.model.instance.AtlasEntityHeader; import org.apache.atlas.type.AtlasEntityType; import org.apache.atlas.type.AtlasStructType; @@ -127,6 +128,7 @@ void scrubEntityHeader(AtlasEntityHeader entity) { default void scrubEntityHeader(AtlasEntityHeader entity, AtlasTypeRegistry typeRegistry) { + long startTime = System.currentTimeMillis(); AtlasEntityType entityType = typeRegistry.getEntityTypeByName(entity.getTypeName()); boolean isScrubbed = false; @@ -142,7 +144,7 @@ default void scrubEntityHeader(AtlasEntityHeader entity, AtlasTypeRegistry typeR } entity.setScrubbed(isScrubbed); - + Log.info("Time taken by scrubEntityHeader: "+ (System.currentTimeMillis() - startTime)); } diff --git a/authorization/src/main/java/org/apache/atlas/authorize/AtlasPrivilege.java b/authorization/src/main/java/org/apache/atlas/authorize/AtlasPrivilege.java index aaf02cbe7a..29d332cad1 100644 --- a/authorization/src/main/java/org/apache/atlas/authorize/AtlasPrivilege.java +++ b/authorization/src/main/java/org/apache/atlas/authorize/AtlasPrivilege.java @@ -51,7 +51,9 @@ public enum AtlasPrivilege { ADMIN_ENTITY_AUDITS("admin-entity-audits"), ADMIN_REPAIR_INDEX("admin-repair-index"), - ADMIN_TASK_CUD("admin-task-cud"); + ADMIN_TASK_CUD("admin-task-cud"), + + ADMIN_FEATURE_FLAG_CUD("admin-featureFlag-cud"); private final String type; AtlasPrivilege(String actionType){ diff --git a/build.sh b/build.sh index d60c540769..d3f77f5647 100755 --- a/build.sh +++ b/build.sh @@ -24,9 +24,9 @@ unzip -o keycloak-15.0.2.1.zip -d ~/.m2/repository/org echo "Maven Building" if [ "$1" == "build_without_dashboard" ]; then - mvn -pl '!test-tools,!addons/hdfs-model,!addons/hive-bridge,!addons/hive-bridge-shim,!addons/falcon-bridge-shim,!addons/falcon-bridge,!addons/sqoop-bridge,!addons/sqoop-bridge-shim,!addons/hbase-bridge,!addons/hbase-bridge-shim,!addons/hbase-testing-util,!addons/kafka-bridge,!addons/impala-hook-api,!addons/impala-bridge-shim,!addons/impala-bridge,!dashboardv2,!dashboardv3' -Dmaven.test.skip -DskipTests -Drat.skip=true -DskipOverlay -DskipEnunciate=true package -Pdist + mvn -pl '!addons/hdfs-model,!addons/hive-bridge,!addons/hive-bridge-shim,!addons/falcon-bridge-shim,!addons/falcon-bridge,!addons/sqoop-bridge,!addons/sqoop-bridge-shim,!addons/hbase-bridge,!addons/hbase-bridge-shim,!addons/hbase-testing-util,!addons/kafka-bridge,!addons/impala-hook-api,!addons/impala-bridge-shim,!addons/impala-bridge,!dashboardv2,!dashboardv3' -Dmaven.test.skip -DskipTests -Drat.skip=true -DskipOverlay -DskipEnunciate=true package -Pdist else - mvn -pl 
'!test-tools,!addons/hdfs-model,!addons/hive-bridge,!addons/hive-bridge-shim,!addons/falcon-bridge-shim,!addons/falcon-bridge,!addons/sqoop-bridge,!addons/sqoop-bridge-shim,!addons/hbase-bridge,!addons/hbase-bridge-shim,!addons/hbase-testing-util,!addons/kafka-bridge,!addons/impala-hook-api,!addons/impala-bridge-shim,!addons/impala-bridge' -Dmaven.test.skip -DskipTests -Drat.skip=true -DskipEnunciate=true package -Pdist + mvn -pl '!addons/hdfs-model,!addons/hive-bridge,!addons/hive-bridge-shim,!addons/falcon-bridge-shim,!addons/falcon-bridge,!addons/sqoop-bridge,!addons/sqoop-bridge-shim,!addons/hbase-bridge,!addons/hbase-bridge-shim,!addons/hbase-testing-util,!addons/kafka-bridge,!addons/impala-hook-api,!addons/impala-bridge-shim,!addons/impala-bridge' -Dmaven.test.skip -DskipTests -Drat.skip=true -DskipEnunciate=true package -Pdist fi echo "[DEBUG listing distro/target" diff --git a/client-auth/src/main/java/org/apache/atlas/auth/client/auth/AbstractAuthClient.java b/client-auth/src/main/java/org/apache/atlas/auth/client/auth/AbstractAuthClient.java index cb13431384..988a120bfd 100644 --- a/client-auth/src/main/java/org/apache/atlas/auth/client/auth/AbstractAuthClient.java +++ b/client-auth/src/main/java/org/apache/atlas/auth/client/auth/AbstractAuthClient.java @@ -38,7 +38,7 @@ public class AbstractAuthClient { private static final String BEARER = "Bearer "; private static final int TIMEOUT_IN_SEC = 60; private static final String INTEGRATION = "integration"; - private static final String KEYCLOAK = "keycloak"; + private static final String AUTH = "auth"; protected final AuthConfig authConfig; protected final RetrofitKeycloakClient retrofitKeycloakClient; @@ -84,10 +84,10 @@ public AbstractAuthClient(AuthConfig authConfig) { Interceptor responseLoggingInterceptor = chain -> { Request request = chain.request(); String rawPath = request.url().uri().getRawPath(); - Timer.Sample timerSample = this.metricUtils.start(rawPath); + Timer.Sample timerSample = this.metricUtils.start(rawPath, false); okhttp3.Response response = chain.proceed(request); this.metricUtils.recordHttpTimer(timerSample, request.method(), rawPath, response.code(), - INTEGRATION, KEYCLOAK); + INTEGRATION, AUTH); return response; }; diff --git a/common/pom.xml b/common/pom.xml index ff4d502f12..47c274ca4d 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -145,51 +145,7 @@ - - io.netty - netty-all - ${netty4.version} - - - io.netty - netty-handler - ${netty4.version} - - - io.netty - netty-common - ${netty4.version} - - - io.netty - netty-resolver - ${netty4.version} - - - io.netty - netty-codec - ${netty4.version} - - - io.netty - netty-transport - ${netty4.version} - - - io.netty - netty-buffer - ${netty4.version} - - - io.netty - netty-transport-native-epoll - ${netty4.version} - - - io.netty - netty-transport-native-unix-common - ${netty4.version} - + diff --git a/common/src/main/java/org/apache/atlas/repository/Constants.java b/common/src/main/java/org/apache/atlas/repository/Constants.java index 8d4d47ea57..f2f7fd1e3e 100644 --- a/common/src/main/java/org/apache/atlas/repository/Constants.java +++ b/common/src/main/java/org/apache/atlas/repository/Constants.java @@ -19,7 +19,9 @@ package org.apache.atlas.repository; import org.apache.atlas.ApplicationProperties; +import org.apache.atlas.AtlasConfiguration; import org.apache.atlas.AtlasException; +import org.apache.atlas.service.FeatureFlagStore; import org.apache.commons.configuration.Configuration; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; 
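For context on the AbstractAuthClient interceptor change above: the client now starts a Micrometer timer sample for every outbound auth request (the new boolean argument to MetricUtils.start bypasses the canonical-pattern check) and records it tagged with the generic "auth" component rather than "keycloak". The sketch below is illustrative only, assuming stock OkHttp and Micrometer APIs; the class name, metric name and registry are placeholders and not the actual Atlas types.

    import io.micrometer.core.instrument.MeterRegistry;
    import io.micrometer.core.instrument.Timer;
    import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
    import okhttp3.Interceptor;
    import okhttp3.OkHttpClient;
    import okhttp3.Request;
    import okhttp3.Response;

    import java.io.IOException;

    // Sketch only: times every outbound auth call and tags it by method, path and status.
    public class AuthTimingInterceptorSketch implements Interceptor {
        private final MeterRegistry registry;

        public AuthTimingInterceptorSketch(MeterRegistry registry) {
            this.registry = registry;
        }

        @Override
        public Response intercept(Chain chain) throws IOException {
            Request request = chain.request();
            String rawPath = request.url().uri().getRawPath();
            Timer.Sample sample = Timer.start(registry); // unconditional, mirrors start(rawPath, false)
            Response response = chain.proceed(request);
            sample.stop(Timer.builder("http.client.requests")   // placeholder metric name
                    .tag("method", request.method())
                    .tag("uri", rawPath)
                    .tag("status", String.valueOf(response.code()))
                    .tag("integration", "auth")                  // tag value changed from "keycloak"
                    .register(registry));
            return response;
        }

        public static void main(String[] args) {
            OkHttpClient client = new OkHttpClient.Builder()
                    .addInterceptor(new AuthTimingInterceptorSketch(new SimpleMeterRegistry()))
                    .build();
            System.out.println("interceptors: " + client.interceptors().size());
        }
    }

Recording unconditionally, rather than only for URIs that match a canonical pattern, trades a little extra metric cardinality for visibility into every auth round trip.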
@@ -133,10 +135,32 @@ public final class Constants { public static final String GLOSSARY_TERMS_EDGE_LABEL = "r:AtlasGlossaryTermAnchor"; public static final String GLOSSARY_CATEGORY_EDGE_LABEL = "r:AtlasGlossaryCategoryAnchor"; + /** + * MESH property keys. + */ + public static final String DATA_DOMAIN_ENTITY_TYPE = "DataDomain"; + public static final String DATA_PRODUCT_ENTITY_TYPE = "DataProduct"; + + public static final String STAKEHOLDER_ENTITY_TYPE = "Stakeholder"; + public static final String STAKEHOLDER_TITLE_ENTITY_TYPE = "StakeholderTitle"; + + public static final String REL_DOMAIN_TO_DOMAINS = "parent_domain_sub_domains"; + public static final String REL_DOMAIN_TO_PRODUCTS = "data_domain_data_products"; + + public static final String REL_DOMAIN_TO_STAKEHOLDERS = "data_domain_stakeholders"; + public static final String REL_STAKEHOLDER_TITLE_TO_STAKEHOLDERS = "stakeholder_title_stakeholders"; + + public static final String REL_DATA_PRODUCT_TO_OUTPUT_PORTS = "data_products_output_ports"; + public static final String REL_DATA_PRODUCT_TO_INPUT_PORTS = "data_products_input_ports"; + + public static final String INPUT_PORT_PRODUCT_EDGE_LABEL = "__Asset.inputPortDataProducts"; + public static final String OUTPUT_PORT_PRODUCT_EDGE_LABEL = "__Asset.outputPortDataProducts"; /** * SQL property keys. */ + + public static final String SQL_ENTITY_TYPE = "SQL"; public static final String CONNECTION_ENTITY_TYPE = "Connection"; public static final String QUERY_ENTITY_TYPE = "Query"; public static final String QUERY_FOLDER_ENTITY_TYPE = "Folder"; @@ -150,6 +174,9 @@ public final class Constants { public static final String PURPOSE_ENTITY_TYPE = "Purpose"; public static final String POLICY_ENTITY_TYPE = "AuthPolicy"; public static final String SERVICE_ENTITY_TYPE = "AuthService"; + public static final String REL_POLICY_TO_ACCESS_CONTROL = "access_control_policies"; + + public static final String POLICY_SERVICE_NAME_ABAC = "atlas_abac"; /** * Resource @@ -162,6 +189,13 @@ public final class Constants { public static final String ASSET_README_EDGE_LABEL = "__Asset.readme"; public static final String ASSET_LINK_EDGE_LABEL = "__Asset.links"; + /** + * Contract + */ + public static final String CONTRACT_ENTITY_TYPE = "DataContract"; + public static final String ATTR_CONTRACT_VERSION = "dataContractVersion"; + + /** * Lineage relations. 
*/ @@ -371,7 +405,6 @@ public enum SupportedFileExtensions { XLSX, XLS, CSV } public static final String CATALOG_PROCESS_INPUT_RELATIONSHIP_LABEL = "__Process.inputs"; public static final String CATALOG_PROCESS_OUTPUT_RELATIONSHIP_LABEL = "__Process.outputs"; - public static final String COLUMN_LINEAGE_RELATIONSHIP_LABEL = "__Process.columnProcesses"; public static final String CLASSIFICATION_PROPAGATION_MODE_DEFAULT ="DEFAULT"; public static final String CLASSIFICATION_PROPAGATION_MODE_RESTRICT_LINEAGE ="RESTRICT_LINEAGE"; @@ -381,14 +414,12 @@ public enum SupportedFileExtensions { XLSX, XLS, CSV } public static final HashMap> CLASSIFICATION_PROPAGATION_MODE_LABELS_MAP = new HashMap>(){{ put(CLASSIFICATION_PROPAGATION_MODE_RESTRICT_LINEAGE, new ArrayList<>( Arrays.asList(CATALOG_PROCESS_INPUT_RELATIONSHIP_LABEL, - CATALOG_PROCESS_OUTPUT_RELATIONSHIP_LABEL, - COLUMN_LINEAGE_RELATIONSHIP_LABEL + CATALOG_PROCESS_OUTPUT_RELATIONSHIP_LABEL ))); put(CLASSIFICATION_PROPAGATION_MODE_DEFAULT, null); put(CLASSIFICATION_PROPAGATION_MODE_RESTRICT_HIERARCHY, new ArrayList<>( Arrays.asList(CATALOG_PROCESS_INPUT_RELATIONSHIP_LABEL, - CATALOG_PROCESS_OUTPUT_RELATIONSHIP_LABEL, - COLUMN_LINEAGE_RELATIONSHIP_LABEL + CATALOG_PROCESS_OUTPUT_RELATIONSHIP_LABEL ))); }}; @@ -403,7 +434,9 @@ public enum SupportedFileExtensions { XLSX, XLS, CSV } public static final String ATTR_STARRED_DETAILS_LIST = "starredDetailsList"; public static final String ATTR_ASSET_STARRED_BY = "assetStarredBy"; public static final String ATTR_ASSET_STARRED_AT = "assetStarredAt"; - + public static final String ATTR_CERTIFICATE_STATUS = "certificateStatus"; + public static final String ATTR_CONTRACT = "dataContractSpec"; + public static final String ATTR_CONTRACT_JSON = "dataContractJson"; public static final String STRUCT_STARRED_DETAILS = "StarredDetails"; public static final String KEYCLOAK_ROLE_ADMIN = "$admin"; @@ -418,12 +451,15 @@ public enum SupportedFileExtensions { XLSX, XLS, CSV } public static final Set SKIP_UPDATE_AUTH_CHECK_TYPES = new HashSet() {{ add(README_ENTITY_TYPE); add(LINK_ENTITY_TYPE); + add(STAKEHOLDER_ENTITY_TYPE); + add(STAKEHOLDER_TITLE_ENTITY_TYPE); }}; public static final Set SKIP_DELETE_AUTH_CHECK_TYPES = new HashSet() {{ add(README_ENTITY_TYPE); add(LINK_ENTITY_TYPE); add(POLICY_ENTITY_TYPE); + add(STAKEHOLDER_TITLE_ENTITY_TYPE); }}; private Constants() { @@ -445,6 +481,20 @@ private static String getEncodedTypePropertyKey(String defaultKey) { } } + public static String getESIndex() { + String indexSuffix = null; + if(AtlasConfiguration.ATLAS_MAINTENANCE_MODE.getBoolean()) { + try { + if (FeatureFlagStore.evaluate("use_temp_es_index", "true")) { + indexSuffix = "_temp"; + } + } catch (Exception e) { + LOG.error("Failed to evaluate feature flag with error", e); + } + } + return indexSuffix == null ? VERTEX_INDEX_NAME : VERTEX_INDEX_NAME + indexSuffix; + } + public static String getStaticFileAsString(String fileName) throws IOException { String atlasHomeDir = System.getProperty("atlas.home"); atlasHomeDir = StringUtils.isEmpty(atlasHomeDir) ? "." 
: atlasHomeDir; diff --git a/common/src/main/java/org/apache/atlas/service/FeatureFlagStore.java b/common/src/main/java/org/apache/atlas/service/FeatureFlagStore.java new file mode 100644 index 0000000000..adfca599d1 --- /dev/null +++ b/common/src/main/java/org/apache/atlas/service/FeatureFlagStore.java @@ -0,0 +1,45 @@ +package org.apache.atlas.service; + +import org.apache.atlas.service.redis.RedisService; +import org.apache.commons.lang.StringUtils; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.stereotype.Component; + +@Component +public class FeatureFlagStore { + private static RedisService redisService = null; + public FeatureFlagStore(@Qualifier("redisServiceImpl") RedisService redisService) { + FeatureFlagStore.redisService = redisService; + } + + public static boolean evaluate(String key, String expectedValue) { + boolean ret = false; + try{ + if (StringUtils.isEmpty(key) || StringUtils.isEmpty(expectedValue)) + return ret; + String value = redisService.getValue(addFeatureFlagNamespace(key)); + ret = StringUtils.equals(value, expectedValue); + } catch (Exception e) { + return ret; + } + return ret; + } + + public static void setFlag(String key, String value) { + if (StringUtils.isEmpty(key) || StringUtils.isEmpty(value)) + return; + + redisService.putValue(addFeatureFlagNamespace(key), value); + } + + public static void deleteFlag(String key) { + if (StringUtils.isEmpty(key)) + return; + + redisService.removeValue(addFeatureFlagNamespace(key)); + } + + private static String addFeatureFlagNamespace(String key) { + return "ff:"+key; + } +} diff --git a/common/src/main/java/org/apache/atlas/service/Services.java b/common/src/main/java/org/apache/atlas/service/Services.java index 5beab35421..2d548467fe 100644 --- a/common/src/main/java/org/apache/atlas/service/Services.java +++ b/common/src/main/java/org/apache/atlas/service/Services.java @@ -18,7 +18,6 @@ package org.apache.atlas.service; import org.apache.atlas.annotation.AtlasService; -import org.apache.atlas.type.AtlasType; import org.apache.commons.configuration.Configuration; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; @@ -28,7 +27,6 @@ import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import javax.inject.Inject; - import java.util.List; import static org.apache.atlas.AtlasConstants.ATLAS_MIGRATION_MODE_FILENAME; @@ -67,8 +65,8 @@ public void start() { continue; } - LOG.info("Starting service {}", svc.getClass().getName()); + svc.start(); } } catch (Exception e) { diff --git a/common/src/main/java/org/apache/atlas/service/metrics/MetricUtils.java b/common/src/main/java/org/apache/atlas/service/metrics/MetricUtils.java index c309190391..523225ce7f 100644 --- a/common/src/main/java/org/apache/atlas/service/metrics/MetricUtils.java +++ b/common/src/main/java/org/apache/atlas/service/metrics/MetricUtils.java @@ -52,6 +52,10 @@ public Timer.Sample start(String uri) { return matchCanonicalPattern(uri).isPresent() ? Timer.start(getMeterRegistry()) : null; } + public Timer.Sample start(String uri, boolean checkCanonicalPattern) { + return checkCanonicalPattern ? start(uri) : Timer.start(getMeterRegistry()); + } + public void recordHttpTimer(Timer.Sample sample, String method, String rawPath, int code, String... 
additionalTags) { if (Objects.isNull(sample)) { return; diff --git a/common/src/main/java/org/apache/atlas/service/metrics/MetricsRegistry.java b/common/src/main/java/org/apache/atlas/service/metrics/MetricsRegistry.java index 9fdf5b903e..8974155487 100644 --- a/common/src/main/java/org/apache/atlas/service/metrics/MetricsRegistry.java +++ b/common/src/main/java/org/apache/atlas/service/metrics/MetricsRegistry.java @@ -10,7 +10,7 @@ public interface MetricsRegistry { void collect(String requestId, String requestUri, AtlasPerfMetrics metrics); - void collectIndexsearch(String requestId, String requestUri, List applicationMetrics); + void collectApplicationMetrics(String requestId, String requestUri, List applicationMetrics); void scrape(PrintWriter writer) throws IOException; diff --git a/common/src/main/java/org/apache/atlas/service/metrics/MetricsRegistryServiceImpl.java b/common/src/main/java/org/apache/atlas/service/metrics/MetricsRegistryServiceImpl.java index 1ae6d2980c..19171325e2 100644 --- a/common/src/main/java/org/apache/atlas/service/metrics/MetricsRegistryServiceImpl.java +++ b/common/src/main/java/org/apache/atlas/service/metrics/MetricsRegistryServiceImpl.java @@ -1,12 +1,10 @@ package org.apache.atlas.service.metrics; -import io.micrometer.core.instrument.Metrics; -import io.micrometer.core.instrument.Tag; -import io.micrometer.core.instrument.Tags; -import io.micrometer.core.instrument.Timer; +import io.micrometer.core.instrument.*; import io.micrometer.prometheus.PrometheusMeterRegistry; import org.apache.atlas.ApplicationProperties; import org.apache.atlas.AtlasException; +import org.apache.atlas.utils.AtlasMetricType; import org.apache.atlas.utils.AtlasPerfMetrics; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -64,35 +62,42 @@ public void collect(String requestId, String requestUri, AtlasPerfMetrics metric } } //Use this if you want to publish Histograms - public void collectIndexsearch(String requestId, String requestUri, List applicationMetrics){ + public void collectApplicationMetrics(String requestId, String requestUri, List applicationMetrics){ try { for(AtlasPerfMetrics.Metric metric : applicationMetrics){ - Timer.builder(APPLICATION_LEVEL_METRICS_SUMMARY) - .serviceLevelObjectives( - Duration.ofMillis(500), - Duration.ofMillis(750), - Duration.ofMillis(1000), - Duration.ofMillis(1200), - Duration.ofMillis(1500), - Duration.ofSeconds(2), - Duration.ofSeconds(3), - Duration.ofSeconds(4), - Duration.ofSeconds(5), - Duration.ofSeconds(7), - Duration.ofSeconds(10), - Duration.ofSeconds(15), - Duration.ofSeconds(20), - Duration.ofSeconds(25), - Duration.ofSeconds(30), - Duration.ofSeconds(40), - Duration.ofSeconds(60), - Duration.ofSeconds(90), - Duration.ofSeconds(120), - Duration.ofSeconds(180) - ) - .publishPercentiles(PERCENTILES) - .tags(convertToMicrometerTags(metric.getTags())) - .register(getMeterRegistry()).record(metric.getTotalTimeMSecs(), TimeUnit.MILLISECONDS); + if (metric.getMetricType() == AtlasMetricType.COUNTER) { + Counter.builder(metric.getName()) + .tags(convertToMicrometerTags(metric.getTags())) + .register(getMeterRegistry()) + .increment(metric.getInvocations()); + } else { + Timer.builder(APPLICATION_LEVEL_METRICS_SUMMARY) + .serviceLevelObjectives( + Duration.ofMillis(500), + Duration.ofMillis(750), + Duration.ofMillis(1000), + Duration.ofMillis(1200), + Duration.ofMillis(1500), + Duration.ofSeconds(2), + Duration.ofSeconds(3), + Duration.ofSeconds(4), + Duration.ofSeconds(5), + Duration.ofSeconds(7), + Duration.ofSeconds(10), + 
Duration.ofSeconds(15), + Duration.ofSeconds(20), + Duration.ofSeconds(25), + Duration.ofSeconds(30), + Duration.ofSeconds(40), + Duration.ofSeconds(60), + Duration.ofSeconds(90), + Duration.ofSeconds(120), + Duration.ofSeconds(180) + ) + .publishPercentiles(PERCENTILES) + .tags(convertToMicrometerTags(metric.getTags())) + .register(getMeterRegistry()).record(metric.getTotalTimeMSecs(), TimeUnit.MILLISECONDS); + } } } catch (Exception e) { LOG.error("Failed to collect metrics", e); diff --git a/common/src/main/java/org/apache/atlas/service/redis/AbstractRedisService.java b/common/src/main/java/org/apache/atlas/service/redis/AbstractRedisService.java index 0ae20a60cc..51d505cc7a 100644 --- a/common/src/main/java/org/apache/atlas/service/redis/AbstractRedisService.java +++ b/common/src/main/java/org/apache/atlas/service/redis/AbstractRedisService.java @@ -29,9 +29,9 @@ public abstract class AbstractRedisService implements RedisService { private static final String ATLAS_REDIS_LOCK_WATCHDOG_TIMEOUT_MS = "atlas.redis.lock.watchdog_timeout.ms"; private static final int DEFAULT_REDIS_WAIT_TIME_MS = 15_000; private static final int DEFAULT_REDIS_LOCK_WATCHDOG_TIMEOUT_MS = 600_000; - private static final String ATLAS_METASTORE_SERVICE = "atlas-metastore-service"; RedissonClient redisClient; + RedissonClient redisCacheClient; Map keyLockMap; Configuration atlasConfig; long waitTimeInMS; @@ -71,6 +71,32 @@ public void releaseDistributedLock(String key) { } } + @Override + public String getValue(String key) { + // If value doesn't exist, return null else return the value + return (String) redisCacheClient.getBucket(convertToNamespace(key)).get(); + } + + @Override + public String putValue(String key, String value) { + // Put the value in the redis cache with TTL + redisCacheClient.getBucket(convertToNamespace(key)).set(value); + return value; + } + + @Override + public String putValue(String key, String value, int timeout) { + // Put the value in the redis cache with TTL + redisCacheClient.getBucket(convertToNamespace(key)).set(value, timeout, TimeUnit.SECONDS); + return value; + } + + @Override + public void removeValue(String key) { + // Remove the value from the redis cache + redisCacheClient.getBucket(convertToNamespace(key)).delete(); + } + private String getHostAddress() throws UnknownHostException { return InetAddress.getLocalHost().getHostAddress(); } @@ -85,6 +111,11 @@ private Config initAtlasConfig() throws AtlasException { return redisConfig; } + private String convertToNamespace(String key){ + // Append key with namespace :atlas + return "atlas:"+key; + } + Config getLocalConfig() throws AtlasException { Config config = initAtlasConfig(); config.useSingleServer() @@ -97,7 +128,18 @@ Config getLocalConfig() throws AtlasException { Config getProdConfig() throws AtlasException { Config config = initAtlasConfig(); config.useSentinelServers() - .setClientName(ATLAS_METASTORE_SERVICE) + .setReadMode(ReadMode.MASTER_SLAVE) + .setCheckSentinelsList(false) + .setMasterName(atlasConfig.getString(ATLAS_REDIS_MASTER_NAME)) + .addSentinelAddress(formatUrls(atlasConfig.getStringArray(ATLAS_REDIS_SENTINEL_URLS))) + .setUsername(atlasConfig.getString(ATLAS_REDIS_USERNAME)) + .setPassword(atlasConfig.getString(ATLAS_REDIS_PASSWORD)); + return config; + } + + Config getCacheImplConfig() { + Config config = new Config(); + config.useSentinelServers() .setReadMode(ReadMode.MASTER_SLAVE) .setCheckSentinelsList(false) .setKeepAlive(true) @@ -108,7 +150,9 @@ Config getProdConfig() throws AtlasException { 
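// Hedged sketch of the key layout implied by the caching methods above (key names are illustrative; the
// redisService variable stands for an injected RedisService). AbstractRedisService.convertToNamespace(...)
// prepends "atlas:", and FeatureFlagStore prepends "ff:", so a feature flag named "use_temp_es_index" is
// ultimately stored in the bucket "atlas:ff:use_temp_es_index".
redisService.putValue("ff:use_temp_es_index", "true");           // no TTL
redisService.putValue("search-ctx-1", "3/es-async-id-1", 30);    // cached with a 30 second TTL
String cached = redisService.getValue("search-ctx-1");           // "3/es-async-id-1", or null once expired
redisService.removeValue("search-ctx-1");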
.setMasterName(atlasConfig.getString(ATLAS_REDIS_MASTER_NAME)) .addSentinelAddress(formatUrls(atlasConfig.getStringArray(ATLAS_REDIS_SENTINEL_URLS))) .setUsername(atlasConfig.getString(ATLAS_REDIS_USERNAME)) - .setPassword(atlasConfig.getString(ATLAS_REDIS_PASSWORD)); + .setPassword(atlasConfig.getString(ATLAS_REDIS_PASSWORD)) + .setTimeout(50) //Setting UP timeout to 50ms + .setRetryAttempts(0); return config; } diff --git a/common/src/main/java/org/apache/atlas/service/redis/NoRedisServiceImpl.java b/common/src/main/java/org/apache/atlas/service/redis/NoRedisServiceImpl.java index 96a8fadc99..9bd942d304 100644 --- a/common/src/main/java/org/apache/atlas/service/redis/NoRedisServiceImpl.java +++ b/common/src/main/java/org/apache/atlas/service/redis/NoRedisServiceImpl.java @@ -29,6 +29,21 @@ public void releaseDistributedLock(String key) { //do nothing } + @Override + public String getValue(String key) { + return null; + } + + @Override + public String putValue(String key, String value, int timeout) { + return null; + } + + @Override + public void removeValue(String key) { + + } + @Override public Logger getLogger() { return LOG; diff --git a/common/src/main/java/org/apache/atlas/service/redis/RedisService.java b/common/src/main/java/org/apache/atlas/service/redis/RedisService.java index 1475f93e83..a541b1eeee 100644 --- a/common/src/main/java/org/apache/atlas/service/redis/RedisService.java +++ b/common/src/main/java/org/apache/atlas/service/redis/RedisService.java @@ -8,6 +8,14 @@ public interface RedisService { void releaseDistributedLock(String key); + String getValue(String key); + + String putValue(String key, String value); + + String putValue(String key, String value, int timeout); + + void removeValue(String key); + Logger getLogger(); } diff --git a/common/src/main/java/org/apache/atlas/service/redis/RedisServiceImpl.java b/common/src/main/java/org/apache/atlas/service/redis/RedisServiceImpl.java index 42dec6fa78..48f199473e 100644 --- a/common/src/main/java/org/apache/atlas/service/redis/RedisServiceImpl.java +++ b/common/src/main/java/org/apache/atlas/service/redis/RedisServiceImpl.java @@ -18,6 +18,7 @@ public class RedisServiceImpl extends AbstractRedisService{ @PostConstruct public void init() throws AtlasException { redisClient = Redisson.create(getProdConfig()); + redisCacheClient = Redisson.create(getCacheImplConfig()); LOG.info("Sentinel redis client created successfully."); } diff --git a/common/src/main/java/org/apache/atlas/service/redis/RedisServiceLocalImpl.java b/common/src/main/java/org/apache/atlas/service/redis/RedisServiceLocalImpl.java index 2eb774920e..c69a151a7d 100644 --- a/common/src/main/java/org/apache/atlas/service/redis/RedisServiceLocalImpl.java +++ b/common/src/main/java/org/apache/atlas/service/redis/RedisServiceLocalImpl.java @@ -18,9 +18,25 @@ public class RedisServiceLocalImpl extends AbstractRedisService { @PostConstruct public void init() throws AtlasException { redisClient = Redisson.create(getLocalConfig()); + redisCacheClient = Redisson.create(getLocalConfig()); LOG.info("Local redis client created successfully."); } + @Override + public String getValue(String key) { + return null; + } + + @Override + public String putValue(String key, String value, int timeout) { + return null; + } + + @Override + public void removeValue(String key) { + + } + @Override public Logger getLogger() { return LOG; diff --git a/common/src/main/java/org/apache/atlas/utils/AtlasMetricType.java b/common/src/main/java/org/apache/atlas/utils/AtlasMetricType.java new 
file mode 100644 index 0000000000..6752b7fbd4 --- /dev/null +++ b/common/src/main/java/org/apache/atlas/utils/AtlasMetricType.java @@ -0,0 +1,9 @@ +package org.apache.atlas.utils; + +public enum AtlasMetricType { + COUNTER, + GAUGE, + HISTOGRAM, + METER, + TIMER +} diff --git a/common/src/main/java/org/apache/atlas/utils/AtlasPerfMetrics.java b/common/src/main/java/org/apache/atlas/utils/AtlasPerfMetrics.java index beebcb6ab1..dd8a101ad5 100644 --- a/common/src/main/java/org/apache/atlas/utils/AtlasPerfMetrics.java +++ b/common/src/main/java/org/apache/atlas/utils/AtlasPerfMetrics.java @@ -104,6 +104,8 @@ long getElapsedTime() { public static class Metric { private final String name; + + private AtlasMetricType metricType; private long invocations = 0; private long totalTimeMSecs = 0; HashMap tags = new HashMap<>(); @@ -112,6 +114,14 @@ public Metric(String name) { this.name = name; } + public void setMetricType(AtlasMetricType metricType) { + this.metricType = metricType; + } + + public AtlasMetricType getMetricType() { + return metricType; + } + public String getName() { return name; } @@ -135,5 +145,9 @@ public HashMap getTags() { return tags; } + public void incrementInvocations() { + invocations++; + } + } } diff --git a/distro/src/main/assemblies/atlas-server-package.xml b/distro/src/main/assemblies/atlas-server-package.xml index 9eb71ef7cf..47e6bfc0f5 100755 --- a/distro/src/main/assemblies/atlas-server-package.xml +++ b/distro/src/main/assemblies/atlas-server-package.xml @@ -161,6 +161,12 @@ ../addons/policies policies + + + ../addons/override-policies + override-policies + + diff --git a/distro/src/main/assemblies/standalone-package.xml b/distro/src/main/assemblies/standalone-package.xml index 3ef91c047e..b3c81f9a60 100755 --- a/distro/src/main/assemblies/standalone-package.xml +++ b/distro/src/main/assemblies/standalone-package.xml @@ -141,6 +141,11 @@ policies + + ../addons/override-policies + override-policies + + ../addons/hive-bridge/src/bin diff --git a/graphdb/api/src/main/java/org/apache/atlas/repository/graphdb/AtlasGraph.java b/graphdb/api/src/main/java/org/apache/atlas/repository/graphdb/AtlasGraph.java index 666ab376a7..2b41b1c07d 100644 --- a/graphdb/api/src/main/java/org/apache/atlas/repository/graphdb/AtlasGraph.java +++ b/graphdb/api/src/main/java/org/apache/atlas/repository/graphdb/AtlasGraph.java @@ -383,9 +383,4 @@ public interface AtlasGraph { * @throws AtlasException when error encountered in creating the client. */ AtlasGraphIndexClient getGraphIndexClient()throws AtlasException; - - - void setEnableCache(boolean enableCache); - - Boolean isCacheEnabled(); } diff --git a/graphdb/api/src/main/java/org/apache/atlas/repository/graphdb/AtlasIndexQuery.java b/graphdb/api/src/main/java/org/apache/atlas/repository/graphdb/AtlasIndexQuery.java index 09fa0d6d06..d31f721b47 100644 --- a/graphdb/api/src/main/java/org/apache/atlas/repository/graphdb/AtlasIndexQuery.java +++ b/graphdb/api/src/main/java/org/apache/atlas/repository/graphdb/AtlasIndexQuery.java @@ -26,7 +26,7 @@ import java.util.List; import java.util.Map; import java.util.Set; - +import java.util.ArrayList; /** * A graph query that runs directly against a particular index. 
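// Hedged sketch of how the new metric-type plumbing above is intended to be used; it mirrors the
// async_request_redis_failure_counter pattern that appears later in this diff.
AtlasPerfMetrics.Metric failureCounter = new AtlasPerfMetrics.Metric("async_request_redis_failure_counter");
failureCounter.setMetricType(AtlasMetricType.COUNTER);
failureCounter.incrementInvocations();
RequestContext.get().addApplicationMetrics(failureCounter);
// When MetricsRegistryServiceImpl.collectApplicationMetrics(...) later receives this metric, the COUNTER type
// routes it to a Micrometer Counter incremented by getInvocations(); any other type is still recorded as a
// Timer with the service-level-objective buckets listed earlier.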
* @@ -103,7 +103,7 @@ interface Result { DirectIndexQueryResult getCollapseVertices(String key); Map> getHighLights(); - + ArrayList getSort(); } } diff --git a/graphdb/janus/pom.xml b/graphdb/janus/pom.xml index 5daef76b4e..3c91e4442a 100644 --- a/graphdb/janus/pom.xml +++ b/graphdb/janus/pom.xml @@ -119,10 +119,6 @@ ch.qos.logback * - - io.netty - * - com.codahale.metrics metrics-core @@ -139,6 +135,10 @@ ch.qos.logback * + + cassandra-hadoop-util + * + @@ -212,10 +212,6 @@ org.codehaus.woodstox woodstox-core-asl - - io.netty - * - @@ -282,6 +278,10 @@ mockito-all test + + org.apache.atlas + atlas-server-api + diff --git a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasElasticsearchQuery.java b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasElasticsearchQuery.java index 9d2e2489e8..3d49deccee 100644 --- a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasElasticsearchQuery.java +++ b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasElasticsearchQuery.java @@ -19,12 +19,15 @@ import org.apache.atlas.AtlasConfiguration; import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.RequestContext; import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.model.discovery.SearchParams; import org.apache.atlas.repository.graphdb.AtlasIndexQuery; import org.apache.atlas.repository.graphdb.AtlasVertex; import org.apache.atlas.repository.graphdb.DirectIndexQueryResult; import org.apache.atlas.type.AtlasType; +import org.apache.atlas.utils.AtlasMetricType; +import org.apache.atlas.utils.AtlasPerfMetrics; import org.apache.commons.collections.MapUtils; import org.apache.commons.lang.NotImplementedException; import org.apache.commons.lang.StringUtils; @@ -46,6 +49,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.janusgraph.util.encoding.LongEncoding; +import org.redisson.client.RedisException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -143,7 +147,7 @@ private DirectIndexQueryResult runQueryWithLowLevelClient(SearchParams searchPar } } - private Map runQueryWithLowLevelClient(String query) throws AtlasBaseException { + public Map runQueryWithLowLevelClient(String query) throws AtlasBaseException { Map ret = new HashMap<>(); try { String responseString = performDirectIndexQuery(query, true); @@ -171,14 +175,16 @@ private Map runQueryWithLowLevelClient(String query) throws Atla } private DirectIndexQueryResult performAsyncDirectIndexQuery(SearchParams searchParams) throws AtlasBaseException, IOException { + AtlasPerfMetrics.MetricRecorder metric = RequestContext.get().startMetricRecord("performAsyncDirectIndexQuery"); DirectIndexQueryResult result = null; + boolean contextIdExists = StringUtils.isNotEmpty(searchParams.getSearchContextId()) && searchParams.getSearchContextSequenceNo() != null; try { - if(StringUtils.isNotEmpty(searchParams.getSearchContextId()) && searchParams.getSearchContextSequenceNo() != null) { - // If the search context id and greater sequence no is present, then we need to delete the previous search context async - processRequestWithSameSearchContextId(searchParams); + if(contextIdExists) { + // If the search context id and greater sequence no is present, + // then we need to delete the previous search context async + processRequestWithSameSearchContextId(searchParams); } AsyncQueryResult response = submitAsyncSearch(searchParams, false).get(); - 
if(response.isRunning()) { /* * If the response is still running, then we need to wait for the response @@ -189,8 +195,8 @@ private DirectIndexQueryResult performAsyncDirectIndexQuery(SearchParams searchP String esSearchId = response.getId(); String searchContextId = searchParams.getSearchContextId(); Integer searchContextSequenceNo = searchParams.getSearchContextSequenceNo(); - if (StringUtils.isNotEmpty(searchContextId) && searchContextSequenceNo != null) { - SearchContextCache.put(searchContextId, searchContextSequenceNo, esSearchId); + if (contextIdExists) { + CompletableFuture.runAsync(() -> SearchContextCache.put(searchContextId, searchContextSequenceNo, esSearchId)); } response = getAsyncSearchResponse(searchParams, esSearchId).get(); if (response == null) { @@ -204,6 +210,16 @@ private DirectIndexQueryResult performAsyncDirectIndexQuery(SearchParams searchP }catch (Exception e) { LOG.error("Failed to execute direct query on ES {}", e.getMessage()); throw new AtlasBaseException(AtlasErrorCode.INDEX_SEARCH_FAILED, e.getMessage()); + } finally { + if (contextIdExists) { + // If the search context id is present, then we need to remove the search context from the cache + try { + CompletableFuture.runAsync(() -> SearchContextCache.remove(searchParams.getSearchContextId())); + } catch (Exception e) { + LOG.error("Failed to remove the search context from the cache {}", e.getMessage()); + } + } + RequestContext.get().endMetricRecord(metric); } return result; } @@ -218,17 +234,31 @@ private DirectIndexQueryResult performAsyncDirectIndexQuery(SearchParams searchP * We also need to check if the search ID exists and delete if necessary */ private void processRequestWithSameSearchContextId(SearchParams searchParams) { - // Extract search context ID and sequence number - String currentSearchContextId = searchParams.getSearchContextId(); - Integer currentSequenceNumber = searchParams.getSearchContextSequenceNo(); - // Get the search ID from the cache if sequence number is greater than the current sequence number - String previousESSearchId = SearchContextCache.getESAsyncSearchIdFromContextCache(currentSearchContextId, currentSequenceNumber); - - if (StringUtils.isNotEmpty(previousESSearchId)) { - LOG.debug("Deleting the previous async search response with ID {}", previousESSearchId); - // If the search ID exists, then we need to delete the search context - deleteAsyncSearchResponse(previousESSearchId); - SearchContextCache.remove(currentSearchContextId); + AtlasPerfMetrics.MetricRecorder funcMetric = RequestContext.get().startMetricRecord("processRequestWithSameSearchContextId"); + try { + // Extract search context ID and sequence number + String currentSearchContextId = searchParams.getSearchContextId(); + Integer currentSequenceNumber = searchParams.getSearchContextSequenceNo(); + // Get the search ID from the cache if sequence number is greater than the current sequence number + String previousESSearchId = SearchContextCache.getESAsyncSearchIdFromContextCache(currentSearchContextId, currentSequenceNumber); + + if (StringUtils.isNotEmpty(previousESSearchId)) { + LOG.debug("Deleting the previous async search response with ID {}", previousESSearchId); + // If the search ID exists, then we need to delete the search context + deleteAsyncSearchResponse(previousESSearchId); + } + } catch (RedisException e) { + AtlasPerfMetrics.Metric failureCounter = new AtlasPerfMetrics.Metric("async_request_redis_failure_counter"); + failureCounter.setMetricType(AtlasMetricType.COUNTER); + 
failureCounter.incrementInvocations(); + LOG.error("Failed to process the request with the same search context ID {}", e.getMessage()); + RequestContext.get().addApplicationMetrics(failureCounter); + } + catch (Exception e) { + LOG.error("Failed to process the request with the same search context ID {}", e.getMessage()); + } + finally { + RequestContext.get().endMetricRecord(funcMetric); } } @@ -415,7 +445,10 @@ private DirectIndexQueryResult getResultFromResponse(Map if (hits_0 == null) { return result; } - this.vertexTotals = (Integer) hits_0.get("total").get("value"); + LinkedHashMap approximateCount = hits_0.get("total"); + if (approximateCount != null) { + this.vertexTotals = (Integer) approximateCount.get("value"); + } List hits_1 = AtlasType.fromJson(AtlasType.toJson(hits_0.get("hits")), List.class); @@ -511,6 +544,11 @@ public DirectIndexQueryResult getCollapseVerti public Map> getHighLights() { return new HashMap<>(); } + + @Override + public ArrayList getSort() { + return new ArrayList<>(); + } } @@ -576,6 +614,15 @@ public Map> getHighLights() { } return new HashMap<>(); } + + @Override + public ArrayList getSort() { + Object sort = this.hit.get("sort"); + if (Objects.nonNull(sort) && sort instanceof List) { + return (ArrayList) sort; + } + return new ArrayList<>(); + } } public class AsyncQueryResult { diff --git a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusGraph.java b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusGraph.java index 2886a01eb9..3a9c933797 100644 --- a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusGraph.java +++ b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusGraph.java @@ -126,6 +126,7 @@ public class AtlasJanusGraph implements AtlasGraph directIndexQuery(String query) throws AtlasBaseExcept @Override public Iterator> vertices() { - Iterator> results = query.vertices().iterator(); + Iterator> results = query.vertexStream().iterator(); Function, Result> function = new Function, Result>() { @@ -77,7 +77,7 @@ public Iterator> vertices(int offset, i Iterator> results = query .offset(offset) .limit(limit) - .vertices().iterator(); + .vertexStream().iterator(); Function, Result> function = new Function, Result>() { @@ -100,7 +100,7 @@ public Iterator> vertices(int offset, i .orderBy(sortBy, sortOrder) .offset(offset) .limit(limit) - .vertices().iterator(); + .vertexStream().iterator(); Function, Result> function = new Function, Result>() { @@ -153,5 +153,10 @@ public DirectIndexQueryResult getCollapseVerti public Map> getHighLights() { return new HashMap<>(); } + + @Override + public ArrayList getSort() { + return new ArrayList<>(); + } } } diff --git a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/SearchContextCache.java b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/SearchContextCache.java index 1780e5d1f9..f7e5718f19 100644 --- a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/SearchContextCache.java +++ b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/SearchContextCache.java @@ -1,49 +1,77 @@ package org.apache.atlas.repository.graphdb.janus; +import org.apache.atlas.RequestContext; +import org.apache.atlas.service.redis.RedisService; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Qualifier; +import 
org.springframework.stereotype.Component; +@Component +public class SearchContextCache { + private static final Logger LOG = LoggerFactory.getLogger(SearchContextCache.class); + private static RedisService redisService = null; -import com.google.common.cache.Cache; -import com.google.common.cache.CacheBuilder; + public static final String INVALID_SEQUENCE = "invalid_sequence"; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.TimeUnit; -public class SearchContextCache { - private static final Cache> searchContextCache = CacheBuilder.newBuilder() - .maximumSize(200) - .expireAfterWrite(30, TimeUnit.SECONDS) - .build(); + public SearchContextCache(@Qualifier("redisServiceImpl") RedisService redisService) { + SearchContextCache.redisService = redisService; + } + public static void put(String key, Integer sequence, String esAsyncId) { - HashMap entry = new HashMap<>(); - entry.put(sequence, esAsyncId); - searchContextCache.put(key, entry); + AtlasPerfMetrics.MetricRecorder metric = RequestContext.get().startMetricRecord("putInCache"); + try { + // Build the string in format `sequence/esAsyncId` and store it in redis + String val = sequence + "/" + esAsyncId; + redisService.putValue(key, val, 30); + } finally { + RequestContext.get().endMetricRecord(metric); + } } - public static HashMap get(String key){ - return searchContextCache.getIfPresent(key); + public static String get(String key) { + try { + return redisService.getValue(key); + } catch (Exception e) { + LOG.error("Error while fetching value from Redis", e); + return null; + } + } public static String getESAsyncSearchIdFromContextCache(String key, Integer sequence){ - //Get the context cache for the given key - HashMap contextCache = get(key); - if(contextCache == null || sequence == null){ - return null; - } - //Find the highest sequence number - int maxStoredSequence = 0; - for (Integer seq : contextCache.keySet()) { - if (seq > maxStoredSequence) { - maxStoredSequence = seq; + AtlasPerfMetrics.MetricRecorder metric = RequestContext.get().startMetricRecord("getESAsyncSearchIdFromContextCache"); + try { + //Get the context cache for the given key + String contextCache = get(key); + if(contextCache == null || sequence == null){ + return null; } + // Split the context cache to get the sequence and ESAsyncId + String[] contextCacheSplit = contextCache.split("/"); + if(contextCacheSplit.length != 2){ + return null; + } + int seq = Integer.parseInt(contextCacheSplit[0]); + if(sequence > seq){ + return contextCacheSplit[1]; + } else if (sequence < seq) { + return INVALID_SEQUENCE; + } + return null; + } finally { + RequestContext.get().endMetricRecord(metric); } - //If the given sequence is greater than the max stored sequence, return the ESAsyncId else return null - return sequence > maxStoredSequence ? 
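// Hedged walkthrough of the "sequence/esAsyncId" encoding used above (context id and async id are illustrative):
SearchContextCache.put("search-ctx-1", 3, "es-async-id-1");   // Redis now holds "3/es-async-id-1" with a 30s TTL
// For a later request on the same context id:
//   sequence 4 -> getESAsyncSearchIdFromContextCache("search-ctx-1", 4) returns "es-async-id-1",
//                 so the previous async search can be deleted before the new one is submitted;
//   sequence 2 -> returns SearchContextCache.INVALID_SEQUENCE, marking the request as out of date;
//   sequence 3 -> returns null, i.e. nothing to clean up.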
contextCache.getOrDefault(maxStoredSequence, null) : null; - } - public static void remove(String key) { - searchContextCache.invalidate(key); } + public static void remove(String key) { + AtlasPerfMetrics.MetricRecorder metric = RequestContext.get().startMetricRecord("removeFromCache"); + try { + redisService.removeValue(key); + } finally { + RequestContext.get().endMetricRecord(metric); + } - public static void clear() { - searchContextCache.cleanUp(); } } + diff --git a/graphdb/janus/src/main/java/org/janusgraph/diskstorage/solr/Solr6Index.java b/graphdb/janus/src/main/java/org/janusgraph/diskstorage/solr/Solr6Index.java index 23c11de301..e462344f99 100644 --- a/graphdb/janus/src/main/java/org/janusgraph/diskstorage/solr/Solr6Index.java +++ b/graphdb/janus/src/main/java/org/janusgraph/diskstorage/solr/Solr6Index.java @@ -121,6 +121,7 @@ import org.janusgraph.graphdb.query.condition.Not; import org.janusgraph.graphdb.query.condition.Or; import org.janusgraph.graphdb.query.condition.PredicateCondition; +import org.janusgraph.graphdb.tinkerpop.optimize.step.Aggregation; import org.janusgraph.graphdb.types.ParameterType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -597,6 +598,11 @@ public void restore(Map>> documents, } } + @Override + public Number queryAggregation(IndexQuery indexQuery, KeyInformation.IndexRetriever indexRetriever, BaseTransaction baseTransaction, Aggregation aggregation) throws BackendException { + return null; + } + // This method will create a map of field ids to values. In the case of multiValued fields, // it will consolidate all the values into one List or Set so it can be updated with a single Solr operation private Map collectFieldValues(List content, String collectionName, @@ -672,7 +678,6 @@ public Stream query(IndexQuery query, KeyInformation.IndexRetriever info doc -> doc.getFieldValue(keyIdField).toString()); } - @Override public Long queryCount(IndexQuery query, KeyInformation.IndexRetriever information, BaseTransaction tx) throws BackendException { try { String collection = query.getStore(); @@ -1064,6 +1069,11 @@ public void clearStorage() throws BackendException { } } + @Override + public void clearStore(String s) throws BackendException { + + } + @Override public boolean supports(KeyInformation information, JanusGraphPredicate predicate) { final Class dataType = information.getDataType(); diff --git a/intg/src/main/java/org/apache/atlas/AtlasErrorCode.java b/intg/src/main/java/org/apache/atlas/AtlasErrorCode.java index fd2766616f..1ad37b2564 100644 --- a/intg/src/main/java/org/apache/atlas/AtlasErrorCode.java +++ b/intg/src/main/java/org/apache/atlas/AtlasErrorCode.java @@ -290,8 +290,8 @@ public enum AtlasErrorCode { TASK_INVALID_PARAMETERS(400, "ATLAS-400-00-111", "Invalid parameters for task {0}"), TASK_TYPE_NOT_SUPPORTED(400, "ATLAS-400-00-112", "Task type {0} is not supported"), - PERSONA_POLICY_ASSETS_LIMIT_EXCEEDED(400, "ATLAS-400-00-113", "Exceeded limit of maximum allowed assets across policies for a Persona: Limit: {0}, assets: {1}"); - + PERSONA_POLICY_ASSETS_LIMIT_EXCEEDED(400, "ATLAS-400-00-113", "Exceeded limit of maximum allowed assets across policies for a Persona: Limit: {0}, assets: {1}"), + ADMIN_LIST_SHOULD_NOT_BE_EMPTY(400, "ATLAS-400-00-114", "Admin list should not be empty for type {0}"); private String errorCode; private String errorMessage; diff --git a/intg/src/main/java/org/apache/atlas/model/audit/EntityAuditEventV2.java b/intg/src/main/java/org/apache/atlas/model/audit/EntityAuditEventV2.java index 
9a4b03df73..1947147e38 100644 --- a/intg/src/main/java/org/apache/atlas/model/audit/EntityAuditEventV2.java +++ b/intg/src/main/java/org/apache/atlas/model/audit/EntityAuditEventV2.java @@ -133,8 +133,12 @@ public static EntityAuditActionV2 fromString(String strValue) { private AtlasEntity entity; private EntityAuditType type; private Map detail; + private AtlasEntityHeader entityDetail; private Map headers; + private List> classificationDetails; + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) + private String classificationDetail; public EntityAuditEventV2() { } @@ -290,12 +294,13 @@ public boolean equals(Object o) { Objects.equals(created, that.created) && Objects.equals(typeName, that.typeName) && Objects.equals(entityQualifiedName, that.entityQualifiedName) && - Objects.equals(headers, that.headers); + Objects.equals(headers, that.headers) && + Objects.equals(classificationDetails, that.classificationDetails); } @Override public int hashCode() { - return Objects.hash(entityId, timestamp, user, action, details, eventKey, entity, type, detail, created, entityQualifiedName, typeName, headers); + return Objects.hash(entityId, timestamp, user, action, details, eventKey, entity, type, detail, created, entityQualifiedName, typeName, headers, classificationDetails); } @Override @@ -315,6 +320,7 @@ public String toString() { sb.append(", detail=").append(detail); sb.append(", created=").append(created); sb.append(", headers=").append(headers); + sb.append(", classificationDetails").append(classificationDetails); sb.append('}'); return sb.toString(); @@ -346,6 +352,7 @@ public void clear() { detail = null; created = 0L; headers = null; + classificationDetails = null; } private String getJsonPartFromDetails() { @@ -355,7 +362,7 @@ private String getJsonPartFromDetails() { if(bracketStartPosition != -1) { ret = details.substring(bracketStartPosition); } - } else if(MapUtils.isNotEmpty(detail)) { + } else if(!detail.isEmpty()) { ret = AtlasType.toJson(detail); } @@ -415,4 +422,20 @@ public static void sortEvents(List events, String sortByColu events.sort(sortOrderDesc ? 
comparator.reversed() : comparator); } + + public List> getClassificationDetails() { + return classificationDetails; + } + + public void setClassificationDetails(List> classificationDetails) { + this.classificationDetails = classificationDetails; + } + + public String getClassificationDetail() { + return classificationDetail; + } + + public void setClassificationDetail(String classificationDetail) { + this.classificationDetail = classificationDetail; + } } \ No newline at end of file diff --git a/intg/src/main/java/org/apache/atlas/model/discovery/AtlasSearchResult.java b/intg/src/main/java/org/apache/atlas/model/discovery/AtlasSearchResult.java index 30d31b28a9..96bc0dc587 100644 --- a/intg/src/main/java/org/apache/atlas/model/discovery/AtlasSearchResult.java +++ b/intg/src/main/java/org/apache/atlas/model/discovery/AtlasSearchResult.java @@ -37,6 +37,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.LinkedHashMap; import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE; import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.PUBLIC_ONLY; @@ -59,7 +60,7 @@ public class AtlasSearchResult implements Serializable { private Map aggregations; private Map searchScore; - private Map searchMetadata; + private LinkedHashMap searchMetadata; @@ -162,13 +163,26 @@ public Map getSearchMetadata() { public void addHighlights(String guid, Map> highlights) { if(MapUtils.isEmpty(this.searchMetadata)) { - this.searchMetadata = new HashMap<>(); + this.searchMetadata = new LinkedHashMap<>(); } ElasticsearchMetadata v = this.searchMetadata.getOrDefault(guid, new ElasticsearchMetadata()); v.addHighlights(highlights); this.searchMetadata.put(guid, v); } + public void addSort(String guid, ArrayList sort) { + if(MapUtils.isEmpty(this.searchMetadata)) { + this.searchMetadata = new LinkedHashMap<>(); + } + ElasticsearchMetadata sortMetadata = this.searchMetadata.getOrDefault(guid, new ElasticsearchMetadata()); + sortMetadata.addSort(sort); + if (this.searchMetadata.containsKey(guid)) { + this.searchMetadata.replace(guid, sortMetadata); + } else { + this.searchMetadata.put(guid, sortMetadata); + } + } + @Override public int hashCode() { return Objects.hash(queryType, searchParameters, queryText, type, classification, entities, attributes, fullTextResult, referredEntities, nextMarker); } diff --git a/intg/src/main/java/org/apache/atlas/model/discovery/ElasticsearchMetadata.java b/intg/src/main/java/org/apache/atlas/model/discovery/ElasticsearchMetadata.java index 270ea5e8d8..096ff82b83 100644 --- a/intg/src/main/java/org/apache/atlas/model/discovery/ElasticsearchMetadata.java +++ b/intg/src/main/java/org/apache/atlas/model/discovery/ElasticsearchMetadata.java @@ -1,15 +1,20 @@ package org.apache.atlas.model.discovery; +import com.fasterxml.jackson.annotation.JsonInclude; import org.apache.commons.collections.MapUtils; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.ArrayList; public class ElasticsearchMetadata { private Map> highlights; + @JsonInclude(JsonInclude.Include.NON_NULL) + private ArrayList sort; + public Map> getHighlights() { return highlights; } @@ -23,6 +28,15 @@ public void addHighlights(Map> highlights) { } } + public Object getSort() { return sort; } + + public void addSort(ArrayList sort) { + if (sort.isEmpty()) { + this.sort = null; + } else { + this.sort = sort; + } + } @Override public String toString() { diff --git 
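// Hedged sketch of how the new sort metadata is propagated (guid and sort values are illustrative). Each
// index-query hit now exposes its Elasticsearch "sort" array via getSort(), and AtlasSearchResult.addSort(...)
// stores it per guid; searchMetadata is a LinkedHashMap so entries keep the result order, which would let a
// caller reuse the last entry's values as a search_after cursor for the next page.
ArrayList<Object> sortValues = resultHit.getSort();               // e.g. [1700000000000, "guid-123"]
searchResult.addSort(entityHeader.getGuid(), sortValues);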
a/intg/src/main/java/org/apache/atlas/model/discovery/IndexSearchParams.java b/intg/src/main/java/org/apache/atlas/model/discovery/IndexSearchParams.java index 8d8cc08247..c06ca21030 100644 --- a/intg/src/main/java/org/apache/atlas/model/discovery/IndexSearchParams.java +++ b/intg/src/main/java/org/apache/atlas/model/discovery/IndexSearchParams.java @@ -34,6 +34,11 @@ public String getQuery() { return queryString; } + @Override + public void setQuery(String query) { + this.queryString = query; + } + public Map getDsl() { return dsl; } @@ -89,6 +94,7 @@ public String toString() { ", allowDeletedRelations=" + allowDeletedRelations + ", accessControlExclusive=" + accessControlExclusive + ", utmTags="+ getUtmTags() + + ", enableFullRestriction="+ enableFullRestriction + '}'; } } diff --git a/intg/src/main/java/org/apache/atlas/model/discovery/SearchParams.java b/intg/src/main/java/org/apache/atlas/model/discovery/SearchParams.java index 57000cc84f..d01224bf80 100644 --- a/intg/src/main/java/org/apache/atlas/model/discovery/SearchParams.java +++ b/intg/src/main/java/org/apache/atlas/model/discovery/SearchParams.java @@ -18,20 +18,33 @@ public class SearchParams { boolean suppressLogs; boolean excludeMeanings; boolean excludeClassifications; + boolean enableFullRestriction; + + boolean includeClassificationNames = false; RequestMetadata requestMetadata = new RequestMetadata(); Async async = new Async(); boolean showHighlights; + boolean showSearchMetadata; + public String getQuery() { return getQuery(); } + public boolean getEnableFullRestriction() { + return enableFullRestriction; + } + public Set getAttributes() { return attributes; } + public void setQuery(String query) { + setQuery(query); + } + public void setAttributes(Set attributes) { this.attributes = attributes; } @@ -100,6 +113,14 @@ public void setExcludeMeanings(boolean excludeMeanings) { this.excludeMeanings = excludeMeanings; } + public boolean isIncludeClassificationNames() { + return includeClassificationNames; + } + + public void setIncludeClassificationNames(boolean includeClassificationNames) { + this.includeClassificationNames = includeClassificationNames; + } + public boolean isSaveSearchLog() { return requestMetadata.saveSearchLog; } @@ -144,10 +165,14 @@ public String getSearchInput() { return this.requestMetadata.getSearchInput(); } - public boolean isShowHighlights() { + public boolean getShowHighlights() { return showHighlights; } + public boolean getShowSearchMetadata() { + return showSearchMetadata; + } + static class RequestMetadata { private String searchInput; diff --git a/intg/src/main/java/org/apache/atlas/model/instance/AtlasEntityHeader.java b/intg/src/main/java/org/apache/atlas/model/instance/AtlasEntityHeader.java index be2819d7f2..c251811df1 100644 --- a/intg/src/main/java/org/apache/atlas/model/instance/AtlasEntityHeader.java +++ b/intg/src/main/java/org/apache/atlas/model/instance/AtlasEntityHeader.java @@ -71,8 +71,13 @@ public class AtlasEntityHeader extends AtlasStruct implements Serializable { private Date createTime = null; private Date updateTime = null; private String deleteHandler = null; + private Integer depth = null; + private Integer traversalOrder = null; + private Integer finishTime = null; + private Map collapse = null; + public AtlasEntityHeader() { this(null, null); } @@ -146,12 +151,24 @@ public AtlasEntityHeader(AtlasEntity entity) { } } - public String getGuid() { - return guid; + public String getGuid() { return guid; } + + public void setGuid(String guid) { this.guid = guid; } + + 
public Integer getDepth() { return depth; } + + public void setDepth(Integer depth) { this.depth = depth; } + + public Integer getTraversalOrder() { return traversalOrder; } + + public void setTraversalOrder(Integer traversalOrder) { this.traversalOrder = traversalOrder; } + + public Integer getFinishTime() { + return finishTime; } - public void setGuid(String guid) { - this.guid = guid; + public void setFinishTime(Integer finishTime) { + this.finishTime = finishTime; } public AtlasEntity.Status getStatus() { diff --git a/intg/src/main/java/org/apache/atlas/type/AtlasTypeRegistry.java b/intg/src/main/java/org/apache/atlas/type/AtlasTypeRegistry.java index 4cb7e4a185..070f24cbeb 100644 --- a/intg/src/main/java/org/apache/atlas/type/AtlasTypeRegistry.java +++ b/intg/src/main/java/org/apache/atlas/type/AtlasTypeRegistry.java @@ -44,6 +44,9 @@ public class AtlasTypeRegistry { private static final Logger LOG = LoggerFactory.getLogger(AtlasTypeRegistry.class); private static final int DEFAULT_LOCK_MAX_WAIT_TIME_IN_SECONDS = 15; + public static final ArrayList TYPENAMES_TO_SKIP_SUPER_TYPE_CHECK = new ArrayList() {{ + add("Table"); + }}; protected RegistryData registryData; private final TypeRegistryUpdateSynchronizer updateSynchronizer; private final Set missingRelationshipDefs; @@ -85,6 +88,10 @@ public AtlasType getType(String typeName) throws AtlasBaseException { LOG.debug("==> AtlasTypeRegistry.getType({})", typeName); } + if (typeName == null) { + return null; + } + AtlasType ret = registryData.allTypes.getTypeByName(typeName); if (ret == null) { diff --git a/intg/src/main/java/org/apache/atlas/type/Constants.java b/intg/src/main/java/org/apache/atlas/type/Constants.java index 01550ae9c0..effc5208b1 100644 --- a/intg/src/main/java/org/apache/atlas/type/Constants.java +++ b/intg/src/main/java/org/apache/atlas/type/Constants.java @@ -54,10 +54,12 @@ public final class Constants { public static final String GLOSSARY_PROPERTY_KEY = encodePropertyKey(INTERNAL_PROPERTY_KEY_PREFIX + "glossary"); public static final String CATEGORIES_PROPERTY_KEY = encodePropertyKey(INTERNAL_PROPERTY_KEY_PREFIX + "categories"); public static final String CATEGORIES_PARENT_PROPERTY_KEY = encodePropertyKey(INTERNAL_PROPERTY_KEY_PREFIX + "parentCategory"); + public static final String MEANINGS_TEXT_PROPERTY_KEY = encodePropertyKey(INTERNAL_PROPERTY_KEY_PREFIX + "meaningsText"); public static final String MEANING_NAMES_PROPERTY_KEY = encodePropertyKey(INTERNAL_PROPERTY_KEY_PREFIX + "meaningNames"); public static final String HAS_LINEAGE = encodePropertyKey(INTERNAL_PROPERTY_KEY_PREFIX + "hasLineage"); public static final String HAS_LINEAGE_VALID = encodePropertyKey(INTERNAL_PROPERTY_KEY_PREFIX + "hasLineageValid"); + public static final String LEXICOGRAPHICAL_SORT_ORDER = "lexicographicalSortOrder"; //Classification-Only System Attributes public static final String CLASSIFICATION_ENTITY_STATUS_PROPERTY_KEY = encodePropertyKey(INTERNAL_PROPERTY_KEY_PREFIX + "entityStatus"); diff --git a/pom.xml b/pom.xml index 1cc9aa70dc..bc759155f9 100644 --- a/pom.xml +++ b/pom.xml @@ -697,7 +697,7 @@ 4.3.0 1.8 3.2.2 - 7.16.2 + 7.17.4 org.apache.atlas.repository.audit.InMemoryEntityAuditRepository 2.13.2 2.18.1 @@ -717,7 +717,7 @@ 4.4.13 2.12.4 2.12.4 - 0.6.03 + 1.0.0 0.5.3 1 3.1.0 @@ -773,12 +773,11 @@ 2C 3.0.0-M5 6.9.4 - 3.5.1 + 3.7.0 5.0.3 3.4.6 3.20.1 1.11.1 - 4.1.61.Final @@ -798,6 +797,7 @@ dashboardv2 dashboardv3 + auth-common auth-agents-cred auth-agents-common auth-audits @@ -836,16 +836,6 @@ false - - github - 
https://maven.pkg.github.com/atlanhq/atlan-janusgraph - - true - - - true - - hortonworks.repo https://repo.hortonworks.com/content/repositories/releases diff --git a/repository/pom.xml b/repository/pom.xml index 10d8d876fb..2a0133883b 100755 --- a/repository/pom.xml +++ b/repository/pom.xml @@ -59,6 +59,18 @@ atlas-graphdb-api + + org.apache.tinkerpop + gremlin-util + ${tinkerpop.version} + + + + org.jctools + jctools-core + 4.0.1 + + org.antlr antlr4-runtime @@ -111,7 +123,7 @@ io.netty - * + netty-handler org.slf4j @@ -322,6 +334,22 @@ 3.0.0-SNAPSHOT + + org.apache.atlas + auth-common + 3.0.0-SNAPSHOT + + + org.hibernate + hibernate-validator + 4.3.2.Final + + + com.fasterxml.jackson.dataformat + jackson-dataformat-yaml + 2.12.7 + + @@ -377,6 +405,21 @@ + + + com.networknt + json-schema-validator + 1.0.87 + + + + + org.apache.commons + commons-lang3 + + + + @@ -393,7 +436,6 @@ org.apache.atlas atlas-testtools ${project.version} - test com.fasterxml.jackson.core diff --git a/repository/src/main/java/org/apache/atlas/authorizer/JsonToElasticsearchQuery.java b/repository/src/main/java/org/apache/atlas/authorizer/JsonToElasticsearchQuery.java new file mode 100644 index 0000000000..f7715b1f58 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/authorizer/JsonToElasticsearchQuery.java @@ -0,0 +1,86 @@ +package org.apache.atlas.authorizer; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import org.apache.atlas.RequestContext; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class JsonToElasticsearchQuery { + private static final Logger LOG = LoggerFactory.getLogger(JsonToElasticsearchQuery.class); + + private static JsonNode convertConditionToQuery(String condition, JsonNode criterion, ObjectMapper mapper) { + if (condition.equals("AND")) { + return mapper.createObjectNode().set("bool", mapper.createObjectNode().set("filter", mapper.createArrayNode())); + } else if (condition.equals("OR")) { + //JsonNode node = mapper.createObjectNode().set("bool", mapper.createObjectNode()); + return mapper.createObjectNode() + .set("bool", mapper.createObjectNode() + .set("should", mapper.createArrayNode())); + } else { + throw new IllegalArgumentException("Unsupported condition: " + condition); + } + } + + public static JsonNode convertJsonToQuery(JsonNode data, ObjectMapper mapper) { + AtlasPerfMetrics.MetricRecorder convertJsonToQueryMetrics = RequestContext.get().startMetricRecord("convertJsonToQuery"); + String condition = data.get("condition").asText(); + JsonNode criterion = data.get("criterion"); + + JsonNode query = convertConditionToQuery(condition, criterion, mapper); + + for (JsonNode crit : criterion) { + if (crit.has("condition")) { + JsonNode nestedQuery = convertJsonToQuery(crit, mapper); + if (condition.equals("AND")) { + ((ArrayNode) query.get("bool").get("filter")).add(nestedQuery); + } else { + ((ArrayNode) query.get("bool").get("should")).add(nestedQuery); + } + } else { + String operator = crit.get("operator").asText(); + String attributeName = crit.get("attributeName").asText(); + String attributeValue = crit.get("attributeValue").asText(); + + switch (operator) { + case "EQUALS": + ObjectNode termNode = ((ArrayNode) query.get("bool").get(condition.equals("AND") ? 
"filter" : "should")).addObject(); + termNode.putObject("term").put(attributeName, attributeValue); + break; + + case "NOT_EQUALS": + termNode = ((ArrayNode) query.get("bool").get(condition.equals("AND") ? "filter" : "should")).addObject(); + termNode.putObject("bool").putObject("must_not").putObject("term").put(attributeName, attributeValue); + break; + + case "STARTS_WITH": + ObjectNode wildcardNode = ((ArrayNode) query.get("bool").get(condition.equals("AND") ? "filter" : "should")).addObject(); + wildcardNode.putObject("wildcard").put(attributeName, attributeValue + "*"); + break; + + case "ENDS_WITH": + wildcardNode = ((ArrayNode) query.get("bool").get(condition.equals("AND") ? "filter" : "should")).addObject(); + wildcardNode.putObject("wildcard").put(attributeName, "*" + attributeValue); + break; + + case "IN": + ObjectNode termsNode = ((ArrayNode) query.get("bool").get(condition.equals("AND") ? "filter" : "should")).addObject(); + termsNode.putObject("terms").set(attributeName, crit.get("attributeValue")); + break; + + case "NOT_IN": + termsNode = ((ArrayNode) query.get("bool").get(condition.equals("AND") ? "filter" : "should")).addObject(); + termsNode.putObject("bool").putObject("must_not").putObject("terms").put(attributeName, crit.get("attributeValue")); + break; + + default: LOG.warn("Found unknown operator {}", operator); + } + } + } + RequestContext.get().endMetricRecord(convertJsonToQueryMetrics); + return query; + } +} diff --git a/repository/src/main/java/org/apache/atlas/authorizer/NewAuthorizerUtils.java b/repository/src/main/java/org/apache/atlas/authorizer/NewAuthorizerUtils.java new file mode 100644 index 0000000000..dd40df8d36 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/authorizer/NewAuthorizerUtils.java @@ -0,0 +1,22 @@ +package org.apache.atlas.authorizer; + +import org.apache.atlas.authorizer.authorizers.ListAuthorizer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; +import java.util.Map; + +public class NewAuthorizerUtils { + private static final Logger LOG = LoggerFactory.getLogger(NewAuthorizerUtils.class); + + public static final String POLICY_TYPE_ALLOW = "allow"; + public static final String POLICY_TYPE_DENY = "deny"; + public static final int MAX_CLAUSE_LIMIT = 1024; + + public static final String DENY_POLICY_NAME_SUFFIX = "_deny"; + + public static Map getPreFilterDsl(String persona, String purpose, List actions) { + return ListAuthorizer.getElasticsearchDSL(persona, purpose, actions); + } +} diff --git a/repository/src/main/java/org/apache/atlas/authorizer/authorizers/AuthorizerCommon.java b/repository/src/main/java/org/apache/atlas/authorizer/authorizers/AuthorizerCommon.java new file mode 100644 index 0000000000..2cfac9f6db --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/authorizer/authorizers/AuthorizerCommon.java @@ -0,0 +1,50 @@ +package org.apache.atlas.authorizer.authorizers; + +import org.apache.atlas.repository.graphdb.AtlasGraph; +import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; +import org.apache.atlas.type.AtlasTypeRegistry; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.stereotype.Component; + +import javax.inject.Inject; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +@Component +public class AuthorizerCommon { + private static final Logger LOG = 
LoggerFactory.getLogger(AuthorizerCommon.class); + + private static AtlasTypeRegistry typeRegistry; + private static EntityGraphRetriever entityRetriever; + + @Inject + public AuthorizerCommon(AtlasGraph graph, AtlasTypeRegistry typeRegistry) { + this.typeRegistry = typeRegistry; + this.entityRetriever = new EntityGraphRetriever(graph, typeRegistry, true); + } + + public static String getCurrentUserName() { + Authentication auth = SecurityContextHolder.getContext().getAuthentication(); + + return auth != null ? auth.getName() : ""; + } + + public static boolean arrayListContains(List listA, List listB) { + for (String listAItem : listA){ + if (listB.contains(listAItem)) { + return true; + } + } + return false; + } + + public static Map getMap(String key, Object value) { + Map map = new HashMap<>(); + map.put(key, value); + return map; + } +} diff --git a/repository/src/main/java/org/apache/atlas/authorizer/authorizers/ListAuthorizer.java b/repository/src/main/java/org/apache/atlas/authorizer/authorizers/ListAuthorizer.java new file mode 100644 index 0000000000..4e41bebfcd --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/authorizer/authorizers/ListAuthorizer.java @@ -0,0 +1,314 @@ +package org.apache.atlas.authorizer.authorizers; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.Lists; +import org.apache.atlas.RequestContext; +import org.apache.atlas.authorize.AtlasAuthorizationUtils; +import org.apache.atlas.authorizer.JsonToElasticsearchQuery; +import org.apache.atlas.authorizer.store.PoliciesStore; +import org.apache.atlas.plugin.model.RangerPolicy; +import org.apache.atlas.type.AtlasType; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections.MapUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Base64; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.apache.atlas.authorizer.NewAuthorizerUtils.DENY_POLICY_NAME_SUFFIX; +import static org.apache.atlas.authorizer.NewAuthorizerUtils.MAX_CLAUSE_LIMIT; +import static org.apache.atlas.authorizer.NewAuthorizerUtils.POLICY_TYPE_ALLOW; +import static org.apache.atlas.authorizer.NewAuthorizerUtils.POLICY_TYPE_DENY; +import static org.apache.atlas.authorizer.authorizers.AuthorizerCommon.getMap; + +public class ListAuthorizer { + private static final Logger LOG = LoggerFactory.getLogger(AtlasAuthorizationUtils.class); + + public static Map getElasticsearchDSL(String persona, String purpose, List actions) { + AtlasPerfMetrics.MetricRecorder recorder = RequestContext.get().startMetricRecord("ListAuthorizer.getElasticsearchDSL"); + Map allowDsl = getElasticsearchDSLForPolicyType(persona, purpose, actions, false, POLICY_TYPE_ALLOW); + Map denyDsl = getElasticsearchDSLForPolicyType(persona, purpose, actions, false, POLICY_TYPE_DENY); + Map finaDsl = new HashMap<>(); + if (allowDsl != null) { + finaDsl.put("filter", allowDsl); + } + if (denyDsl != null) { + finaDsl.put("must_not", denyDsl); + } + + RequestContext.get().endMetricRecord(recorder); + return getMap("bool", finaDsl); + } + + public static Map getElasticsearchDSLForPolicyType(String persona, String purpose, + List actions, boolean requestMatchedPolicyId, + String policyType) { + 
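// Hedged sketch of the pre-filter produced by getElasticsearchDSL(...) above (the persona value and the
// action name are illustrative):
Map<String, Object> preFilter = NewAuthorizerUtils.getPreFilterDsl("persona-qn", null, Arrays.asList("entity-read"));
// Rough shape of the result: {"bool":{"filter": <allow-side bool>, "must_not": <deny-side bool>}}.
// When no allow policy applies, the allow side degrades to {"bool":{"must_not":{"match_all":{}}}} so the
// caller matches nothing by default; when no deny policy applies, the "must_not" key is simply omitted.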
AtlasPerfMetrics.MetricRecorder recorder = RequestContext.get().startMetricRecord("ListAuthorizer.getElasticsearchDSLForPolicyType."+ policyType); + + List resourcePolicies = PoliciesStore.getRelevantPolicies(persona, purpose, "atlas", actions, policyType); + List tagPolicies = PoliciesStore.getRelevantPolicies(persona, purpose, "atlas_tag", actions, policyType); + + List> shouldClauses = new ArrayList<>(); + if (requestMatchedPolicyId) { + shouldClauses.addAll(getDSLForResourcePoliciesPerPolicy(resourcePolicies)); + shouldClauses.addAll(getDSLForTagPoliciesPerPolicy(tagPolicies)); + } else { + shouldClauses.addAll(getDSLForResourcePolicies(resourcePolicies)); + Map tagDsl = getDSLForTagPolicies(tagPolicies); + if (MapUtils.isNotEmpty(tagDsl)) { + shouldClauses.add(tagDsl); + } + } + + //LOG.info("Applicable policies to user {}", resourcePolicies.size() + tagPolicies.size()); + + Map boolClause = new HashMap<>(); + if (shouldClauses.isEmpty()) { + if (POLICY_TYPE_ALLOW.equals(policyType)) { + boolClause.put("must_not", getMap("match_all", new HashMap<>())); + } else { + return null; + } + + } else { + if (shouldClauses.size() > MAX_CLAUSE_LIMIT) { + List> splittedShould = new ArrayList<>(); + List>> partitionedShouldClause = Lists.partition(shouldClauses, MAX_CLAUSE_LIMIT); + + for (List> chunk : partitionedShouldClause) { + splittedShould.add(getMap("bool", getMap("should", chunk))); + } + boolClause.put("should", splittedShould); + + } else { + boolClause.put("should", shouldClauses); + } + + boolClause.put("minimum_should_match", 1); + } + + RequestContext.get().endMetricRecord(recorder); + return getMap("bool", boolClause); + } + + private static List> getDSLForResourcePolicies(List policies) { + + // To reduce the number of clauses + List combinedEntities = new ArrayList<>(); + Set combinedEntityTypes = new HashSet<>(); + List> shouldClauses = new ArrayList<>(); + + for (RangerPolicy policy : policies) { + if (MapUtils.isNotEmpty(policy.getResources())) { + List entities = new ArrayList<>(0); + List entityTypesRaw = new ArrayList<>(0); + + if (policy.getResources().get("entity") != null) { + entities = policy.getResources().get("entity").getValues(); + } + + if (policy.getResources().get("entity-type") != null) { + entityTypesRaw = policy.getResources().get("entity-type").getValues(); + } + + if (entities.contains("*") && entityTypesRaw.contains("*")) { + Map emptyMap = new HashMap<>(); + shouldClauses.clear(); + shouldClauses.add(getMap("match_all",emptyMap)); + break; + } + + entities.remove("*"); + entityTypesRaw.remove("*"); + + //Set entityTypes = new HashSet<>(); + //entityTypesRaw.forEach(x -> entityTypes.addAll(AuthorizerCommon.getTypeAndSupertypesList(x))); + + if (!entities.isEmpty() && entityTypesRaw.isEmpty()) { + combinedEntities.addAll(entities); + } else if (entities.isEmpty() && !entityTypesRaw.isEmpty()) { + combinedEntityTypes.addAll(entityTypesRaw); + } else if (!entities.isEmpty() && !entityTypesRaw.isEmpty()) { + Map dslForPolicyResources = getDSLForResources(entities, new HashSet<>(entityTypesRaw), null, null); + shouldClauses.add(dslForPolicyResources); + } + } + } + if (!combinedEntities.isEmpty()) { + shouldClauses.add(getDSLForResources(combinedEntities, new HashSet<>(), null, null)); + } + if (!combinedEntityTypes.isEmpty()) { + shouldClauses.add(getDSLForResources(new ArrayList<>(), combinedEntityTypes, null, null)); + } + return shouldClauses; + } + + public static Map getDSLForResources(List entities, Set typeNames, List classifications, String 
clauseName){ + List> shouldClauses = new ArrayList<>(); + List termsQualifiedNames = new ArrayList<>(); + for (String entity: entities) { + if (!entity.equals("*")) { + if (entity.contains("*") || entity.contains("?")) { + shouldClauses.add(getMap("wildcard", getMap("qualifiedName", entity))); + } else { + termsQualifiedNames.add(entity); + } + } + } + if (!termsQualifiedNames.isEmpty()) { + shouldClauses.add(getMap("terms", getMap("qualifiedName", termsQualifiedNames))); + } + + Map boolClause = new HashMap<>(); + + if (!shouldClauses.isEmpty()) { + boolClause.put("should", shouldClauses); + boolClause.put("minimum_should_match", 1); + } + + List> filterClauses = new ArrayList<>(); + + if (!typeNames.isEmpty() && !typeNames.contains("*")) { + List> typeClauses = new ArrayList<>(); + typeClauses.add(getMap("terms", getMap("__typeName.keyword", typeNames))); + typeClauses.add(getMap("terms", getMap("__superTypeNames.keyword", typeNames))); + + filterClauses.add(getMap("bool", getMap("should", typeClauses))); + } + + if (classifications != null && !classifications.isEmpty() && !classifications.contains("*")) { + List> classificationClauses = new ArrayList<>(); + + classificationClauses.add(getMap("terms", getMap("__traitNames", classifications))); + classificationClauses.add(getMap("terms", getMap("__propagatedTraitNames", classifications))); + + filterClauses.add(getMap("bool", getMap("should", classificationClauses))); + } + + if (!filterClauses.isEmpty()) { + boolClause.put("filter", filterClauses); + } + + if (clauseName != null) { + boolClause.put("_name", clauseName); + } + + return getMap("bool", boolClause); + } + + public static Map getDSLForTagPolicies(List policies) { + // To reduce the number of clauses + Set allTags = new HashSet<>(); + //LOG.info("Found {} tag policies", policies.size()); + + for (RangerPolicy policy : policies) { + if (MapUtils.isNotEmpty(policy.getResources())) { + //LOG.info("policy {}", AtlasType.toJson(policy)); + List tags = new ArrayList<>(0); + + if (policy.getResources().get("tag") != null) { + tags = policy.getResources().get("tag").getValues(); + } + + if (!tags.isEmpty()) { + allTags.addAll(tags); + } + } + } + if (!allTags.isEmpty()) { + return getDSLForTags(allTags); + } + return null; + } + + public static List> getDSLForResourcePoliciesPerPolicy(List policies) { + + List> shouldClauses = new ArrayList<>(); + + for (RangerPolicy policy : policies) { + if (MapUtils.isNotEmpty(policy.getResources())) { + List entities = new ArrayList<>(0); + List entityTypesRaw = new ArrayList<>(0); + + if (policy.getResources().get("entity") != null) { + entities = policy.getResources().get("entity").getValues(); + } + + if (policy.getResources().get("entity-type") != null) { + entityTypesRaw = policy.getResources().get("entity-type").getValues(); + } + + if (entities.contains("*") && entityTypesRaw.contains("*")) { + shouldClauses.clear(); + shouldClauses.add(getMap("match_all", getMap("_name", policy.getGuid() + getPolicySuffix(policy)))); + break; + } + + Map dslForPolicyResources = getDSLForResources(entities, new HashSet<>(entityTypesRaw), null, + policy.getGuid() + getPolicySuffix(policy)); + shouldClauses.add(dslForPolicyResources); + } + } + return shouldClauses; + } + + public static String getPolicySuffix(RangerPolicy policy) { + if (CollectionUtils.isNotEmpty(policy.getDenyPolicyItems())) { + return DENY_POLICY_NAME_SUFFIX; + } + return ""; + } + + public static List> getDSLForTagPoliciesPerPolicy(List policies) { + List> shouldClauses = new 
ArrayList<>(); + + //LOG.info("Found {} tag policies", policies.size()); + + for (RangerPolicy policy : policies) { + if (MapUtils.isNotEmpty(policy.getResources())) { + //LOG.info("policy {}", AtlasType.toJson(policy)); + List tags = new ArrayList<>(0); + if (policy.getResources().get("tag") != null) { + tags = policy.getResources().get("tag").getValues(); + } + + if (!tags.isEmpty()) { + + List> tagsClauses = new ArrayList<>(); + tagsClauses.add(getMap("terms", getMap("__traitNames", tags))); + tagsClauses.add(getMap("terms", getMap("__propagatedTraitNames", tags))); + + Map shouldMap = getMap("should", tagsClauses); + shouldMap.put("minimum_should_match", 1); + shouldMap.put("_name", policy.getGuid() + getPolicySuffix(policy)); + + Map boolClause = getMap("bool", shouldMap); + shouldClauses.add(boolClause); + } + } + } + + return shouldClauses; + } + + private static Map getDSLForTags(Set tags){ + List> shouldClauses = new ArrayList<>(); + shouldClauses.add(getMap("terms", getMap("__traitNames", tags))); + shouldClauses.add(getMap("terms", getMap("__propagatedTraitNames", tags))); + + Map boolClause = new HashMap<>(); + boolClause.put("should", shouldClauses); + boolClause.put("minimum_should_match", 1); + + return getMap("bool", boolClause); + } +} diff --git a/repository/src/main/java/org/apache/atlas/authorizer/store/PoliciesStore.java b/repository/src/main/java/org/apache/atlas/authorizer/store/PoliciesStore.java new file mode 100644 index 0000000000..0c00f88a54 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/authorizer/store/PoliciesStore.java @@ -0,0 +1,181 @@ +package org.apache.atlas.authorizer.store; + +import org.apache.atlas.RequestContext; +import org.apache.atlas.authorizer.authorizers.AuthorizerCommon; +import org.apache.atlas.plugin.model.RangerPolicy; +import org.apache.atlas.plugin.util.RangerRoles; +import org.apache.atlas.plugin.util.RangerUserStore; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +import static org.apache.atlas.authorizer.NewAuthorizerUtils.POLICY_TYPE_ALLOW; +import static org.apache.atlas.authorizer.NewAuthorizerUtils.POLICY_TYPE_DENY; + +public class PoliciesStore { + + private static final Logger LOG = LoggerFactory.getLogger(PoliciesStore.class); + + private static List resourcePolicies; + private static List tagPolicies; + + public static void setResourcePolicies(List resourcePolicies) { + PoliciesStore.resourcePolicies = resourcePolicies; + } + + private static List getResourcePolicies() { + return resourcePolicies; + } + + public static void setTagPolicies(List tagPolicies) { + PoliciesStore.tagPolicies = tagPolicies; + } + + private static List getTagPolicies() { + return tagPolicies; + } + + public static List getRelevantPolicies(String persona, String purpose, String serviceName, List actions, String policyType) { + return getRelevantPolicies(null, null, serviceName, actions, policyType, false); + } + + public static List getRelevantPolicies(String persona, String purpose, String serviceName, List actions, String policyType, boolean ignoreUser) { + AtlasPerfMetrics.MetricRecorder recorder = RequestContext.get().startMetricRecord("getRelevantPolicies"); + String policyQualifiedNamePrefix = null; + if (persona != null && !persona.isEmpty()) { + policyQualifiedNamePrefix = 
persona; + } else if (purpose != null && !purpose.isEmpty()) { + policyQualifiedNamePrefix = purpose; + } + + List policies = new ArrayList<>(); + if ("atlas".equals(serviceName)) { + policies = getResourcePolicies(); + } else if ("atlas_tag".equals(serviceName)) { + policies = getTagPolicies(); + } + + List filteredPolicies = null; + if (CollectionUtils.isNotEmpty(policies)) { + filteredPolicies = new ArrayList<>(policies); + filteredPolicies = getFilteredPoliciesForQualifiedName(filteredPolicies, policyQualifiedNamePrefix); + filteredPolicies = getFilteredPoliciesForActions(filteredPolicies, actions, policyType); + + if (!ignoreUser) { + String user = AuthorizerCommon.getCurrentUserName(); + LOG.info("Getting relevant policies for user: {}", user); + + RangerUserStore userStore = UsersStore.getUserStore(); + List groups = UsersStore.getGroupsForUser(user, userStore); + + RangerRoles allRoles = UsersStore.getAllRoles(); + List roles = UsersStore.getRolesForUser(user, allRoles); + roles.addAll(UsersStore.getNestedRolesForUser(roles, allRoles)); + + filteredPolicies = getFilteredPoliciesForUser(filteredPolicies, user, groups, roles, policyType); + } + } else { + filteredPolicies = new ArrayList<>(0); + } + + RequestContext.get().endMetricRecord(recorder); + return filteredPolicies; + } + + static List getFilteredPoliciesForQualifiedName(List policies, String qualifiedNamePrefix) { + AtlasPerfMetrics.MetricRecorder recorder = RequestContext.get().startMetricRecord("getFilteredPoliciesForQualifiedName"); + if (qualifiedNamePrefix != null && !qualifiedNamePrefix.isEmpty()) { + List filteredPolicies = new ArrayList<>(); + for(RangerPolicy policy : policies) { + if (policy.getName().startsWith(qualifiedNamePrefix)) { + filteredPolicies.add(policy); + } + } + return filteredPolicies; + } + + RequestContext.get().endMetricRecord(recorder); + return policies; + } + + private static List getFilteredPoliciesForActions(List policies, List actions, String type) { + AtlasPerfMetrics.MetricRecorder recorder = RequestContext.get().startMetricRecord("getFilteredPoliciesForActions"); + List filteredPolicies = new ArrayList<>(); + + + for(RangerPolicy policy : policies) { + RangerPolicy.RangerPolicyItem policyItem = null; + + if (StringUtils.isNotEmpty(type)) { + if (POLICY_TYPE_ALLOW.equals(type) && !policy.getPolicyItems().isEmpty()) { + policyItem = policy.getPolicyItems().get(0); + } else if (POLICY_TYPE_DENY.equals(type) && !policy.getDenyPolicyItems().isEmpty()) { + policyItem = policy.getDenyPolicyItems().get(0); + } + } else { + if (!policy.getPolicyItems().isEmpty()) { + policyItem = policy.getPolicyItems().get(0); + } else if (!policy.getDenyPolicyItems().isEmpty()) { + policyItem = policy.getDenyPolicyItems().get(0); + } + } + + if (policyItem != null) { + List policyActions = new ArrayList<>(); + if (!policyItem.getAccesses().isEmpty()) { + policyActions = policyItem.getAccesses().stream().map(x -> x.getType()).collect(Collectors.toList()); + } + if (AuthorizerCommon.arrayListContains(policyActions, actions)) { + filteredPolicies.add(policy); + } + } + } + + RequestContext.get().endMetricRecord(recorder); + return filteredPolicies; + } + + private static List getFilteredPoliciesForUser(List policies, String user, List groups, List roles, String type) { + AtlasPerfMetrics.MetricRecorder recorder = RequestContext.get().startMetricRecord("getFilteredPoliciesForUser"); + + List filterPolicies = new ArrayList<>(); + for(RangerPolicy policy : policies) { + RangerPolicy.RangerPolicyItem policyItem = 
null; + + if (StringUtils.isNotEmpty(type)) { + if (POLICY_TYPE_ALLOW.equals(type) && !policy.getPolicyItems().isEmpty()) { + policyItem = policy.getPolicyItems().get(0); + } else if (POLICY_TYPE_DENY.equals(type) && !policy.getDenyPolicyItems().isEmpty()) { + policyItem = policy.getDenyPolicyItems().get(0); + } + } else { + if (!policy.getPolicyItems().isEmpty()) { + policyItem = policy.getPolicyItems().get(0); + } else if (!policy.getDenyPolicyItems().isEmpty()) { + policyItem = policy.getDenyPolicyItems().get(0); + } + } + + if (policyItem != null) { + List policyUsers = policyItem.getUsers(); + List policyGroups = policyItem.getGroups(); + List policyRoles = policyItem.getRoles(); + if (policyUsers.contains(user) + || policyGroups.contains("public") + || AuthorizerCommon.arrayListContains(policyGroups, groups) + || AuthorizerCommon.arrayListContains(policyRoles, roles)) { + filterPolicies.add(policy); + } + } + } + + RequestContext.get().endMetricRecord(recorder); + return filterPolicies; + } +} diff --git a/repository/src/main/java/org/apache/atlas/authorizer/store/UsersStore.java b/repository/src/main/java/org/apache/atlas/authorizer/store/UsersStore.java new file mode 100644 index 0000000000..83d5f16848 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/authorizer/store/UsersStore.java @@ -0,0 +1,74 @@ +package org.apache.atlas.authorizer.store; + +import org.apache.atlas.authorizer.authorizers.AuthorizerCommon; +import org.apache.atlas.plugin.model.RangerRole; +import org.apache.atlas.plugin.util.RangerRoles; +import org.apache.atlas.plugin.util.RangerUserStore; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class UsersStore { + + private static RangerUserStore userStore; + private static RangerRoles allRoles; + + public static void setUserStore(RangerUserStore userStore) { + UsersStore.userStore = userStore; + } + + public static RangerUserStore getUserStore() { + return userStore; + } + + public static void setAllRoles(RangerRoles allRoles) { + UsersStore.allRoles = allRoles; + } + + public static RangerRoles getAllRoles() { + return allRoles; + } + + public static List getGroupsForUser(String user, RangerUserStore userStore) { + Map> userGroupMapping = userStore.getUserGroupMapping(); + List groups = new ArrayList<>(); + Set groupsSet = userGroupMapping.get(user); + if (groupsSet != null && !groupsSet.isEmpty()) { + groups.addAll(groupsSet); + } + return groups; + } + + public static List getRolesForUser(String user, RangerRoles allRoles) { + List roles = new ArrayList<>(); + Set rangerRoles = allRoles.getRangerRoles(); + for (RangerRole role : rangerRoles) { + List users = role.getUsers(); + for (RangerRole.RoleMember roleUser: users) { + if (roleUser.getName().equals(user)) { + roles.add(role.getName()); + } + } + } + return roles; + } + + public static List getNestedRolesForUser(List userRoles, RangerRoles allRoles) { + List ret = new ArrayList<>(); + Set rangerRoles = allRoles.getRangerRoles(); + for (RangerRole role : rangerRoles) { + List nestedRoles = role.getRoles(); + List nestedRolesName = new ArrayList<>(); + for (RangerRole.RoleMember nestedRole : nestedRoles) { + nestedRolesName.add(nestedRole.getName()); + } + if (AuthorizerCommon.arrayListContains(userRoles, nestedRolesName)) { + ret.add(role.getName()); + } + } + return ret; + } + +} diff --git a/repository/src/main/java/org/apache/atlas/discovery/EntityDiscoveryService.java 
b/repository/src/main/java/org/apache/atlas/discovery/EntityDiscoveryService.java index b1b120c3fb..5014a4ed9e 100644 --- a/repository/src/main/java/org/apache/atlas/discovery/EntityDiscoveryService.java +++ b/repository/src/main/java/org/apache/atlas/discovery/EntityDiscoveryService.java @@ -17,11 +17,15 @@ */ package org.apache.atlas.discovery; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.annotations.VisibleForTesting; import org.apache.atlas.*; import org.apache.atlas.annotation.GraphTransaction; import org.apache.atlas.authorize.AtlasAuthorizationUtils; import org.apache.atlas.authorize.AtlasSearchResultScrubRequest; +import org.apache.atlas.authorizer.NewAuthorizerUtils; import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.model.discovery.*; import org.apache.atlas.model.discovery.AtlasSearchResult.AtlasFullTextResult; @@ -37,14 +41,18 @@ import org.apache.atlas.query.executors.ScriptEngineBasedExecutor; import org.apache.atlas.query.executors.TraversalBasedExecutor; import org.apache.atlas.repository.Constants; +import org.apache.atlas.repository.audit.ESBasedAuditRepository; import org.apache.atlas.repository.graph.GraphBackedSearchIndexer; import org.apache.atlas.repository.graph.GraphHelper; +import org.apache.atlas.repository.audit.ESBasedAuditRepository; import org.apache.atlas.repository.graphdb.*; import org.apache.atlas.repository.graphdb.AtlasIndexQuery.Result; import org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2; import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; import org.apache.atlas.repository.userprofile.UserProfileService; +import org.apache.atlas.repository.util.AccessControlUtils; import org.apache.atlas.searchlog.ESSearchLogger; +import org.apache.atlas.service.FeatureFlagStore; import org.apache.atlas.stats.StatsClient; import org.apache.atlas.type.*; import org.apache.atlas.type.AtlasBuiltInTypes.AtlasObjectIdType; @@ -75,9 +83,9 @@ import static org.apache.atlas.SortOrder.ASCENDING; import static org.apache.atlas.model.instance.AtlasEntity.Status.ACTIVE; import static org.apache.atlas.model.instance.AtlasEntity.Status.DELETED; -import static org.apache.atlas.repository.Constants.ASSET_ENTITY_TYPE; -import static org.apache.atlas.repository.Constants.OWNER_ATTRIBUTE; -import static org.apache.atlas.repository.Constants.VERTEX_INDEX_NAME; +import static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.util.AccessControlUtils.ACCESS_READ_DOMAIN; +import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; import static org.apache.atlas.util.AtlasGremlinQueryProvider.AtlasGremlinQuery.BASIC_SEARCH_STATE_FILTER; import static org.apache.atlas.util.AtlasGremlinQueryProvider.AtlasGremlinQuery.TO_RANGE_LIST; @@ -103,11 +111,11 @@ public class EntityDiscoveryService implements AtlasDiscoveryService { @Inject public EntityDiscoveryService(AtlasTypeRegistry typeRegistry, - AtlasGraph graph, - GraphBackedSearchIndexer indexer, - SearchTracker searchTracker, - UserProfileService userProfileService, - StatsClient statsClient) throws AtlasException { + AtlasGraph graph, + GraphBackedSearchIndexer indexer, + SearchTracker searchTracker, + UserProfileService userProfileService, + StatsClient statsClient) throws AtlasException { this.graph = graph; this.entityRetriever = new EntityGraphRetriever(this.graph, typeRegistry); this.indexer = 
indexer; @@ -996,6 +1004,10 @@ public AtlasSearchResult directIndexSearch(SearchParams searchParams) throws Atl String indexName = getIndexName(params); indexQuery = graph.elasticsearchQuery(indexName); + if (searchParams.getEnableFullRestriction()) { + addPreFiltersToSearchQuery(searchParams); + } + //LOG.info(searchParams.getQuery()); AtlasPerfMetrics.MetricRecorder elasticSearchQueryMetric = RequestContext.get().startMetricRecord("elasticSearchQuery"); DirectIndexQueryResult indexQueryResult = indexQuery.vertices(searchParams); if (indexQueryResult == null) { @@ -1098,8 +1110,10 @@ private void prepareSearchResult(AtlasSearchResult ret, DirectIndexQueryResult i header.setCollapse(collapse); } } - - if (searchParams.isShowHighlights()) { + if (searchParams.getShowSearchMetadata()) { + ret.addHighlights(header.getGuid(), result.getHighLights()); + ret.addSort(header.getGuid(), result.getSort()); + } else if (searchParams.getShowHighlights()) { ret.addHighlights(header.getGuid(), result.getHighLights()); } @@ -1108,7 +1122,10 @@ private void prepareSearchResult(AtlasSearchResult ret, DirectIndexQueryResult i } catch (Exception e) { throw e; } - scrubSearchResults(ret, searchParams.getSuppressLogs()); + + if (!searchParams.getEnableFullRestriction()) { + scrubSearchResults(ret, searchParams.getSuppressLogs()); + } } private Map getMap(String key, Object value) { @@ -1134,8 +1151,10 @@ public List searchUsingTermQualifiedName(int from, int size, } private String getIndexName(IndexSearchParams params) throws AtlasBaseException { + String vertexIndexName = getESIndex(); + if (StringUtils.isEmpty(params.getPersona()) && StringUtils.isEmpty(params.getPurpose())) { - return VERTEX_INDEX_NAME; + return vertexIndexName; } String qualifiedName = ""; @@ -1145,13 +1164,12 @@ private String getIndexName(IndexSearchParams params) throws AtlasBaseException qualifiedName = params.getPurpose(); } - String[] parts = qualifiedName.split("/"); - String aliasName = parts[parts.length - 1]; + String aliasName = AccessControlUtils.getESAliasName(qualifiedName); if (StringUtils.isNotEmpty(aliasName)) { if(params.isAccessControlExclusive()) { accessControlExclusiveDsl(params, aliasName); - aliasName = aliasName+","+VERTEX_INDEX_NAME; + aliasName = aliasName+","+vertexIndexName; } return aliasName; } else { @@ -1160,7 +1178,6 @@ private String getIndexName(IndexSearchParams params) throws AtlasBaseException } private void accessControlExclusiveDsl(IndexSearchParams params, String aliasName) { - List> mustClauses = new ArrayList<>(); Map clientQuery = (Map) params.getDsl().get("query"); @@ -1202,4 +1219,44 @@ private Map getStaticBoolQuery() { return getMap("bool", boolQuery); } + + private void addPreFiltersToSearchQuery(SearchParams searchParams) { + try { + String persona = ((IndexSearchParams) searchParams).getPersona(); + String purpose = ((IndexSearchParams) searchParams).getPurpose(); + + AtlasPerfMetrics.MetricRecorder addPreFiltersToSearchQueryMetric = RequestContext.get().startMetricRecord("addPreFiltersToSearchQuery"); + ObjectMapper mapper = new ObjectMapper(); + List> mustClauseList = new ArrayList<>(); + + List actions = new ArrayList<>(); + actions.add("entity-read"); + + Map allPreFiltersBoolClause = NewAuthorizerUtils.getPreFilterDsl(persona, purpose, actions); + mustClauseList.add(allPreFiltersBoolClause); + + mustClauseList.add((Map) ((IndexSearchParams) searchParams).getDsl().get("query")); + + String dslString = searchParams.getQuery(); + JsonNode node = mapper.readTree(dslString); + /*JsonNode 
userQueryNode = node.get("query"); + if (userQueryNode != null) { + + String userQueryString = userQueryNode.toString(); + + String userQueryBase64 = Base64.getEncoder().encodeToString(userQueryString.getBytes()); + mustClauseList.add(getMap("wrapper", getMap("query", userQueryBase64))); + }*/ + + JsonNode updateQueryNode = mapper.valueToTree(getMap("bool", getMap("must", mustClauseList))); + + ((ObjectNode) node).set("query", updateQueryNode); + searchParams.setQuery(node.toString()); + + RequestContext.get().endMetricRecord(addPreFiltersToSearchQueryMetric); + + } catch (Exception e) { + LOG.error("Error -> addPreFiltersToSearchQuery!", e); + } + } } diff --git a/repository/src/main/java/org/apache/atlas/discovery/EntityLineageService.java b/repository/src/main/java/org/apache/atlas/discovery/EntityLineageService.java index dfd74ea896..ede15a6dc3 100644 --- a/repository/src/main/java/org/apache/atlas/discovery/EntityLineageService.java +++ b/repository/src/main/java/org/apache/atlas/discovery/EntityLineageService.java @@ -69,11 +69,10 @@ import static org.apache.atlas.AtlasClient.DATA_SET_SUPER_TYPE; import static org.apache.atlas.AtlasClient.PROCESS_SUPER_TYPE; -import static org.apache.atlas.AtlasErrorCode.INSTANCE_LINEAGE_QUERY_FAILED; +import static org.apache.atlas.AtlasErrorCode.*; import static org.apache.atlas.model.instance.AtlasEntity.Status.DELETED; import static org.apache.atlas.model.lineage.AtlasLineageInfo.LineageDirection.*; -import static org.apache.atlas.repository.Constants.ACTIVE_STATE_VALUE; -import static org.apache.atlas.repository.Constants.RELATIONSHIP_GUID_PROPERTY_KEY; +import static org.apache.atlas.repository.Constants.*; import static org.apache.atlas.repository.graph.GraphHelper.*; import static org.apache.atlas.repository.graphdb.AtlasEdgeDirection.IN; import static org.apache.atlas.repository.graphdb.AtlasEdgeDirection.OUT; @@ -85,11 +84,25 @@ public class EntityLineageService implements AtlasLineageService { private static final String PROCESS_INPUTS_EDGE = "__Process.inputs"; private static final String PROCESS_OUTPUTS_EDGE = "__Process.outputs"; + private static final String OUTPUT_PORT_EDGE = "__Asset.outputPortDataProducts"; + private static final String INPUT_PORT_EDGE = "__Asset.inputPortDataProducts"; + + /** + * String[] => [Input edge Label, Output Edge Label] + */ + public static final HashMap LINEAGE_MAP = new HashMap(){{ + put(DATASET_PROCESS_LINEAGE, new String[]{PROCESS_INPUTS_EDGE, PROCESS_OUTPUTS_EDGE}); + put(PRODUCT_ASSET_LINEAGE, new String[]{OUTPUT_PORT_EDGE, INPUT_PORT_EDGE}); + }}; private static final String COLUMNS = "columns"; private static final boolean LINEAGE_USING_GREMLIN = AtlasConfiguration.LINEAGE_USING_GREMLIN.getBoolean(); private static final Integer DEFAULT_LINEAGE_MAX_NODE_COUNT = 9000; private static final int LINEAGE_ON_DEMAND_DEFAULT_DEPTH = 3; private static final String SEPARATOR = "->"; + public static final String IS_DATA_PRODUCT = "isDataProduct"; + public static final String IS_DATASET = "isProcess"; + public static final String PRODUCT_ASSET_LINEAGE = "ProductAssetLineage"; + public static final String DATASET_PROCESS_LINEAGE = "DatasetProcessLineage"; private final AtlasGraph graph; private final AtlasGremlinQueryProvider gremlinQueryProvider; @@ -176,8 +189,8 @@ public AtlasLineageOnDemandInfo getAtlasLineageInfo(String guid, LineageOnDemand RequestContext.get().setRelationAttrsForSearch(lineageOnDemandRequest.getRelationAttributes()); AtlasLineageOnDemandContext atlasLineageOnDemandContext = new 
AtlasLineageOnDemandContext(lineageOnDemandRequest, atlasTypeRegistry); - boolean isDataSet = validateEntityTypeAndCheckIfDataSet(guid); - AtlasLineageOnDemandInfo ret = getLineageInfoOnDemand(guid, atlasLineageOnDemandContext, isDataSet); + HashMap dataTypeMap = validateAndGetEntityTypeMap(guid); + AtlasLineageOnDemandInfo ret = getLineageInfoOnDemand(guid, atlasLineageOnDemandContext, dataTypeMap); appendLineageOnDemandPayload(ret, lineageOnDemandRequest); // filtering out on-demand relations which has input & output nodes within the limit cleanupRelationsOnDemand(ret); @@ -200,20 +213,24 @@ public AtlasLineageListInfo getLineageListInfoOnDemand(String guid, LineageListR return ret; } - private boolean validateEntityTypeAndCheckIfDataSet(String guid) throws AtlasBaseException { + private HashMap validateAndGetEntityTypeMap(String guid) throws AtlasBaseException { String typeName = entityRetriever.getEntityVertex(guid).getProperty(Constants.TYPE_NAME_PROPERTY_KEY, String.class); AtlasEntityType entityType = atlasTypeRegistry.getEntityTypeByName(typeName); if (entityType == null) { throw new AtlasBaseException(AtlasErrorCode.TYPE_NAME_NOT_FOUND, typeName); } + HashMap dataTypeMap = new HashMap<>(); boolean isProcess = entityType.getTypeAndAllSuperTypes().contains(PROCESS_SUPER_TYPE); + boolean isDataProduct = entityType.getTypeName().equals(DATA_PRODUCT_ENTITY_TYPE); + dataTypeMap.put(IS_DATA_PRODUCT, isDataProduct); + dataTypeMap.put(IS_DATASET, !isProcess); if (!isProcess) { boolean isDataSet = entityType.getTypeAndAllSuperTypes().contains(DATA_SET_SUPER_TYPE); if (!isDataSet) { throw new AtlasBaseException(AtlasErrorCode.INVALID_LINEAGE_ENTITY_TYPE, guid, typeName); } } - return !isProcess; + return dataTypeMap; } private LineageOnDemandConstraints getLineageConstraints(String guid, LineageOnDemandBaseParams defaultParams) { @@ -278,11 +295,15 @@ private void cleanupRelationsOnDemand(AtlasLineageOnDemandInfo lineageInfo) { } } - private AtlasLineageOnDemandInfo getLineageInfoOnDemand(String guid, AtlasLineageOnDemandContext atlasLineageOnDemandContext, boolean isDataSet) throws AtlasBaseException { + private AtlasLineageOnDemandInfo getLineageInfoOnDemand(String guid, AtlasLineageOnDemandContext atlasLineageOnDemandContext, HashMap dataTypeMap) throws AtlasBaseException { AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("getLineageInfoOnDemand"); + String lineageType = RequestContext.get().getLineageType(); + if(StringUtils.isEmpty(lineageType)) + lineageType = DATASET_PROCESS_LINEAGE; LineageOnDemandConstraints lineageConstraintsByGuid = getAndValidateLineageConstraintsByGuid(guid, atlasLineageOnDemandContext); AtlasLineageOnDemandInfo.LineageDirection direction = lineageConstraintsByGuid.getDirection(); + int level = 0; int depth = lineageConstraintsByGuid.getDepth(); AtlasLineageOnDemandInfo ret = initializeLineageOnDemandInfo(guid); @@ -293,33 +314,50 @@ private AtlasLineageOnDemandInfo getLineageInfoOnDemand(String guid, AtlasLineag AtomicInteger inputEntitiesTraversed = new AtomicInteger(0); AtomicInteger outputEntitiesTraversed = new AtomicInteger(0); - if (isDataSet) { - AtlasVertex datasetVertex = AtlasGraphUtilsV2.findByGuid(this.graph, guid); - if (direction == AtlasLineageOnDemandInfo.LineageDirection.INPUT || direction == AtlasLineageOnDemandInfo.LineageDirection.BOTH) - traverseEdgesOnDemand(datasetVertex, true, depth, new HashSet<>(), atlasLineageOnDemandContext, ret, guid, inputEntitiesTraversed); - if (direction == 
AtlasLineageOnDemandInfo.LineageDirection.OUTPUT || direction == AtlasLineageOnDemandInfo.LineageDirection.BOTH) - traverseEdgesOnDemand(datasetVertex, false, depth, new HashSet<>(), atlasLineageOnDemandContext, ret, guid, outputEntitiesTraversed); - AtlasEntityHeader baseEntityHeader = entityRetriever.toAtlasEntityHeader(datasetVertex, atlasLineageOnDemandContext.getAttributes()); - ret.getGuidEntityMap().put(guid, baseEntityHeader); - } else { - AtlasVertex processVertex = AtlasGraphUtilsV2.findByGuid(this.graph, guid); - // make one hop to the next dataset vertices from process vertex and traverse with 'depth = depth - 1' - if (direction == AtlasLineageOnDemandInfo.LineageDirection.INPUT || direction == AtlasLineageOnDemandInfo.LineageDirection.BOTH) { - Iterator processEdges = processVertex.getEdges(AtlasEdgeDirection.OUT, PROCESS_INPUTS_EDGE).iterator(); - traverseEdgesOnDemand(processEdges, true, depth, atlasLineageOnDemandContext, ret, processVertex, guid, inputEntitiesTraversed); - } - if (direction == AtlasLineageOnDemandInfo.LineageDirection.OUTPUT || direction == AtlasLineageOnDemandInfo.LineageDirection.BOTH) { - Iterator processEdges = processVertex.getEdges(AtlasEdgeDirection.OUT, PROCESS_OUTPUTS_EDGE).iterator(); - traverseEdgesOnDemand(processEdges, false, depth, atlasLineageOnDemandContext, ret, processVertex, guid, outputEntitiesTraversed); + AtomicInteger traversalOrder = new AtomicInteger(1); + String[] lineageEdgeLabels = LINEAGE_MAP.get(lineageType); + boolean isConnecterVertex; + + + isConnecterVertex = lineageType.equals(PRODUCT_ASSET_LINEAGE) + ? !dataTypeMap.get(IS_DATA_PRODUCT) + : !dataTypeMap.get(IS_DATASET); + + if (!isConnecterVertex) { + AtlasVertex datasetVertex = AtlasGraphUtilsV2.findByGuid(this.graph, guid); + if (direction == AtlasLineageOnDemandInfo.LineageDirection.INPUT || direction == AtlasLineageOnDemandInfo.LineageDirection.BOTH) + traverseEdgesOnDemand(datasetVertex, true, depth, level, new HashSet<>(), atlasLineageOnDemandContext, ret, guid, inputEntitiesTraversed, traversalOrder); + if (direction == AtlasLineageOnDemandInfo.LineageDirection.OUTPUT || direction == AtlasLineageOnDemandInfo.LineageDirection.BOTH) + traverseEdgesOnDemand(datasetVertex, false, depth, level, new HashSet<>(), atlasLineageOnDemandContext, ret, guid, outputEntitiesTraversed, traversalOrder); + AtlasEntityHeader baseEntityHeader = entityRetriever.toAtlasEntityHeader(datasetVertex, atlasLineageOnDemandContext.getAttributes()); + setGraphTraversalMetadata(level, traversalOrder, baseEntityHeader); + ret.getGuidEntityMap().put(guid, baseEntityHeader); + } else { + AtlasVertex processVertex = AtlasGraphUtilsV2.findByGuid(this.graph, guid); + // make one hop to the next dataset vertices from process vertex and traverse with 'depth = depth - 1' + if (direction == AtlasLineageOnDemandInfo.LineageDirection.INPUT || direction == AtlasLineageOnDemandInfo.LineageDirection.BOTH) { + Iterator processEdges = processVertex.getEdges(AtlasEdgeDirection.OUT, lineageEdgeLabels[0]).iterator(); + traverseEdgesOnDemand(processEdges, true, depth, level, atlasLineageOnDemandContext, ret, processVertex, guid, inputEntitiesTraversed, traversalOrder); + } + if (direction == AtlasLineageOnDemandInfo.LineageDirection.OUTPUT || direction == AtlasLineageOnDemandInfo.LineageDirection.BOTH) { + Iterator processEdges = processVertex.getEdges(AtlasEdgeDirection.OUT, lineageEdgeLabels[1]).iterator(); + traverseEdgesOnDemand(processEdges, false, depth, level, atlasLineageOnDemandContext, ret, processVertex, 
guid, outputEntitiesTraversed, traversalOrder); + } } - } RequestContext.get().endMetricRecord(metricRecorder); return ret; } + private static void setGraphTraversalMetadata(int level, AtomicInteger traversalOrder, AtlasEntityHeader baseEntityHeader) { + baseEntityHeader.setDepth(level); + baseEntityHeader.setTraversalOrder(0); + baseEntityHeader.setFinishTime(traversalOrder.get()); + } - private void traverseEdgesOnDemand(Iterator processEdges, boolean isInput, int depth, AtlasLineageOnDemandContext atlasLineageOnDemandContext, AtlasLineageOnDemandInfo ret, AtlasVertex processVertex, String baseGuid, AtomicInteger entitiesTraversed) throws AtlasBaseException { + private void traverseEdgesOnDemand(Iterator processEdges, boolean isInput, int depth, int level, AtlasLineageOnDemandContext atlasLineageOnDemandContext, AtlasLineageOnDemandInfo ret, AtlasVertex processVertex, String baseGuid, AtomicInteger entitiesTraversed, AtomicInteger traversalOrder) throws AtlasBaseException { AtlasLineageOnDemandInfo.LineageDirection direction = isInput ? AtlasLineageOnDemandInfo.LineageDirection.INPUT : AtlasLineageOnDemandInfo.LineageDirection.OUTPUT; + int nextLevel = isInput ? level - 1: level + 1; + String lineageType = RequestContext.get().getLineageType(); while (processEdges.hasNext()) { AtlasEdge processEdge = processEdges.next(); AtlasVertex datasetVertex = processEdge.getInVertex(); @@ -332,11 +370,12 @@ private void traverseEdgesOnDemand(Iterator processEdges, boolean isI continue; } - boolean isInputEdge = processEdge.getLabel().equalsIgnoreCase(PROCESS_INPUTS_EDGE); - if (incrementAndCheckIfRelationsLimitReached(processEdge, isInputEdge, atlasLineageOnDemandContext, ret, depth, entitiesTraversed, direction)) { + boolean isInputEdge = processEdge.getLabel().equalsIgnoreCase(LINEAGE_MAP.get(lineageType)[0]); + if (incrementAndCheckIfRelationsLimitReached(processEdge, isInputEdge, atlasLineageOnDemandContext, ret, depth, entitiesTraversed, direction, new HashSet<>())) { break; } else { - addEdgeToResult(processEdge, ret, atlasLineageOnDemandContext); + addEdgeToResult(processEdge, ret, atlasLineageOnDemandContext, nextLevel, traversalOrder); + traversalOrder.incrementAndGet(); } String inGuid = AtlasGraphUtilsV2.getIdFromVertex(datasetVertex); @@ -346,29 +385,31 @@ private void traverseEdgesOnDemand(Iterator processEdges, boolean isI ret.getRelationsOnDemand().put(inGuid, new LineageInfoOnDemand(inGuidLineageConstrains)); } - traverseEdgesOnDemand(datasetVertex, isInput, depth - 1, new HashSet<>(), atlasLineageOnDemandContext, ret, baseGuid, entitiesTraversed); + traverseEdgesOnDemand(datasetVertex, isInput, depth - 1, nextLevel, new HashSet<>(), atlasLineageOnDemandContext, ret, baseGuid, entitiesTraversed, traversalOrder); } } - private void traverseEdgesOnDemand(AtlasVertex datasetVertex, boolean isInput, int depth, Set visitedVertices, AtlasLineageOnDemandContext atlasLineageOnDemandContext, AtlasLineageOnDemandInfo ret, String baseGuid, AtomicInteger entitiesTraversed) throws AtlasBaseException { + private void traverseEdgesOnDemand(AtlasVertex datasetVertex, boolean isInput, int depth, int level, Set visitedVertices, AtlasLineageOnDemandContext atlasLineageOnDemandContext, AtlasLineageOnDemandInfo ret, String baseGuid, AtomicInteger entitiesTraversed, AtomicInteger traversalOrder) throws AtlasBaseException { if (isEntityTraversalLimitReached(entitiesTraversed)) return; + String lineageType = RequestContext.get().getLineageType(); if (depth != 0) { // base condition of recursion for depth 
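The traversal above threads three counters: depth counts remaining hops (the "if (depth != 0)" guard just above is the recursion's base case), level is the signed distance from the base entity (decremented upstream, incremented downstream via "isInput ? level - 1 : level + 1"), and traversalOrder is a monotonically increasing visit counter stamped onto entity headers. A standalone sketch of that bookkeeping, detached from the graph code (the table names and the UPSTREAM map are illustrative only, not part of the patch):

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

public class LevelDepthSketch {
    // Toy upstream graph: each node maps to the nodes feeding into it.
    private static final Map<String, List<String>> UPSTREAM = new HashMap<>();
    static {
        UPSTREAM.put("table_c", Arrays.asList("table_b"));
        UPSTREAM.put("table_b", Arrays.asList("table_a"));
        UPSTREAM.put("table_a", Collections.emptyList());
    }

    static void traverse(String node, boolean isInput, int depth, int level, AtomicInteger traversalOrder) {
        if (depth == 0) {   // same base case as traverseEdgesOnDemand: depth counts remaining hops
            return;
        }
        int nextLevel = isInput ? level - 1 : level + 1;   // upstream levels go negative, downstream positive
        for (String next : UPSTREAM.getOrDefault(node, Collections.emptyList())) {
            System.out.printf("visit=%s level=%d order=%d%n", next, nextLevel, traversalOrder.getAndIncrement());
            traverse(next, isInput, depth - 1, nextLevel, traversalOrder);
        }
    }

    public static void main(String[] args) {
        // Walk upstream (isInput = true) from table_c with depth 3; prints:
        // visit=table_b level=-1 order=1
        // visit=table_a level=-2 order=2
        traverse("table_c", true, 3, 0, new AtomicInteger(1));
    }
}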
AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("traverseEdgesOnDemand"); AtlasLineageOnDemandInfo.LineageDirection direction = isInput ? AtlasLineageOnDemandInfo.LineageDirection.INPUT : AtlasLineageOnDemandInfo.LineageDirection.OUTPUT; - + int nextLevel = isInput ? level - 1: level + 1; // keep track of visited vertices to avoid circular loop visitedVertices.add(getId(datasetVertex)); - + String[] edgeDirections = LINEAGE_MAP.get(lineageType); AtlasPerfMetrics.MetricRecorder traverseEdgesOnDemandGetEdgesIn = RequestContext.get().startMetricRecord("traverseEdgesOnDemandGetEdgesIn"); - Iterator incomingEdges = datasetVertex.getEdges(IN, isInput ? PROCESS_OUTPUTS_EDGE : PROCESS_INPUTS_EDGE).iterator(); + + Iterator incomingEdges = datasetVertex.getEdges(IN, isInput ? edgeDirections[1] : edgeDirections[0]).iterator(); RequestContext.get().endMetricRecord(traverseEdgesOnDemandGetEdgesIn); while (incomingEdges.hasNext()) { AtlasEdge incomingEdge = incomingEdges.next(); - AtlasVertex processVertex = incomingEdge.getOutVertex(); + AtlasVertex connecterVertex = incomingEdge.getOutVertex(); - if (!vertexMatchesEvaluation(processVertex, atlasLineageOnDemandContext) || !edgeMatchesEvaluation(incomingEdge, atlasLineageOnDemandContext)) { + if (!vertexMatchesEvaluation(connecterVertex, atlasLineageOnDemandContext) || !edgeMatchesEvaluation(incomingEdge, atlasLineageOnDemandContext)) { continue; } @@ -376,7 +417,7 @@ private void traverseEdgesOnDemand(AtlasVertex datasetVertex, boolean isInput, i continue; } - if (incrementAndCheckIfRelationsLimitReached(incomingEdge, !isInput, atlasLineageOnDemandContext, ret, depth, entitiesTraversed, direction)) { + if (incrementAndCheckIfRelationsLimitReached(incomingEdge, !isInput, atlasLineageOnDemandContext, ret, depth, entitiesTraversed, direction, visitedVertices)) { LineageInfoOnDemand entityOnDemandInfo = ret.getRelationsOnDemand().get(baseGuid); if (entityOnDemandInfo == null) continue; @@ -385,11 +426,11 @@ private void traverseEdgesOnDemand(AtlasVertex datasetVertex, boolean isInput, i else continue; } else { - addEdgeToResult(incomingEdge, ret, atlasLineageOnDemandContext); + addEdgeToResult(incomingEdge, ret, atlasLineageOnDemandContext, level, traversalOrder); } AtlasPerfMetrics.MetricRecorder traverseEdgesOnDemandGetEdgesOut = RequestContext.get().startMetricRecord("traverseEdgesOnDemandGetEdgesOut"); - Iterator outgoingEdges = processVertex.getEdges(OUT, isInput ? PROCESS_INPUTS_EDGE : PROCESS_OUTPUTS_EDGE).iterator(); + Iterator outgoingEdges = connecterVertex.getEdges(OUT, isInput ? 
edgeDirections[0] : edgeDirections[1]).iterator(); RequestContext.get().endMetricRecord(traverseEdgesOnDemandGetEdgesOut); while (outgoingEdges.hasNext()) { @@ -400,11 +441,11 @@ private void traverseEdgesOnDemand(AtlasVertex datasetVertex, boolean isInput, i continue; } - if (checkForOffset(outgoingEdge, processVertex, atlasLineageOnDemandContext, ret)) { + if (checkForOffset(outgoingEdge, connecterVertex, atlasLineageOnDemandContext, ret)) { continue; } - if (incrementAndCheckIfRelationsLimitReached(outgoingEdge, isInput, atlasLineageOnDemandContext, ret, depth, entitiesTraversed, direction)) { - String processGuid = AtlasGraphUtilsV2.getIdFromVertex(processVertex); + if (incrementAndCheckIfRelationsLimitReached(outgoingEdge, isInput, atlasLineageOnDemandContext, ret, depth, entitiesTraversed, direction, visitedVertices)) { + String processGuid = AtlasGraphUtilsV2.getIdFromVertex(connecterVertex); LineageInfoOnDemand entityOnDemandInfo = ret.getRelationsOnDemand().get(processGuid); if (entityOnDemandInfo == null) continue; @@ -413,13 +454,16 @@ private void traverseEdgesOnDemand(AtlasVertex datasetVertex, boolean isInput, i else continue; } else { - addEdgeToResult(outgoingEdge, ret, atlasLineageOnDemandContext); + addEdgeToResult(outgoingEdge, ret, atlasLineageOnDemandContext, nextLevel, traversalOrder); entitiesTraversed.incrementAndGet(); + traversalOrder.incrementAndGet(); if (isEntityTraversalLimitReached(entitiesTraversed)) setEntityLimitReachedFlag(isInput, ret); } if (entityVertex != null && !visitedVertices.contains(getId(entityVertex))) { - traverseEdgesOnDemand(entityVertex, isInput, depth - 1, visitedVertices, atlasLineageOnDemandContext, ret, baseGuid, entitiesTraversed); // execute inner depth + traverseEdgesOnDemand(entityVertex, isInput, depth - 1, nextLevel, visitedVertices, atlasLineageOnDemandContext, ret, baseGuid, entitiesTraversed, traversalOrder); // execute inner depth + AtlasEntityHeader traversedEntity = ret.getGuidEntityMap().get(AtlasGraphUtilsV2.getIdFromVertex(entityVertex)); + traversedEntity.setFinishTime(traversalOrder.get()); } } } @@ -437,18 +481,31 @@ private static void setEntityLimitReachedFlag(boolean isInput, AtlasLineageOnDem private void traverseEdgesUsingBFS(String baseGuid, AtlasLineageListContext lineageListContext, AtlasLineageListInfo ret) throws AtlasBaseException { AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("traverseEdgesUsingBFS"); - + String lineageType = RequestContext.get().getLineageType(); Set visitedVertices = new HashSet<>(); visitedVertices.add(baseGuid); Set skippedVertices = new HashSet<>(); Queue traversalQueue = new LinkedList<>(); AtlasVertex baseVertex = AtlasGraphUtilsV2.findByGuid(this.graph, baseGuid); - enqueueNeighbours(baseVertex, validateEntityTypeAndCheckIfDataSet(baseGuid), lineageListContext, traversalQueue, visitedVertices, skippedVertices); + HashMap dataTypeMap = validateAndGetEntityTypeMap(baseGuid); + if (StringUtils.isEmpty(lineageType)){ + lineageType = DATASET_PROCESS_LINEAGE; + } + boolean isNotConnecterVertex = lineageType.equals(PRODUCT_ASSET_LINEAGE) + ? dataTypeMap.get(IS_DATA_PRODUCT) + : dataTypeMap.get(IS_DATASET); + enqueueNeighbours(baseVertex, dataTypeMap, lineageListContext, traversalQueue, visitedVertices, skippedVertices); int currentDepth = 0; + int currentLevel = isNotConnecterVertex? 
0: 1; while (!traversalQueue.isEmpty() && !lineageListContext.isEntityLimitReached() && currentDepth < lineageListContext.getDepth()) { currentDepth++; + + // update level at every alternate depth + if ((isNotConnecterVertex && currentDepth % 2 != 0) || (!isNotConnecterVertex && currentDepth % 2 == 0)) + currentLevel++; + int entitiesInCurrentDepth = traversalQueue.size(); for (int i = 0; i < entitiesInCurrentDepth; i++) { if (lineageListContext.isEntityLimitReached()) @@ -459,20 +516,20 @@ private void traverseEdgesUsingBFS(String baseGuid, AtlasLineageListContext line if (Objects.isNull(currentVertex)) throw new AtlasBaseException("Found null vertex during lineage graph traversal for guid: " + currentGUID); - boolean isDataset = validateEntityTypeAndCheckIfDataSet(currentGUID); + HashMap currentEntityDataTypeMap = validateAndGetEntityTypeMap(currentGUID); if (!lineageListContext.evaluateVertexFilter(currentVertex)) { - enqueueNeighbours(currentVertex, isDataset, lineageListContext, traversalQueue, visitedVertices, skippedVertices); + enqueueNeighbours(currentVertex, currentEntityDataTypeMap, lineageListContext, traversalQueue, visitedVertices, skippedVertices); continue; } if (checkOffsetAndSkipEntity(lineageListContext, ret)) { skippedVertices.add(currentGUID); - enqueueNeighbours(currentVertex, isDataset, lineageListContext, traversalQueue, visitedVertices, skippedVertices); + enqueueNeighbours(currentVertex, currentEntityDataTypeMap, lineageListContext, traversalQueue, visitedVertices, skippedVertices); continue; } lineageListContext.incrementEntityCount(); - appendToResult(currentVertex, lineageListContext, ret); - enqueueNeighbours(currentVertex, isDataset, lineageListContext, traversalQueue, visitedVertices, skippedVertices); + appendToResult(currentVertex, lineageListContext, ret, currentLevel); + enqueueNeighbours(currentVertex, currentEntityDataTypeMap, lineageListContext, traversalQueue, visitedVertices, skippedVertices); if (isLastEntityInLastDepth(lineageListContext.getDepth(), currentDepth, entitiesInCurrentDepth, i)) { ret.setHasMore(false); lineageListContext.setHasMoreUpdated(true); @@ -486,14 +543,19 @@ private void traverseEdgesUsingBFS(String baseGuid, AtlasLineageListContext line RequestContext.get().endMetricRecord(metricRecorder); } - private void enqueueNeighbours(AtlasVertex currentVertex, boolean isDataset, AtlasLineageListContext lineageListContext, + private void enqueueNeighbours(AtlasVertex currentVertex, HashMap dataTypeMap, AtlasLineageListContext lineageListContext, Queue traversalQueue, Set visitedVertices, Set skippedVertices) { AtlasPerfMetrics.MetricRecorder traverseEdgesOnDemandGetEdges = RequestContext.get().startMetricRecord("traverseEdgesOnDemandGetEdges"); Iterator edges; - if (isDataset) - edges = currentVertex.getEdges(IN, isInputDirection(lineageListContext) ? PROCESS_OUTPUTS_EDGE : PROCESS_INPUTS_EDGE).iterator(); + String lineageType = RequestContext.get().getLineageType(); + boolean isConnecterVertex = lineageType.equals(PRODUCT_ASSET_LINEAGE) + ? !dataTypeMap.get(IS_DATA_PRODUCT) + : !dataTypeMap.get(IS_DATASET); + String[] edgeDirectionLabels = LINEAGE_MAP.get(lineageType); + if (!isConnecterVertex) + edges = currentVertex.getEdges(IN, isInputDirection(lineageListContext) ? edgeDirectionLabels[1] : edgeDirectionLabels[0]).iterator(); else - edges = currentVertex.getEdges(OUT, isInputDirection(lineageListContext) ? 
PROCESS_INPUTS_EDGE : PROCESS_OUTPUTS_EDGE).iterator(); + edges = currentVertex.getEdges(OUT, isInputDirection(lineageListContext) ? edgeDirectionLabels[0] : edgeDirectionLabels[1]).iterator(); RequestContext.get().endMetricRecord(traverseEdgesOnDemandGetEdges); while (edges.hasNext()) { @@ -501,7 +563,7 @@ private void enqueueNeighbours(AtlasVertex currentVertex, boolean isDataset, Atl if (!lineageListContext.evaluateTraversalFilter(currentEdge)) continue; AtlasVertex neighbourVertex; - if (isDataset) + if (!isConnecterVertex) neighbourVertex = currentEdge.getOutVertex(); else neighbourVertex = currentEdge.getInVertex(); @@ -518,8 +580,10 @@ private void enqueueNeighbours(AtlasVertex currentVertex, boolean isDataset, Atl } } - private void appendToResult(AtlasVertex currentVertex, AtlasLineageListContext lineageListContext, AtlasLineageListInfo ret) throws AtlasBaseException { - ret.getEntities().add(entityRetriever.toAtlasEntityHeader(currentVertex, lineageListContext.getAttributes())); + private void appendToResult(AtlasVertex currentVertex, AtlasLineageListContext lineageListContext, AtlasLineageListInfo ret, int currentLevel) throws AtlasBaseException { + AtlasEntityHeader entity = entityRetriever.toAtlasEntityHeader(currentVertex, lineageListContext.getAttributes()); + entity.setDepth(currentLevel); + ret.getEntities().add(entity); } private static void addEntitiesToCache(AtlasVertex vertex) { @@ -574,10 +638,8 @@ private static String getId(AtlasVertex vertex) { return vertex.getIdForDisplay(); } - private boolean incrementAndCheckIfRelationsLimitReached(AtlasEdge atlasEdge, boolean isInput, AtlasLineageOnDemandContext atlasLineageOnDemandContext, AtlasLineageOnDemandInfo ret, int depth, AtomicInteger entitiesTraversed, AtlasLineageOnDemandInfo.LineageDirection direction) { + private boolean incrementAndCheckIfRelationsLimitReached(AtlasEdge atlasEdge, boolean isInput, AtlasLineageOnDemandContext atlasLineageOnDemandContext, AtlasLineageOnDemandInfo ret, int depth, AtomicInteger entitiesTraversed, AtlasLineageOnDemandInfo.LineageDirection direction, Set visitedVertices) { AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("incrementAndCheckIfRelationsLimitReached"); - if (lineageContainsVisitedEdgeV2(ret, atlasEdge)) - return false; AtlasVertex inVertex = isInput ? atlasEdge.getOutVertex() : atlasEdge.getInVertex(); String inGuid = AtlasGraphUtilsV2.getIdFromVertex(inVertex); @@ -590,7 +652,7 @@ private boolean incrementAndCheckIfRelationsLimitReached(AtlasEdge atlasEdge, bo LineageInfoOnDemand inLineageInfo = ret.getRelationsOnDemand().containsKey(inGuid) ? ret.getRelationsOnDemand().get(inGuid) : new LineageInfoOnDemand(inGuidLineageConstraints); LineageInfoOnDemand outLineageInfo = ret.getRelationsOnDemand().containsKey(outGuid) ? 
ret.getRelationsOnDemand().get(outGuid) : new LineageInfoOnDemand(outGuidLineageConstraints); - setHorizontalPaginationFlags(isInput, atlasLineageOnDemandContext, ret, depth, entitiesTraversed, inVertex, inGuid, outVertex, outGuid, inLineageInfo, outLineageInfo); + setHorizontalPaginationFlags(isInput, atlasLineageOnDemandContext, ret, depth, entitiesTraversed, inVertex, inGuid, outVertex, outGuid, inLineageInfo, outLineageInfo, visitedVertices); boolean hasRelationsLimitReached = setVerticalPaginationFlags(entitiesTraversed, inLineageInfo, outLineageInfo); if (!hasRelationsLimitReached) { @@ -617,9 +679,9 @@ private boolean setVerticalPaginationFlags(AtomicInteger entitiesTraversed, Line return hasRelationsLimitReached; } - private void setHorizontalPaginationFlags(boolean isInput, AtlasLineageOnDemandContext atlasLineageOnDemandContext, AtlasLineageOnDemandInfo ret, int depth, AtomicInteger entitiesTraversed, AtlasVertex inVertex, String inGuid, AtlasVertex outVertex, String outGuid, LineageInfoOnDemand inLineageInfo, LineageInfoOnDemand outLineageInfo) { - boolean isOutVertexVisited = ret.getRelationsOnDemand().containsKey(outGuid); - boolean isInVertexVisited = ret.getRelationsOnDemand().containsKey(inGuid); + private void setHorizontalPaginationFlags(boolean isInput, AtlasLineageOnDemandContext atlasLineageOnDemandContext, AtlasLineageOnDemandInfo ret, int depth, AtomicInteger entitiesTraversed, AtlasVertex inVertex, String inGuid, AtlasVertex outVertex, String outGuid, LineageInfoOnDemand inLineageInfo, LineageInfoOnDemand outLineageInfo, Set visitedVertices) { + boolean isOutVertexVisited = visitedVertices.contains(getId(outVertex)); + boolean isInVertexVisited = visitedVertices.contains(getId(inVertex)); if (depth == 1 || entitiesTraversed.get() == getLineageMaxNodeAllowedCount()-1) { // is the vertex a leaf? if (isInput && ! isOutVertexVisited) setHasUpstream(atlasLineageOnDemandContext, outVertex, outLineageInfo); @@ -629,7 +691,8 @@ else if (!isInput && ! 
isInVertexVisited) } private void setHasDownstream(AtlasLineageOnDemandContext atlasLineageOnDemandContext, AtlasVertex inVertex, LineageInfoOnDemand inLineageInfo) { - List filteredEdges = getFilteredAtlasEdges(inVertex, IN, PROCESS_INPUTS_EDGE, atlasLineageOnDemandContext); + String lineageType = RequestContext.get().getLineageType(); + List filteredEdges = getFilteredAtlasEdges(inVertex, IN, LINEAGE_MAP.get(lineageType)[0], atlasLineageOnDemandContext); if (!filteredEdges.isEmpty()) { inLineageInfo.setHasDownstream(true); inLineageInfo.setTotalOutputRelationsCount(filteredEdges.size()); @@ -637,7 +700,8 @@ private void setHasDownstream(AtlasLineageOnDemandContext atlasLineageOnDemandCo } private void setHasUpstream(AtlasLineageOnDemandContext atlasLineageOnDemandContext, AtlasVertex outVertex, LineageInfoOnDemand outLineageInfo) { - List filteredEdges = getFilteredAtlasEdges(outVertex, IN, PROCESS_OUTPUTS_EDGE, atlasLineageOnDemandContext); + String lineageType = RequestContext.get().getLineageType(); + List filteredEdges = getFilteredAtlasEdges(outVertex, IN, LINEAGE_MAP.get(lineageType)[1], atlasLineageOnDemandContext); if (!filteredEdges.isEmpty()) { outLineageInfo.setHasUpstream(true); outLineageInfo.setTotalInputRelationsCount(filteredEdges.size()); @@ -656,7 +720,7 @@ private List getFilteredAtlasEdges(AtlasVertex outVertex, AtlasEdgeDi } private boolean isEntityTraversalLimitReached(AtomicInteger entitiesTraversed) { - return entitiesTraversed.get() == getLineageMaxNodeAllowedCount(); + return entitiesTraversed.get() >= getLineageMaxNodeAllowedCount(); } @Override @@ -796,7 +860,7 @@ private AtlasLineageInfo getLineageInfo(AtlasLineageContext lineageContext, Line private AtlasLineageInfo getLineageInfoV2(AtlasLineageContext lineageContext) throws AtlasBaseException { AtlasPerfMetrics.MetricRecorder metric = RequestContext.get().startMetricRecord("getLineageInfoV2"); - + String lineageType = RequestContext.get().getLineageType(); int depth = lineageContext.getDepth(); String guid = lineageContext.getGuid(); LineageDirection direction = lineageContext.getDirection(); @@ -823,7 +887,7 @@ private AtlasLineageInfo getLineageInfoV2(AtlasLineageContext lineageContext) th // make one hop to the next dataset vertices from process vertex and traverse with 'depth = depth - 1' if (direction == INPUT || direction == BOTH) { - Iterator processEdges = vertexEdgeCache.getEdges(processVertex, OUT, PROCESS_INPUTS_EDGE).iterator(); + Iterator processEdges = vertexEdgeCache.getEdges(processVertex, OUT, LINEAGE_MAP.get(lineageType)[0]).iterator(); List qualifyingEdges = getQualifyingProcessEdges(processEdges, lineageContext); ret.setHasChildrenForDirection(getGuid(processVertex), new LineageChildrenInfo(INPUT, hasMoreChildren(qualifyingEdges))); @@ -838,7 +902,7 @@ private AtlasLineageInfo getLineageInfoV2(AtlasLineageContext lineageContext) th } if (direction == OUTPUT || direction == BOTH) { - Iterator processEdges = vertexEdgeCache.getEdges(processVertex, OUT, PROCESS_OUTPUTS_EDGE).iterator(); + Iterator processEdges = vertexEdgeCache.getEdges(processVertex, OUT, LINEAGE_MAP.get(lineageType)[1]).iterator(); List qualifyingEdges = getQualifyingProcessEdges(processEdges, lineageContext); ret.setHasChildrenForDirection(getGuid(processVertex), new LineageChildrenInfo(OUTPUT, hasMoreChildren(qualifyingEdges))); @@ -877,9 +941,9 @@ private void addEdgeToResult(AtlasEdge edge, AtlasLineageInfo lineageInfo, } } - private void addEdgeToResult(AtlasEdge edge, AtlasLineageOnDemandInfo lineageInfo, 
AtlasLineageOnDemandContext atlasLineageOnDemandContext) throws AtlasBaseException { + private void addEdgeToResult(AtlasEdge edge, AtlasLineageOnDemandInfo lineageInfo, AtlasLineageOnDemandContext atlasLineageOnDemandContext, int level, AtomicInteger traversalOrder) throws AtlasBaseException { if (!lineageContainsVisitedEdgeV2(lineageInfo, edge)) { - processEdge(edge, lineageInfo, atlasLineageOnDemandContext); + processEdge(edge, lineageInfo, atlasLineageOnDemandContext, level, traversalOrder); } } @@ -888,12 +952,13 @@ private int getLineageMaxNodeAllowedCount() { } private String getEdgeLabel(AtlasEdge edge) { + String lineageType = RequestContext.get().getLineageType(); AtlasVertex inVertex = edge.getInVertex(); AtlasVertex outVertex = edge.getOutVertex(); String inGuid = AtlasGraphUtilsV2.getIdFromVertex(inVertex); String outGuid = AtlasGraphUtilsV2.getIdFromVertex(outVertex); String relationGuid = AtlasGraphUtilsV2.getEncodedProperty(edge, RELATIONSHIP_GUID_PROPERTY_KEY, String.class); - boolean isInputEdge = edge.getLabel().equalsIgnoreCase(PROCESS_INPUTS_EDGE); + boolean isInputEdge = edge.getLabel().equalsIgnoreCase(LINEAGE_MAP.get(lineageType)[0]); if (isLineageOnDemandEnabled()) { return getEdgeLabelFromGuids(isInputEdge, inGuid, outGuid); @@ -1064,10 +1129,10 @@ private List> getUnvisitedProcessEdgesWithOutputVertexId lineageContext.getIgnoredProcesses().contains(processVertex.getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY, String.class))) { return Collections.emptyList(); } - + String lineageType = RequestContext.get().getLineageType(); List> unvisitedProcessEdgesWithOutputVertexIds = new ArrayList<>(); - Iterable outgoingEdges = vertexEdgeCache.getEdges(processVertex, OUT, isInput ? PROCESS_INPUTS_EDGE : PROCESS_OUTPUTS_EDGE); + Iterable outgoingEdges = vertexEdgeCache.getEdges(processVertex, OUT, isInput ? LINEAGE_MAP.get(lineageType)[0] : LINEAGE_MAP.get(lineageType)[1]); for (AtlasEdge outgoingEdge : outgoingEdges) { AtlasVertex outputVertex = outgoingEdge.getInVertex(); @@ -1154,7 +1219,8 @@ private void addLimitlessVerticesToResult(boolean isInput, int depth, Set processEdges = vertexEdgeCache.getEdges(currentVertex, IN, isInput ? PROCESS_OUTPUTS_EDGE : PROCESS_INPUTS_EDGE); + String lineageType = RequestContext.get().getLineageType(); + List processEdges = vertexEdgeCache.getEdges(currentVertex, IN, isInput ? LINEAGE_MAP.get(lineageType)[1] : LINEAGE_MAP.get(lineageType)[0]); // Filter lineages based on ignored process types processEdges = CollectionUtils.isNotEmpty(lineageContext.getIgnoredProcesses()) ? @@ -1175,8 +1241,9 @@ private void processLastLevel(AtlasVertex currentVertex, boolean isInput, AtlasL private boolean childHasOnlySelfCycle(AtlasVertex processVertex, AtlasVertex currentVertex, boolean isInput) { AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("childHasSelfCycle"); + String lineageType = RequestContext.get().getLineageType(); Iterator processEdgeIterator; - processEdgeIterator = processVertex.getEdges(OUT, isInput ? PROCESS_INPUTS_EDGE : PROCESS_OUTPUTS_EDGE).iterator(); + processEdgeIterator = processVertex.getEdges(OUT, isInput ? 
LINEAGE_MAP.get(lineageType)[0] : LINEAGE_MAP.get(lineageType)[1]).iterator(); Set processOutputEdges = new HashSet<>(); while (processEdgeIterator.hasNext()) { processOutputEdges.add(processEdgeIterator.next()); @@ -1192,8 +1259,8 @@ private List getEdgesOfProcess(boolean isInput, AtlasLineageContext l lineageContext.getIgnoredProcesses().contains(processVertex.getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY, String.class))) { return Collections.emptyList(); } - - return vertexEdgeCache.getEdges(processVertex, OUT, isInput ? PROCESS_INPUTS_EDGE : PROCESS_OUTPUTS_EDGE) + String lineageType = RequestContext.get().getLineageType(); + return vertexEdgeCache.getEdges(processVertex, OUT, isInput ? LINEAGE_MAP.get(lineageType)[0] : LINEAGE_MAP.get(lineageType)[1]) .stream() .filter(edge -> shouldProcessEdge(lineageContext, edge) && vertexMatchesEvaluation(edge.getInVertex(), lineageContext)) .sorted(Comparator.comparing(edge -> edge.getProperty("_r__guid", String.class))) @@ -1218,8 +1285,9 @@ private boolean shouldProcessEdge(AtlasLineageContext lineageContext, AtlasEdge } private List getEdgesOfCurrentVertex(AtlasVertex currentVertex, boolean isInput, AtlasLineageContext lineageContext) { + String lineageType = RequestContext.get().getLineageType(); return vertexEdgeCache - .getEdges(currentVertex, IN, isInput ? PROCESS_OUTPUTS_EDGE : PROCESS_INPUTS_EDGE) + .getEdges(currentVertex, IN, isInput ? LINEAGE_MAP.get(lineageType)[1] : LINEAGE_MAP.get(lineageType)[0]) .stream() .sorted(Comparator.comparing(edge -> edge.getProperty("_r__guid", String.class))) .filter(edge -> shouldProcessEdge(lineageContext, edge)) @@ -1308,7 +1376,7 @@ private boolean processVirtualEdge(final AtlasEdge incomingEdge, final AtlasEdge AtlasLineageContext lineageContext) throws AtlasBaseException { final Map entities = lineageInfo.getGuidEntityMap(); final Set relations = lineageInfo.getRelations(); - + String lineageType = RequestContext.get().getLineageType(); AtlasVertex inVertex = incomingEdge.getInVertex(); AtlasVertex outVertex = outgoingEdge.getInVertex(); AtlasVertex processVertex = outgoingEdge.getOutVertex(); @@ -1316,7 +1384,7 @@ private boolean processVirtualEdge(final AtlasEdge incomingEdge, final AtlasEdge String outGuid = AtlasGraphUtilsV2.getIdFromVertex(outVertex); String processGuid = AtlasGraphUtilsV2.getIdFromVertex(processVertex); String relationGuid = null; - boolean isInputEdge = incomingEdge.getLabel().equalsIgnoreCase(PROCESS_INPUTS_EDGE); + boolean isInputEdge = incomingEdge.getLabel().equalsIgnoreCase(LINEAGE_MAP.get(lineageType)[0]); if (!entities.containsKey(inGuid)) { AtlasEntityHeader entityHeader = entityRetriever.toAtlasEntityHeader(inVertex, lineageContext.getAttributes()); @@ -1347,7 +1415,7 @@ private boolean processVirtualEdge(final AtlasEdge incomingEdge, final AtlasEdge private void processEdges(final AtlasEdge incomingEdge, AtlasEdge outgoingEdge, AtlasLineageInfo lineageInfo, AtlasLineageContext lineageContext) throws AtlasBaseException { AtlasPerfMetrics.MetricRecorder metric = RequestContext.get().startMetricRecord("processEdges"); - + String lineageType = RequestContext.get().getLineageType(); final Map entities = lineageInfo.getGuidEntityMap(); final Set relations = lineageInfo.getRelations(); @@ -1375,14 +1443,14 @@ private void processEdges(final AtlasEdge incomingEdge, AtlasEdge outgoingEdge, } String relationGuid = AtlasGraphUtilsV2.getEncodedProperty(incomingEdge, RELATIONSHIP_GUID_PROPERTY_KEY, String.class); - if 
(incomingEdge.getLabel().equalsIgnoreCase(PROCESS_INPUTS_EDGE)) { + if (incomingEdge.getLabel().equalsIgnoreCase(LINEAGE_MAP.get(lineageType)[0])) { relations.add(new LineageRelation(leftGuid, processGuid, relationGuid)); } else { relations.add(new LineageRelation(processGuid, leftGuid, relationGuid)); } relationGuid = AtlasGraphUtilsV2.getEncodedProperty(outgoingEdge, RELATIONSHIP_GUID_PROPERTY_KEY, String.class); - if (outgoingEdge.getLabel().equalsIgnoreCase(PROCESS_INPUTS_EDGE)) { + if (outgoingEdge.getLabel().equalsIgnoreCase(LINEAGE_MAP.get(lineageType)[0])) { relations.add(new LineageRelation(rightGuid, processGuid, relationGuid)); } else { relations.add(new LineageRelation(processGuid, rightGuid, relationGuid)); @@ -1393,7 +1461,7 @@ private void processEdges(final AtlasEdge incomingEdge, AtlasEdge outgoingEdge, private void processEdge(final AtlasEdge edge, AtlasLineageInfo lineageInfo, AtlasLineageContext lineageContext) throws AtlasBaseException { AtlasPerfMetrics.MetricRecorder metric = RequestContext.get().startMetricRecord("processEdge"); - + String lineageType = RequestContext.get().getLineageType(); final Map entities = lineageInfo.getGuidEntityMap(); final Set relations = lineageInfo.getRelations(); @@ -1402,7 +1470,7 @@ private void processEdge(final AtlasEdge edge, AtlasLineageInfo lineageInfo, String inGuid = AtlasGraphUtilsV2.getIdFromVertex(inVertex); String outGuid = AtlasGraphUtilsV2.getIdFromVertex(outVertex); String relationGuid = AtlasGraphUtilsV2.getEncodedProperty(edge, RELATIONSHIP_GUID_PROPERTY_KEY, String.class); - boolean isInputEdge = edge.getLabel().equalsIgnoreCase(PROCESS_INPUTS_EDGE); + boolean isInputEdge = edge.getLabel().equalsIgnoreCase(LINEAGE_MAP.get(lineageType)[0]); if (!entities.containsKey(inGuid)) { AtlasEntityHeader entityHeader = entityRetriever.toAtlasEntityHeaderWithClassifications(inVertex, lineageContext.getAttributes()); @@ -1425,12 +1493,13 @@ private void processEdge(final AtlasEdge edge, AtlasLineageInfo lineageInfo, private void processEdge(final AtlasEdge edge, final Map entities, final Set relations, AtlasLineageContext lineageContext) throws AtlasBaseException { //Backward compatibility method + String lineageType = RequestContext.get().getLineageType(); AtlasVertex inVertex = edge.getInVertex(); AtlasVertex outVertex = edge.getOutVertex(); String inGuid = AtlasGraphUtilsV2.getIdFromVertex(inVertex); String outGuid = AtlasGraphUtilsV2.getIdFromVertex(outVertex); String relationGuid = AtlasGraphUtilsV2.getEncodedProperty(edge, RELATIONSHIP_GUID_PROPERTY_KEY, String.class); - boolean isInputEdge = edge.getLabel().equalsIgnoreCase(PROCESS_INPUTS_EDGE); + boolean isInputEdge = edge.getLabel().equalsIgnoreCase(LINEAGE_MAP.get(lineageType)[0]); if (!entities.containsKey(inGuid)) { AtlasEntityHeader entityHeader = entityRetriever.toAtlasEntityHeaderWithClassifications(inVertex, lineageContext.getAttributes()); @@ -1449,26 +1518,44 @@ private void processEdge(final AtlasEdge edge, final Map entities, final Set relations, final Set visitedEdges, final Set attributes) throws AtlasBaseException { + private void processEdge(final AtlasEdge edge, final Map entities, final Set relations, final Set visitedEdges, final Set attributes, int level, AtomicInteger traversalOrder) throws AtlasBaseException { AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processEdge"); + String lineageType = RequestContext.get().getLineageType(); AtlasVertex inVertex = edge.getInVertex(); AtlasVertex outVertex = 
edge.getOutVertex();
+
+        String inTypeName = AtlasGraphUtilsV2.getTypeName(inVertex);
+        AtlasEntityType inEntityType = atlasTypeRegistry.getEntityTypeByName(inTypeName);
+        if (inEntityType == null) {
+            throw new AtlasBaseException(AtlasErrorCode.TYPE_NAME_NOT_FOUND, inTypeName);
+        }
+        boolean inIsProcess = inEntityType.getTypeAndAllSuperTypes().contains(PROCESS_SUPER_TYPE);
+
         String inGuid = AtlasGraphUtilsV2.getIdFromVertex(inVertex);
         String outGuid = AtlasGraphUtilsV2.getIdFromVertex(outVertex);
         String relationGuid = AtlasGraphUtilsV2.getEncodedProperty(edge, RELATIONSHIP_GUID_PROPERTY_KEY, String.class);
-        boolean isInputEdge = edge.getLabel().equalsIgnoreCase(PROCESS_INPUTS_EDGE);
-
+        boolean isInputEdge = edge.getLabel().equalsIgnoreCase(LINEAGE_MAP.get(lineageType)[0]);
         if (!entities.containsKey(inGuid)) {
             AtlasEntityHeader entityHeader = entityRetriever.toAtlasEntityHeader(inVertex, attributes);
+            if (!inIsProcess) {
+                entityHeader.setDepth(level);
+                entityHeader.setTraversalOrder(traversalOrder.get());
+            }
+
             entities.put(inGuid, entityHeader);
         }

         if (!entities.containsKey(outGuid)) {
             AtlasEntityHeader entityHeader = entityRetriever.toAtlasEntityHeader(outVertex, attributes);
+            if (inIsProcess) {
+                entityHeader.setDepth(level);
+                entityHeader.setTraversalOrder(traversalOrder.get());
+            }
+
             entities.put(outGuid, entityHeader);
         }

         if (isInputEdge) {
@@ -1500,13 +1587,13 @@ private String getLineageQuery(String entityGuid, LineageDirection direction, in
         String incomingFrom = null;
         String outgoingTo = null;
         String ret;
-
+        String lineageType = RequestContext.get().getLineageType();
         if (direction.equals(INPUT)) {
-            incomingFrom = PROCESS_OUTPUTS_EDGE;
-            outgoingTo = PROCESS_INPUTS_EDGE;
+            incomingFrom = LINEAGE_MAP.get(lineageType)[1];
+            outgoingTo = LINEAGE_MAP.get(lineageType)[0];
         } else if (direction.equals(OUTPUT)) {
-            incomingFrom = PROCESS_INPUTS_EDGE;
-            outgoingTo = PROCESS_OUTPUTS_EDGE;
+            incomingFrom = LINEAGE_MAP.get(lineageType)[0];
+            outgoingTo = LINEAGE_MAP.get(lineageType)[1];
         }

         bindings.put("guid", entityGuid);
diff --git a/repository/src/main/java/org/apache/atlas/discovery/JsonToElasticsearchQuery.java b/repository/src/main/java/org/apache/atlas/discovery/JsonToElasticsearchQuery.java
new file mode 100644
index 0000000000..ad39901a04
--- /dev/null
+++ b/repository/src/main/java/org/apache/atlas/discovery/JsonToElasticsearchQuery.java
@@ -0,0 +1,66 @@
+package org.apache.atlas.discovery;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.atlas.RequestContext;
+import org.apache.atlas.utils.AtlasPerfMetrics;
+
+public class JsonToElasticsearchQuery {
+
+    public static JsonNode convertConditionToQuery(String condition, JsonNode criterion, ObjectMapper mapper) {
+        if (condition.equals("AND")) {
+            return mapper.createObjectNode().set("bool", mapper.createObjectNode().set("filter", mapper.createArrayNode()));
+        } else if (condition.equals("OR")) {
+            JsonNode node = mapper.createObjectNode().set("bool", mapper.createObjectNode());
+            return mapper.createObjectNode().set("bool", mapper.createObjectNode().set("should", mapper.createArrayNode()));
+        } else {
+            throw new IllegalArgumentException("Unsupported condition: " + condition);
+        }
+    }
+
+    public static JsonNode convertJsonToQuery(JsonNode data, ObjectMapper mapper) {
+        AtlasPerfMetrics.MetricRecorder convertJsonToQueryMetrics = RequestContext.get().startMetricRecord("convertJsonToQuery");
+        String condition = data.get("condition").asText();
+        JsonNode criterion = data.get("criterion");
+
+        JsonNode query = convertConditionToQuery(condition, criterion, mapper);
+
+        for (JsonNode crit : criterion) {
+            if (crit.has("condition")) {
+                JsonNode nestedQuery = convertJsonToQuery(crit, mapper);
+                if (condition.equals("AND")) {
+                    ((com.fasterxml.jackson.databind.node.ArrayNode) query.get("bool").get("filter")).add(nestedQuery);
+                } else {
+                    ((com.fasterxml.jackson.databind.node.ArrayNode) query.get("bool").get("should")).add(nestedQuery);
+                }
+            } else {
+                String operator = crit.get("operator").asText();
+                String attributeName = crit.get("attributeName").asText();
+                String attributeValue = crit.get("attributeValue").asText();
+
+                if (operator.equals("EQUALS")) {
+                    com.fasterxml.jackson.databind.node.ObjectNode termNode = ((com.fasterxml.jackson.databind.node.ArrayNode) query.get("bool").get(condition.equals("AND") ? "filter" : "should")).addObject();
+                    termNode.putObject("term").put(attributeName, attributeValue);
+                } else if (operator.equals("NOT_EQUALS")) {
+                    com.fasterxml.jackson.databind.node.ObjectNode termNode = ((com.fasterxml.jackson.databind.node.ArrayNode) query.get("bool").get(condition.equals("AND") ? "filter" : "should")).addObject();
+                    termNode.putObject("bool").putObject("must_not").putObject("term").put(attributeName, attributeValue);
+                } else if (operator.equals("STARTS_WITH")) {
+                    com.fasterxml.jackson.databind.node.ObjectNode wildcardNode = ((com.fasterxml.jackson.databind.node.ArrayNode) query.get("bool").get(condition.equals("AND") ? "filter" : "should")).addObject();
+                    wildcardNode.putObject("wildcard").put(attributeName, attributeValue + "*");
+                } else if (operator.equals("ENDS_WITH")) {
+                    com.fasterxml.jackson.databind.node.ObjectNode wildcardNode = ((com.fasterxml.jackson.databind.node.ArrayNode) query.get("bool").get(condition.equals("AND") ? "filter" : "should")).addObject();
+                    wildcardNode.putObject("wildcard").put(attributeName, "*" + attributeValue);
+                } else if (operator.equals("IN")) {
+                    com.fasterxml.jackson.databind.node.ObjectNode termsNode = ((com.fasterxml.jackson.databind.node.ArrayNode) query.get("bool").get(condition.equals("AND") ? "filter" : "should")).addObject();
+                    termsNode.putObject("terms").set(attributeName, crit.get("attributeValue"));
+                } else if (operator.equals("NOT_IN")) {
+                    com.fasterxml.jackson.databind.node.ObjectNode termsNode = ((com.fasterxml.jackson.databind.node.ArrayNode) query.get("bool").get(condition.equals("AND") ? "filter" : "should")).addObject();
+                    termsNode.putObject("bool").putObject("must_not").putObject("terms").put(attributeName, crit.get("attributeValue"));
+                }
+
+            }
+        }
+        RequestContext.get().endMetricRecord(convertJsonToQueryMetrics);
+        return query;
+    }
+}
diff --git a/repository/src/main/java/org/apache/atlas/discovery/UsersGroupsRolesStore.java b/repository/src/main/java/org/apache/atlas/discovery/UsersGroupsRolesStore.java
new file mode 100644
index 0000000000..8cbf4df299
--- /dev/null
+++ b/repository/src/main/java/org/apache/atlas/discovery/UsersGroupsRolesStore.java
@@ -0,0 +1,69 @@
+package org.apache.atlas.discovery;
+
+import org.apache.atlas.plugin.model.RangerPolicy;
+import org.apache.atlas.plugin.util.RangerRoles;
+import org.apache.atlas.plugin.util.RangerUserStore;
+
+import java.util.List;
+
+public class UsersGroupsRolesStore {
+
+    private RangerUserStore userStore;
+    private RangerRoles allRoles;
+    private List<RangerPolicy> resourcePolicies;
+    private List<RangerPolicy> tagPolicies;
+    private List<RangerPolicy> abacPolicies;
+    private static UsersGroupsRolesStore usersGroupsRolesStore;
+
+    public static UsersGroupsRolesStore getInstance() {
+        synchronized (UsersGroupsRolesStore.class) {
+            if (usersGroupsRolesStore == null) {
+                usersGroupsRolesStore = new UsersGroupsRolesStore();
+            }
+            return usersGroupsRolesStore;
+        }
+    }
+
+    public UsersGroupsRolesStore () {}
+
+    public void setUserStore(RangerUserStore userStore) {
+        this.userStore = userStore;
+    }
+
+    public RangerUserStore getUserStore() {
+        return userStore;
+    }
+
+    public void setAllRoles(RangerRoles allRoles) {
+        this.allRoles = allRoles;
+    }
+
+    public RangerRoles getAllRoles() {
+        return allRoles;
+    }
+
+    public void setResourcePolicies(List<RangerPolicy> resourcePolicies) {
+        this.resourcePolicies = resourcePolicies;
+    }
+
+    public List<RangerPolicy> getResourcePolicies() {
+        return resourcePolicies;
+    }
+
+    public void setTagPolicies(List<RangerPolicy> tagPolicies) {
+        this.tagPolicies = tagPolicies;
+    }
+
+    public List<RangerPolicy> getTagPolicies() {
+        return tagPolicies;
+    }
+
+    public void setAbacPolicies(List<RangerPolicy> abacPolicies) {
+        this.abacPolicies = abacPolicies;
+    }
+
+    public List<RangerPolicy> getAbacPolicies() {
+        return abacPolicies;
+    }
+
+}
diff --git a/repository/src/main/java/org/apache/atlas/glossary/GlossaryService.java b/repository/src/main/java/org/apache/atlas/glossary/GlossaryService.java
index f87e92498e..3192a94b6b 100644
--- a/repository/src/main/java/org/apache/atlas/glossary/GlossaryService.java
+++ b/repository/src/main/java/org/apache/atlas/glossary/GlossaryService.java
@@ -35,7 +35,6 @@ import org.apache.atlas.repository.graphdb.AtlasGraph;
 import org.apache.atlas.repository.graphdb.AtlasVertex;
 import org.apache.atlas.repository.ogm.DataAccess;
-import org.apache.atlas.repository.store.graph.AtlasEntityStore;
 import org.apache.atlas.repository.store.graph.AtlasRelationshipStore;
 import org.apache.atlas.repository.store.graph.v2.AtlasEntityChangeNotifier;
 import org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2;
@@ -533,7 +532,7 @@ public void assignTermToEntities(String termGuid, List<AtlasRelatedObjectId> rel
             LOG.debug("==> GlossaryService.assignTermToEntities({}, {})", termGuid, relatedObjectIds);
         }

-        AtlasGlossaryTerm glossaryTerm = dataAccess.load(getAtlasGlossaryTermSkeleton(termGuid));
+        AtlasGlossaryTerm glossaryTerm = dataAccess.loadWithMinInfo(getAtlasGlossaryTermSkeleton(termGuid), true, true);

         glossaryTermUtils.processTermAssignments(glossaryTerm, relatedObjectIds);

diff --git a/repository/src/main/java/org/apache/atlas/glossary/GlossaryTermUtils.java 
b/repository/src/main/java/org/apache/atlas/glossary/GlossaryTermUtils.java index 72c6b8dbd3..d7a37bfb1b 100644 --- a/repository/src/main/java/org/apache/atlas/glossary/GlossaryTermUtils.java +++ b/repository/src/main/java/org/apache/atlas/glossary/GlossaryTermUtils.java @@ -18,6 +18,7 @@ package org.apache.atlas.glossary; import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.RequestContext; import org.apache.atlas.bulkimport.BulkImportResponse; import org.apache.atlas.bulkimport.BulkImportResponse.ImportInfo; import org.apache.atlas.exception.AtlasBaseException; @@ -37,6 +38,8 @@ import org.apache.atlas.type.AtlasRelationshipType; import org.apache.atlas.type.AtlasTypeRegistry; import org.apache.atlas.util.FileUtils; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.apache.atlas.utils.AtlasPerfTracer; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.MapUtils; import org.apache.commons.lang.ArrayUtils; @@ -68,6 +71,8 @@ public class GlossaryTermUtils extends GlossaryUtils { private static final Logger LOG = LoggerFactory.getLogger(GlossaryTermUtils.class); private static final boolean DEBUG_ENABLED = LOG.isDebugEnabled(); + private static final Logger PERF_LOG = AtlasPerfTracer.getPerfLogger("utils.GlossaryTermUtils"); + private static final int INDEX_FOR_GLOSSARY_AT_RECORD = 0; private static final int INDEX_FOR_TERM_AT_RECORD = 1; @@ -98,20 +103,31 @@ public void processTermAssignments(AtlasGlossaryTerm glossaryTerm, Collection GlossaryTermUtils.processTermAssignments({}, {})", glossaryTerm, relatedObjectIds); } + AtlasPerfMetrics.MetricRecorder metric = RequestContext.get().startMetricRecord("processTermAssignments"); + AtlasPerfTracer perf = null; + + if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { + perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "processTermAssignments"); + } + Objects.requireNonNull(glossaryTerm); - Set assignedEntities = glossaryTerm.getAssignedEntities(); + // Set assignedEntities = glossaryTerm.getAssignedEntities(); for (AtlasRelatedObjectId objectId : relatedObjectIds) { - if (CollectionUtils.isNotEmpty(assignedEntities) && assignedEntities.contains(objectId)) { - if (DEBUG_ENABLED) { - LOG.debug("Skipping already assigned entity {}", objectId); - } - continue; - } + /*** + * Discuss with @Aayush :PLT-305 + */ +// if (CollectionUtils.isNotEmpty(assignedEntities) && assignedEntities.contains(objectId)) { +// if (DEBUG_ENABLED) { +// LOG.debug("Skipping already assigned entity {}", objectId); +// } +// continue; +// } if (DEBUG_ENABLED) { LOG.debug("Assigning term guid={}, to entity guid = {}", glossaryTerm.getGuid(), objectId.getGuid()); } + createRelationship(defineTermAssignment(glossaryTerm.getGuid(), objectId)); AtlasVertex vertex = getVertexById(objectId.getGuid()); @@ -123,6 +139,8 @@ public void processTermAssignments(AtlasGlossaryTerm glossaryTerm, Collection relatedObjectIds) throws AtlasBaseException { diff --git a/repository/src/main/java/org/apache/atlas/glossary/GlossaryUtils.java b/repository/src/main/java/org/apache/atlas/glossary/GlossaryUtils.java index 0f16d0dc82..2310726b2e 100644 --- a/repository/src/main/java/org/apache/atlas/glossary/GlossaryUtils.java +++ b/repository/src/main/java/org/apache/atlas/glossary/GlossaryUtils.java @@ -55,8 +55,8 @@ public abstract class GlossaryUtils { public static final String TERM_ASSIGNMENT_ATTR_SOURCE = "source"; static final String ATLAS_GLOSSARY_TYPENAME = "AtlasGlossary"; - static final String ATLAS_GLOSSARY_TERM_TYPENAME = 
"AtlasGlossaryTerm"; - static final String ATLAS_GLOSSARY_CATEGORY_TYPENAME = "AtlasGlossaryCategory"; + public static final String ATLAS_GLOSSARY_TERM_TYPENAME = "AtlasGlossaryTerm"; + public static final String ATLAS_GLOSSARY_CATEGORY_TYPENAME = "AtlasGlossaryCategory"; public static final String NAME = "name"; public static final String QUALIFIED_NAME = "qualifiedName"; diff --git a/repository/src/main/java/org/apache/atlas/repository/audit/ESBasedAuditRepository.java b/repository/src/main/java/org/apache/atlas/repository/audit/ESBasedAuditRepository.java index cbab135606..2fefbc08df 100644 --- a/repository/src/main/java/org/apache/atlas/repository/audit/ESBasedAuditRepository.java +++ b/repository/src/main/java/org/apache/atlas/repository/audit/ESBasedAuditRepository.java @@ -84,6 +84,7 @@ public class ESBasedAuditRepository extends AbstractStorageBasedAuditRepository private static final String USER = "user"; private static final String DETAIL = "detail"; private static final String ENTITY = "entity"; + private static final String CLASSIFICATION_DETAIL= "classificationDetail"; private static final String bulkMetadata = String.format("{ \"index\" : { \"_index\" : \"%s\" } }%n", INDEX_NAME); /* @@ -134,7 +135,8 @@ public void putEventsV2(List events) throws AtlasBaseExcepti event.getEntityQualifiedName(), event.getEntity().getTypeName(), created, - "" + event.getEntity().getUpdateTime().getTime()); + "" + event.getEntity().getUpdateTime().getTime(), + event.getClassificationDetail()); bulkRequestBody.append(bulkMetadata); bulkRequestBody.append(bulkItem); @@ -174,7 +176,7 @@ private String getQueryTemplate(Map requestContextHeaders) { StringBuilder template = new StringBuilder(); template.append("'{'\"entityId\":\"{0}\",\"action\":\"{1}\",\"detail\":{2},\"user\":\"{3}\", \"eventKey\":\"{4}\", " + - "\"entityQualifiedName\": {5}, \"typeName\": \"{6}\",\"created\":{7}, \"timestamp\":{8}"); + "\"entityQualifiedName\": {5}, \"typeName\": \"{6}\",\"created\":{7}, \"timestamp\":{8}, \"classificationDetail\":{9}"); if (MapUtils.isNotEmpty(requestContextHeaders)) { template.append(",") @@ -226,7 +228,14 @@ private EntityAuditSearchResult getResultFromResponse(String responseString) thr EntityAuditEventV2 event = new EntityAuditEventV2(); event.setEntityId(entityGuid); event.setAction(EntityAuditEventV2.EntityAuditActionV2.fromString((String) source.get(ACTION))); - event.setDetail((Map) source.get(DETAIL)); + if (source.get(DETAIL) != null) { + if (source.get(DETAIL) instanceof Map) { + event.setDetail((Map) source.get(DETAIL)); + } + } + if (source.get(CLASSIFICATION_DETAIL) instanceof List) { + event.setClassificationDetails((List>) source.get(CLASSIFICATION_DETAIL)); + } event.setUser((String) source.get(USER)); event.setCreated((long) source.get(CREATED)); if (source.get(TIMESTAMP) != null) { diff --git a/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditListenerV2.java b/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditListenerV2.java index e83690774f..4f80f887a3 100644 --- a/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditListenerV2.java +++ b/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditListenerV2.java @@ -204,17 +204,10 @@ public void onClassificationsAdded(AtlasEntity entity, List MetricRecorder metric = RequestContext.get().startMetricRecord("entityAudit"); FixedBufferList classificationsAdded = getAuditEventsList(); - for (AtlasClassification classification : classifications) { - if 
(entity.getGuid().equals(classification.getEntityGuid())) { - createEvent(classificationsAdded.next(), entity, CLASSIFICATION_ADD, "Added classification: " + AtlasType.toJson(classification)); - } else { - createEvent(classificationsAdded.next(), entity, PROPAGATED_CLASSIFICATION_ADD, "Added propagated classification: " + AtlasType.toJson(classification)); - } - } - - for (EntityAuditRepository auditRepository: auditRepositories) { - auditRepository.putEventsV2(classificationsAdded.toList()); - } + Map> entityClassifications = new HashMap<>(); + Map> propagatedClassifications = new HashMap<>(); + getClassificationsFromEntity(classifications, entity, entityClassifications, propagatedClassifications); + emitAddClassificationEvent(classificationsAdded, entityClassifications, propagatedClassifications); RequestContext.get().endMetricRecord(metric); } @@ -222,23 +215,16 @@ public void onClassificationsAdded(AtlasEntity entity, List @Override public void onClassificationsAdded(List entities, List classifications, boolean forceInline) throws AtlasBaseException { + onClassificationsAdded(entities, classifications); + } + public void onClassificationsAdded(List entities, List classifications) throws AtlasBaseException { if (CollectionUtils.isNotEmpty(classifications)) { MetricRecorder metric = RequestContext.get().startMetricRecord("entityAudit"); FixedBufferList events = getAuditEventsList(); - - for (AtlasClassification classification : classifications) { - for (AtlasEntity entity : entities) { - if (entity.getGuid().equals(classification.getEntityGuid())) { - createEvent(events.next(), entity, CLASSIFICATION_ADD, "Added classification: " + AtlasType.toJson(classification)); - } else { - createEvent(events.next(), entity, PROPAGATED_CLASSIFICATION_ADD, "Added propagated classification: " + AtlasType.toJson(classification)); - } - } - } - - for (EntityAuditRepository auditRepository: auditRepositories) { - auditRepository.putEventsV2(events.toList()); - } + Map> entityClassifications = new HashMap<>(); + Map> propagatedClassifications = new HashMap<>(); + getClassificationsFromEntities(classifications, entities,entityClassifications, propagatedClassifications ); + emitAddClassificationEvent(events, entityClassifications, propagatedClassifications); RequestContext.get().endMetricRecord(metric); } @@ -251,22 +237,51 @@ public void onClassificationsUpdated(AtlasEntity entity, List events = getAuditEventsList(); String guid = entity.getGuid(); + Map> entityClassifications = new HashMap<>(); + Map> propagatedClassifications = new HashMap<>(); + getClassificationsFromEntity(classifications, entity, entityClassifications, propagatedClassifications); - for (AtlasClassification classification : classifications) { - if (guid.equals(classification.getEntityGuid())) { - createEvent(events.next(), entity, CLASSIFICATION_UPDATE, "Updated classification: " + AtlasType.toJson(classification)); - } else { + List addedClassification = new ArrayList<>(0); + List deletedClassification = new ArrayList<>(0); + List updatedClassification = new ArrayList<>(0); + + if (CollectionUtils.isNotEmpty(propagatedClassifications.get(entity))) { + propagatedClassifications.get(entity).forEach(classification -> { if (isPropagatedClassificationAdded(guid, classification)) { - createEvent(events.next(), entity, PROPAGATED_CLASSIFICATION_ADD, "Added propagated classification: " + AtlasType.toJson(classification)); + addedClassification.add(classification); } else if (isPropagatedClassificationDeleted(guid, classification)) { - 
createEvent(events.next(), entity, PROPAGATED_CLASSIFICATION_DELETE, "Deleted propagated classification: " + getDeleteClassificationString(classification.getTypeName())); + deletedClassification.add(new AtlasClassification(classification.getTypeName())); } else { - createEvent(events.next(), entity, PROPAGATED_CLASSIFICATION_UPDATE, "Updated propagated classification: " + AtlasType.toJson(classification)); + updatedClassification.add(classification); } - } + }); } - for (EntityAuditRepository auditRepository: auditRepositories) { + if (CollectionUtils.isNotEmpty(addedClassification)) { + EntityAuditEventV2 auditEvent = events.next(); + auditEvent.setClassificationDetail(AtlasJson.toV1Json(addedClassification)); + createEvent(auditEvent, entity, PROPAGATED_CLASSIFICATION_ADD, "Added propagated classifications: " + AtlasType.toJson(new AtlasClassification())); + } + + if (CollectionUtils.isNotEmpty(deletedClassification)) { + EntityAuditEventV2 auditEvent = events.next(); + auditEvent.setClassificationDetail(AtlasJson.toV1Json(deletedClassification)); + createEvent(auditEvent, entity, PROPAGATED_CLASSIFICATION_DELETE, "Deleted propagated classifications: " + AtlasType.toJson(new AtlasClassification())); + } + + if (CollectionUtils.isNotEmpty(updatedClassification)) { + EntityAuditEventV2 auditEvent = events.next(); + auditEvent.setClassificationDetail(AtlasJson.toV1Json(updatedClassification)); + createEvent(auditEvent, entity, PROPAGATED_CLASSIFICATION_UPDATE, "Updated propagated classifications: " + AtlasType.toJson(new AtlasClassification())); + } + + if (entityClassifications.get(entity) != null) { + EntityAuditEventV2 auditEvent = events.next(); + auditEvent.setClassificationDetail(AtlasJson.toV1Json(entityClassifications.get(entity))); + createEvent(auditEvent, entity, CLASSIFICATION_UPDATE, "Updated classifications: " + AtlasType.toJson(new AtlasClassification())); + } + + for (EntityAuditRepository auditRepository : auditRepositories) { auditRepository.putEventsV2(events.toList()); } @@ -278,6 +293,18 @@ private String getDeleteClassificationString(String typeName) { return String.format("{\"typeName\": \"%s\"}", typeName); } + private Map getDeleteClassificationMap(String typeName) { + Map map = new HashMap<>(); + map.put("typeName", typeName); + return map; + } + + private List> getDeleteClassificationsMap(List classifications) { + return classifications.stream() + .map(classification -> Collections.singletonMap("typeName", (Object) classification.getTypeName())) + .collect(Collectors.toList()); + } + private String getLabelsString(String labels) { return String.format("{\"labels\": \"%s\"}", labels); } @@ -288,42 +315,25 @@ public void onClassificationsDeleted(AtlasEntity entity, List events = getAuditEventsList(); - - for (AtlasClassification classification : classifications) { - if (StringUtils.equals(entity.getGuid(), classification.getEntityGuid())) { - createEvent(events.next(), entity, CLASSIFICATION_DELETE, "Deleted classification: " + getDeleteClassificationString(classification.getTypeName())); - } else { - createEvent(events.next(), entity, PROPAGATED_CLASSIFICATION_DELETE, "Deleted propagated classification: " + getDeleteClassificationString(classification.getTypeName())); - } - } - - for (EntityAuditRepository auditRepository: auditRepositories) { - auditRepository.putEventsV2(events.toList()); - } + Map>> entityClassifications = new HashMap<>(); + Map>> propagatedClassifications = new HashMap<>(); + getClassificationTextFromEntity(classifications, entity, 
entityClassifications, propagatedClassifications); + emitDeleteClassificationEvent(events, entityClassifications, propagatedClassifications); RequestContext.get().endMetricRecord(metric); } } + @Override public void onClassificationsDeleted(List entities, List classifications) throws AtlasBaseException { if (CollectionUtils.isNotEmpty(classifications) && CollectionUtils.isNotEmpty(entities)) { MetricRecorder metric = RequestContext.get().startMetricRecord("onClassificationsDeleted"); FixedBufferList events = getAuditEventsList(); - - for (AtlasClassification classification : classifications) { - for (AtlasEntity entity : entities) { - if (StringUtils.equals(entity.getGuid(), classification.getEntityGuid())) { - createEvent(events.next(), entity, CLASSIFICATION_DELETE, "Deleted classification: " + getDeleteClassificationString(classification.getTypeName())); - } else { - createEvent(events.next(), entity, PROPAGATED_CLASSIFICATION_DELETE, "Deleted propagated classification: " + getDeleteClassificationString(classification.getTypeName())); - } - } - } - - for (EntityAuditRepository auditRepository: auditRepositories) { - auditRepository.putEventsV2(events.toList()); - } + Map>> entityClassifications = new HashMap<>(); + Map>> propagatedClassifications = new HashMap<>(); + getClassificationsTextFromEntities(classifications, entities, entityClassifications, propagatedClassifications); + emitDeleteClassificationEvent(events, entityClassifications, propagatedClassifications); RequestContext.get().endMetricRecord(metric); } @@ -337,7 +347,7 @@ public void onTermAdded(AtlasGlossaryTerm term, List entit FixedBufferList events = getAuditEventsList(); for (AtlasRelatedObjectId relatedObjectId : entities) { - AtlasEntity entity = instanceConverter.getAndCacheEntity(relatedObjectId.getGuid()); + AtlasEntity entity = instanceConverter.getAndCacheEntity(relatedObjectId.getGuid(), true); if (entity != null) { createEvent(events.next(), entity, TERM_ADD, "Added term: " + term.toAuditString()); @@ -762,22 +772,22 @@ public static String getV2AuditPrefix(EntityAuditActionV2 action) { ret = "Purged: "; break; case CLASSIFICATION_ADD: - ret = "Added classification: "; + ret = "Added classifications: "; break; case CLASSIFICATION_DELETE: - ret = "Deleted classification: "; + ret = "Deleted classifications: "; break; case CLASSIFICATION_UPDATE: - ret = "Updated classification: "; + ret = "Updated classifications: "; break; case PROPAGATED_CLASSIFICATION_ADD: - ret = "Added propagated classification: "; + ret = "Added propagated classifications: "; break; case PROPAGATED_CLASSIFICATION_DELETE: - ret = "Deleted propagated classification: "; + ret = "Deleted propagated classifications: "; break; case PROPAGATED_CLASSIFICATION_UPDATE: - ret = "Updated propagated classification: "; + ret = "Updated propagated classifications: "; break; case ENTITY_IMPORT_CREATE: ret = "Created by import: "; @@ -813,4 +823,81 @@ private FixedBufferList getAuditEventsList() { return ret; } -} + private void getClassificationsFromEntity(List classifications, AtlasEntity entity, Map> entityClassifications, Map> propagatedClassifications) { + if (entityClassifications == null) { + entityClassifications = new HashMap<>(); + } + if (propagatedClassifications == null) { + propagatedClassifications = new HashMap<>(); + } + + for (AtlasClassification classification : classifications) { + if (entity.getGuid().equals(classification.getEntityGuid())) { + entityClassifications.computeIfAbsent(entity, key -> new ArrayList<>()).add(classification); 
+            } else {
+                propagatedClassifications.computeIfAbsent(entity, key -> new ArrayList<>()).add(classification);
+            }
+        }
+    }
+
+    private void getClassificationsFromEntities(List<AtlasClassification> classifications, List<AtlasEntity> entities, Map<AtlasEntity, List<AtlasClassification>> entityClassifications, Map<AtlasEntity, List<AtlasClassification>> propagatedClassifications) {
+        for (AtlasEntity entity : entities) {
+            getClassificationsFromEntity(classifications, entity, entityClassifications, propagatedClassifications);
+        }
+    }
+
+    private void emitAddClassificationEvent(FixedBufferList<EntityAuditEventV2> events, Map<AtlasEntity, List<AtlasClassification>> entityClassifications, Map<AtlasEntity, List<AtlasClassification>> propagatedClassifications) throws AtlasBaseException {
+        entityClassifications.forEach((entity, eClassifications) -> {
+            EntityAuditEventV2 auditEvent = events.next();
+            auditEvent.setClassificationDetail(AtlasJson.toV1Json(eClassifications));
+            createEvent(auditEvent, entity, CLASSIFICATION_ADD, "Added classifications: " + null);
+        });
+
+        propagatedClassifications.forEach((entity, pClassifications) -> {
+            EntityAuditEventV2 auditEvent = events.next();
+            auditEvent.setClassificationDetail(AtlasJson.toV1Json(pClassifications));
+            createEvent(auditEvent, entity, PROPAGATED_CLASSIFICATION_ADD, "Added propagated classifications: " + null);
+        });
+        for (EntityAuditRepository auditRepository : auditRepositories) {
+            auditRepository.putEventsV2(events.toList());
+        }
+    }
+    private void getClassificationTextFromEntity(List<AtlasClassification> classifications, AtlasEntity entity, Map<AtlasEntity, List<Map<String, Object>>> entityClassifications, Map<AtlasEntity, List<Map<String, Object>>> propagatedClassifications) {
+        if (entityClassifications == null) {
+            entityClassifications = new HashMap<>();
+        }
+        if (propagatedClassifications == null) {
+            propagatedClassifications = new HashMap<>();
+        }
+
+        for (AtlasClassification classification : classifications) {
+            if (entity.getGuid().equals(classification.getEntityGuid())) {
+                entityClassifications.computeIfAbsent(entity, key -> new ArrayList<>()).add(getDeleteClassificationMap(classification.getTypeName()));
+            } else {
+                propagatedClassifications.computeIfAbsent(entity, key -> new ArrayList<>()).add(getDeleteClassificationMap(classification.getTypeName()));
+            }
+        }
+    }
+
+    private void getClassificationsTextFromEntities(List<AtlasClassification> classifications, List<AtlasEntity> entities, Map<AtlasEntity, List<Map<String, Object>>> entityClassifications, Map<AtlasEntity, List<Map<String, Object>>> propagatedClassifications) {
+        for (AtlasEntity entity : entities) {
+            getClassificationTextFromEntity(classifications, entity, entityClassifications, propagatedClassifications);
+        }
+    }
+    private void emitDeleteClassificationEvent(FixedBufferList<EntityAuditEventV2> events, Map<AtlasEntity, List<Map<String, Object>>> entityClassifications, Map<AtlasEntity, List<Map<String, Object>>> propagatedClassifications) throws AtlasBaseException {
+        entityClassifications.forEach((entity, eClassifications) -> {
+            EntityAuditEventV2 auditEvent = events.next();
+            auditEvent.setClassificationDetail(AtlasJson.toV1Json(eClassifications));
+            createEvent(auditEvent, entity, CLASSIFICATION_DELETE, "Deleted classifications: " + null);
+        });
+        propagatedClassifications.forEach((entity, pClassifications) -> {
+            EntityAuditEventV2 auditEvent = events.next();
+            auditEvent.setClassificationDetail(AtlasJson.toV1Json(pClassifications));
+            createEvent(auditEvent, entity, PROPAGATED_CLASSIFICATION_DELETE, "Deleted propagated classifications: " + null);
+        });
+
+        for (EntityAuditRepository auditRepository : auditRepositories) {
+            auditRepository.putEventsV2(events.toList());
+        }
+    }
+}
\ No newline at end of file
diff --git a/repository/src/main/java/org/apache/atlas/repository/audit/StartupTimeLogger.java b/repository/src/main/java/org/apache/atlas/repository/audit/StartupTimeLogger.java
new file mode 100644
index 0000000000..dc816d3ed0
--- /dev/null
+++ b/repository/src/main/java/org/apache/atlas/repository/audit/StartupTimeLogger.java
@@ -0,0 +1,41 @@
+package org.apache.atlas.repository.audit;
+
+import org.apache.atlas.type.AtlasType;
+import org.apache.atlas.utils.AtlasPerfTracer;
+import org.apache.commons.logging.Log;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.context.ApplicationListener;
+import org.springframework.context.event.ContextRefreshedEvent;
+import org.springframework.stereotype.Component;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+@Component
+public class StartupTimeLogger implements ApplicationListener<ContextRefreshedEvent> {
+    private final StartupTimeLoggerBeanPostProcessor beanPostProcessor;
+
+    private static final Logger LOG = LoggerFactory.getLogger(StartupTimeLogger.class);
+
+    public StartupTimeLogger(StartupTimeLoggerBeanPostProcessor beanPostProcessor) {
+        this.beanPostProcessor = beanPostProcessor;
+    }
+
+    @Override
+    public void onApplicationEvent(ContextRefreshedEvent event) {
+        // Print the startup times after all beans are loaded
+        printHashMapInTableFormatDescendingOrder(beanPostProcessor.getDurationTimeMap(), "creationTime");
+    }
+
+    public static void printHashMapInTableFormatDescendingOrder(Map<String, Long> map, String value) {
+        // Convert map to a list of entries
+        List<Map.Entry<String, Long>> list = new ArrayList<>(map.entrySet());
+
+        // Sort the list by values in descending order
+        list.sort((entry1, entry2) -> entry2.getValue().compareTo(entry1.getValue()));
+
+        LOG.info("Capturing Bean creation time {}", AtlasType.toJson(list));
+    }
+}
\ No newline at end of file
diff --git a/repository/src/main/java/org/apache/atlas/repository/audit/StartupTimeLoggerBeanPostProcessor.java b/repository/src/main/java/org/apache/atlas/repository/audit/StartupTimeLoggerBeanPostProcessor.java
new file mode 100644
index 0000000000..47e010c88c
--- /dev/null
+++ b/repository/src/main/java/org/apache/atlas/repository/audit/StartupTimeLoggerBeanPostProcessor.java
@@ -0,0 +1,45 @@
+package org.apache.atlas.repository.audit;
+
+import org.apache.atlas.utils.AtlasPerfTracer;
+import org.slf4j.Logger;
+import org.springframework.beans.factory.config.BeanPostProcessor;
+import org.springframework.stereotype.Component;
+import java.util.HashMap;
+import java.util.Map;
+
+@Component
+public class StartupTimeLoggerBeanPostProcessor implements BeanPostProcessor {
+    private final Map<String, Long> startTimeMap = new HashMap<>();
+
+    public Map<String, Long> getDurationTimeMap() {
+        return durationTimeMap;
+    }
+
+    private final Map<String, Long> durationTimeMap = new HashMap<>();
+
+    private static final Logger PERF_LOG = AtlasPerfTracer.getPerfLogger("Beans");
+
+    private AtlasPerfTracer perf = null;
+
+    @Override
+    public Object postProcessBeforeInitialization(Object bean, String beanName) {
+        // Record the start time
+        startTimeMap.put(bean.getClass().getName(), System.currentTimeMillis());
+        if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
+            perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "Beans.create(" + beanName + ")");
+        }
+        return bean;
+    }
+
+    @Override
+    public Object postProcessAfterInitialization(Object bean, String beanName) {
+        AtlasPerfTracer.log(perf);
+        // Calculate and log the startup time
+        long startTime = startTimeMap.getOrDefault(bean.getClass().getName(), -1L);
+        long endTime = System.currentTimeMillis();
+        if (startTime != -1L) {
+            durationTimeMap.put(bean.getClass().getName(), endTime-startTime);
+        }
+        return bean;
+    }
+}
diff --git 
a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasInstanceConverter.java b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasInstanceConverter.java index c1aef7b509..3978c32a80 100644 --- a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasInstanceConverter.java +++ b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasInstanceConverter.java @@ -36,6 +36,7 @@ import org.apache.atlas.model.legacy.EntityResult; import org.apache.atlas.repository.graphdb.AtlasGraph; import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; +import org.apache.atlas.utils.AtlasPerfMetrics; import org.apache.atlas.v1.model.instance.Referenceable; import org.apache.atlas.v1.model.instance.Struct; import org.apache.atlas.repository.converters.AtlasFormatConverter.ConverterContext; @@ -300,6 +301,7 @@ public AtlasEntity getAndCacheEntity(String guid) throws AtlasBaseException { } public AtlasEntity getAndCacheEntity(String guid, boolean ignoreRelationshipAttributes) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder recorder = RequestContext.get().startMetricRecord("getAndCacheEntity"); RequestContext context = RequestContext.get(); AtlasEntity entity = context.getEntity(guid); @@ -318,10 +320,21 @@ public AtlasEntity getAndCacheEntity(String guid, boolean ignoreRelationshipAttr } } } + RequestContext.get().endMetricRecord(recorder); return entity; } + public AtlasEntity getEntity(String guid, boolean ignoreRelationshipAttributes) throws AtlasBaseException { + AtlasEntity entity = null; + if (ignoreRelationshipAttributes) { + entity = entityGraphRetrieverIgnoreRelationshipAttrs.toAtlasEntity(guid); + } else { + entity = entityGraphRetriever.toAtlasEntity(guid); + } + return entity; + } + public AtlasEntityWithExtInfo getAndCacheEntityExtInfo(String guid) throws AtlasBaseException { RequestContext context = RequestContext.get(); diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java b/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java index 987ceec296..0f3b237147 100755 --- a/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java +++ b/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java @@ -21,6 +21,7 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.BiMap; import com.google.common.collect.HashBiMap; +import com.google.common.collect.Iterators; import org.apache.atlas.ApplicationProperties; import org.apache.atlas.AtlasErrorCode; import org.apache.atlas.AtlasException; @@ -56,6 +57,7 @@ import org.apache.atlas.exception.EntityNotFoundException; import org.apache.atlas.util.AttributeValueMap; import org.apache.atlas.util.IndexedInstance; +import org.apache.atlas.utils.AtlasPerfMetrics.MetricRecorder; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.IteratorUtils; import org.apache.commons.lang.StringUtils; @@ -73,6 +75,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.stream.Collectors; import static org.apache.atlas.AtlasErrorCode.RELATIONSHIP_CREATE_INVALID_PARAMS; import static org.apache.atlas.model.instance.AtlasEntity.Status.ACTIVE; @@ -376,6 +379,37 @@ public static AtlasVertex getClassificationVertex(AtlasVertex entityVertex, Stri return ret; } + public static List getAllClassificationVerticesByClassificationName(AtlasGraph graph, String classificationName) { + Iterable vertices = 
graph.query().has(TYPE_NAME_PROPERTY_KEY, classificationName).vertices(); + if (vertices == null) { + return Collections.emptyList(); + } + return IteratorUtils.toList(vertices.iterator()); + } + + public static List getAllAssetsWithClassificationAttached(AtlasGraph graph, String classificationName) { + Iterable classificationVertices = graph.query().has(TYPE_NAME_PROPERTY_KEY, classificationName).vertices(); + if (classificationVertices == null) { + return Collections.emptyList(); + } + List classificationVerticesList = IteratorUtils.toList(classificationVertices.iterator()); + LOG.info("classificationVerticesList size: {}", classificationVerticesList.size()); + HashSet entityVerticesSet = new HashSet<>(); + for (AtlasVertex classificationVertex : classificationVerticesList) { + Iterable attachedVertices = classificationVertex.query() + .direction(AtlasEdgeDirection.IN) + .label(CLASSIFICATION_LABEL).vertices(); + if (attachedVertices != null) { + Iterator attachedVerticesIterator = attachedVertices.iterator(); + while (attachedVerticesIterator.hasNext()) { + entityVerticesSet.add(attachedVerticesIterator.next()); + } + LOG.info("entityVerticesSet size: {}", entityVerticesSet.size()); + } + } + + return entityVerticesSet.stream().collect(Collectors.toList()); + } public static AtlasEdge getClassificationEdge(AtlasVertex entityVertex, AtlasVertex classificationVertex) { AtlasEdge ret = null; Iterable edges = entityVertex.query().direction(AtlasEdgeDirection.OUT).label(CLASSIFICATION_LABEL) @@ -394,6 +428,17 @@ public static AtlasEdge getClassificationEdge(AtlasVertex entityVertex, AtlasVer return ret; } + public static Integer getCountOfCategoryEdges(AtlasVertex entityVertex) { + + Iterator edges = getOutGoingEdgesByLabel(entityVertex, CATEGORY_TERMS_EDGE_LABEL); + + if (edges!=null) { + return Iterators.size(edges); + } + + return 0; + } + public static boolean isClassificationAttached(AtlasVertex entityVertex, AtlasVertex classificationVertex) { AtlasPerfMetrics.MetricRecorder isClassificationAttachedMetricRecorder = RequestContext.get().startMetricRecord("isClassificationAttached"); String classificationId = classificationVertex.getIdForDisplay(); @@ -508,7 +553,8 @@ public static List getPropagatedVerticesIds (AtlasVertex classificationV } public static boolean hasEntityReferences(AtlasVertex classificationVertex) { - return classificationVertex.hasEdges(AtlasEdgeDirection.IN, CLASSIFICATION_LABEL); + Iterator edgeIterator = classificationVertex.query().direction(AtlasEdgeDirection.IN).label(CLASSIFICATION_LABEL).edges(1).iterator(); + return edgeIterator != null && edgeIterator.hasNext(); } public static List getAllPropagatedEntityVertices(AtlasVertex classificationVertex) { @@ -785,7 +831,18 @@ public static List getTraitNames(AtlasVertex entityVertex) { public static List getPropagatedTraitNames(AtlasVertex entityVertex) { return getTraitNames(entityVertex, true); } - + public static List getAllTraitNamesFromAttribute(AtlasVertex entityVertex) { + List ret = new ArrayList<>(); + List traitNames = entityVertex.getMultiValuedProperty(TRAIT_NAMES_PROPERTY_KEY, String.class); + if (traitNames != null) { + ret.addAll(traitNames); + } + List propagatedTraitNames = entityVertex.getMultiValuedProperty(PROPAGATED_TRAIT_NAMES_PROPERTY_KEY, String.class); + if (propagatedTraitNames != null) { + ret.addAll(propagatedTraitNames); + } + return ret; + } public static List getAllTraitNames(AtlasVertex entityVertex) { return getTraitNames(entityVertex, null); } @@ -814,6 +871,7 @@ public static List 
getTraitNames(AtlasVertex entityVertex, Boolean propa } public static List getPropagatableClassifications(AtlasEdge edge) { + MetricRecorder metric = RequestContext.get().startMetricRecord("getPropagatableClassifications"); List ret = new ArrayList<>(); RequestContext requestContext = RequestContext.get(); @@ -832,7 +890,7 @@ public static List getPropagatableClassifications(AtlasEdge edge) { ret.addAll(getPropagationEnabledClassificationVertices(inVertex)); } } - + RequestContext.get().endMetricRecord(metric); return ret; } //Returns the vertex from which the tag is being propagated @@ -897,18 +955,18 @@ public static boolean propagatedClassificationAttachedToVertex(AtlasVertex class } public static List getClassificationEdges(AtlasVertex entityVertex) { - return getClassificationEdges(entityVertex, false); + return getClassificationEdges(entityVertex, false, null); } public static List getPropagatedClassificationEdges(AtlasVertex entityVertex) { - return getClassificationEdges(entityVertex, true); + return getClassificationEdges(entityVertex, true, null); } public static List getAllClassificationEdges(AtlasVertex entityVertex) { - return getClassificationEdges(entityVertex, null); + return getClassificationEdges(entityVertex, null, null); } - public static List getClassificationEdges(AtlasVertex entityVertex, Boolean propagated) { + public static List getClassificationEdges(AtlasVertex entityVertex, Boolean propagated, String typeName) { List ret = new ArrayList<>(); AtlasVertexQuery query = entityVertex.query().direction(AtlasEdgeDirection.OUT).label(CLASSIFICATION_LABEL); @@ -916,6 +974,10 @@ public static List getClassificationEdges(AtlasVertex entityVertex, B query = query.has(CLASSIFICATION_EDGE_IS_PROPAGATED_PROPERTY_KEY, propagated); } + if (StringUtils.isNotEmpty(typeName)) { + query = query.has(CLASSIFICATION_EDGE_NAME_PROPERTY_KEY, typeName); + } + Iterable edges = query.edges(); if (edges != null) { @@ -1881,7 +1943,7 @@ public static String getDelimitedClassificationNames(Collection classifi * @return Iterator of children vertices */ public static Iterator getActiveParentVertices(AtlasVertex vertex, String parentEdgeLabel) throws AtlasBaseException { - return getActiveVertices(vertex, parentEdgeLabel, AtlasEdgeDirection.IN); + return getActiveVertices(vertex, AtlasEdgeDirection.IN, parentEdgeLabel); } /** @@ -1890,11 +1952,12 @@ public static Iterator getActiveParentVertices(AtlasVertex vertex, * @param childrenEdgeLabel Edge label of children * @return Iterator of children vertices */ - public static Iterator getActiveChildrenVertices(AtlasVertex vertex, String childrenEdgeLabel) throws AtlasBaseException { - return getActiveVertices(vertex, childrenEdgeLabel, AtlasEdgeDirection.OUT); + + public static Iterator getActiveChildrenVertices(AtlasVertex vertex, String... childrenEdgeLabel) throws AtlasBaseException { + return getActiveVertices(vertex, AtlasEdgeDirection.OUT, childrenEdgeLabel); } - public static Iterator getActiveVertices(AtlasVertex vertex, String childrenEdgeLabel, AtlasEdgeDirection direction) throws AtlasBaseException { + public static Iterator getActiveVertices(AtlasVertex vertex, AtlasEdgeDirection direction, String... 
childrenEdgeLabel) throws AtlasBaseException { AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("CategoryPreProcessor.getEdges"); try { @@ -1913,6 +1976,28 @@ public static Iterator getActiveVertices(AtlasVertex vertex, String } } + public static Iterator getAllChildrenVertices(AtlasVertex vertex, String childrenEdgeLabel) throws AtlasBaseException { + return getAllVertices(vertex, childrenEdgeLabel, AtlasEdgeDirection.OUT); + } + + public static Iterator getAllVertices(AtlasVertex vertex, String childrenEdgeLabel, AtlasEdgeDirection direction) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("CategoryPreProcessor.getEdges"); + + try { + return vertex.query() + .direction(direction) + .label(childrenEdgeLabel) + .vertices() + .iterator(); + } catch (Exception e) { + LOG.error("Error while getting all children of category for edge label " + childrenEdgeLabel, e); + throw new AtlasBaseException(AtlasErrorCode.INTERNAL_ERROR, e); + } + finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + private static Set parseLabelsString(String labels) { Set ret = new HashSet<>(); diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/IndexRecoveryService.java b/repository/src/main/java/org/apache/atlas/repository/graph/IndexRecoveryService.java index c39876d19c..f6154727ac 100644 --- a/repository/src/main/java/org/apache/atlas/repository/graph/IndexRecoveryService.java +++ b/repository/src/main/java/org/apache/atlas/repository/graph/IndexRecoveryService.java @@ -21,7 +21,6 @@ import org.apache.atlas.ApplicationProperties; import org.apache.atlas.AtlasConstants; import org.apache.atlas.AtlasException; -import org.apache.atlas.RequestContext; import org.apache.atlas.ha.HAConfiguration; import org.apache.atlas.listener.ActiveStateChangeHandler; import org.apache.atlas.repository.graphdb.AtlasGraph; @@ -182,6 +181,7 @@ public void run() { continue; } boolean indexHealthy = isIndexHealthy(); + if (this.txRecoveryObject == null && indexHealthy) { startMonitoring(); } @@ -228,7 +228,6 @@ private void startMonitoring() { try { startTime = recoveryInfoManagement.getStartTime(); Instant newStartTime = Instant.now(); - this.graph.setEnableCache(false); txRecoveryObject = this.graph.getManagementSystem().startIndexRecovery(startTime); recoveryInfoManagement.updateStartTime(newStartTime.toEpochMilli()); diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/SolrIndexHelper.java b/repository/src/main/java/org/apache/atlas/repository/graph/SolrIndexHelper.java index 401bc024a5..0d349382e9 100644 --- a/repository/src/main/java/org/apache/atlas/repository/graph/SolrIndexHelper.java +++ b/repository/src/main/java/org/apache/atlas/repository/graph/SolrIndexHelper.java @@ -78,6 +78,8 @@ public void onChange(ChangedTypeDefs changedTypeDefs) { return; } + LOG.info("SolrIndexHelper:initializationCompleted: {}", initializationCompleted); + if(initializationCompleted) { try { AtlasGraph graph = AtlasGraphProvider.getGraphInstance(); diff --git a/repository/src/main/java/org/apache/atlas/repository/impexp/ImportService.java b/repository/src/main/java/org/apache/atlas/repository/impexp/ImportService.java index 344e44ca06..ff4231d5de 100644 --- a/repository/src/main/java/org/apache/atlas/repository/impexp/ImportService.java +++ b/repository/src/main/java/org/apache/atlas/repository/impexp/ImportService.java @@ -31,6 +31,7 @@ import 
org.apache.atlas.model.typedef.AtlasTypesDef; import org.apache.atlas.repository.store.graph.BulkImporter; import org.apache.atlas.repository.store.graph.v2.EntityImportStream; +import org.apache.atlas.repository.util.FilterUtil; import org.apache.atlas.store.AtlasTypeDefStore; import org.apache.atlas.type.AtlasType; import org.apache.atlas.type.AtlasTypeRegistry; @@ -47,6 +48,8 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.List; import static org.apache.atlas.model.impexp.AtlasImportRequest.TRANSFORMERS_KEY; @@ -191,7 +194,9 @@ public AtlasImportResult run(AtlasImportRequest request, String userName, String if (StringUtils.isBlank(fileName)) { throw new AtlasBaseException(AtlasErrorCode.INVALID_PARAMETERS, "FILENAME parameter not found"); } - + if(!FilterUtil.validateFilePath(fileName)){ + throw new AtlasBaseException(AtlasErrorCode.INVALID_PARAMETERS, "FILENAME IS INVALID"); + } AtlasImportResult result = null; try { LOG.info("==> import(user={}, from={}, fileName={})", userName, requestingIP, fileName); @@ -296,4 +301,5 @@ boolean checkHiveTableIncrementalSkipLineage(AtlasImportRequest importRequest, A private boolean isMigrationMode(AtlasImportRequest request) { return request.getOptions().containsKey(AtlasImportRequest.OPTION_KEY_MIGRATION); } + } diff --git a/repository/src/main/java/org/apache/atlas/repository/impexp/MigrationProgressService.java b/repository/src/main/java/org/apache/atlas/repository/impexp/MigrationProgressService.java index 6bb5f1e221..35c01ccac0 100644 --- a/repository/src/main/java/org/apache/atlas/repository/impexp/MigrationProgressService.java +++ b/repository/src/main/java/org/apache/atlas/repository/impexp/MigrationProgressService.java @@ -22,6 +22,7 @@ import org.apache.atlas.ApplicationProperties; import org.apache.atlas.AtlasException; import org.apache.atlas.annotation.AtlasService; +import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.model.impexp.MigrationStatus; import org.apache.atlas.repository.graph.AtlasGraphProvider; import org.apache.atlas.repository.graphdb.GraphDBMigrator; @@ -55,7 +56,7 @@ public class MigrationProgressService { private boolean zipFileBasedMigrationImport; @Inject - public MigrationProgressService(Configuration configuration, GraphDBMigrator migrator) { + public MigrationProgressService(Configuration configuration, GraphDBMigrator migrator) throws AtlasBaseException { this.migrator = migrator; this.cacheValidity = (configuration != null) ? 
configuration.getLong(MIGRATION_QUERY_CACHE_TTL, DEFAULT_CACHE_TTL_IN_SECS) : DEFAULT_CACHE_TTL_IN_SECS; @@ -63,7 +64,7 @@ public MigrationProgressService(Configuration configuration, GraphDBMigrator mig initConditionallyZipFileBasedMigrator(); } - private void initConditionallyZipFileBasedMigrator() { + private void initConditionallyZipFileBasedMigrator() throws AtlasBaseException { if (!zipFileBasedMigrationImport) { return; } diff --git a/repository/src/main/java/org/apache/atlas/repository/migration/DataMigrationStatusService.java b/repository/src/main/java/org/apache/atlas/repository/migration/DataMigrationStatusService.java index 3d357ddcfa..852c645084 100644 --- a/repository/src/main/java/org/apache/atlas/repository/migration/DataMigrationStatusService.java +++ b/repository/src/main/java/org/apache/atlas/repository/migration/DataMigrationStatusService.java @@ -18,22 +18,28 @@ package org.apache.atlas.repository.migration; +import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.model.migration.MigrationImportStatus; import org.apache.atlas.repository.Constants; import org.apache.atlas.repository.graph.AtlasGraphProvider; import org.apache.atlas.repository.graphdb.AtlasGraph; import org.apache.atlas.repository.graphdb.AtlasVertex; import org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2; +import org.apache.atlas.repository.util.FilterUtil; import org.apache.commons.codec.digest.DigestUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.FileInputStream; import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.Date; import static org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2.getEncodedProperty; import static org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2.setEncodedProperty; +import static org.apache.atlas.repository.util.FilterUtil.validateFilePath; import static org.apache.atlas.type.AtlasStructType.AtlasAttribute.encodePropertyKey; import static org.apache.atlas.type.Constants.INTERNAL_PROPERTY_KEY_PREFIX; @@ -52,8 +58,11 @@ public DataMigrationStatusService(AtlasGraph graph) { } - public void init(String fileToImport) { + public void init(String fileToImport) throws AtlasBaseException { try { + if(!validateFilePath(fileToImport)){ + throw new AtlasBaseException(AtlasErrorCode.INVALID_PARAMETERS, "File Path is invalid"); + } this.status = new MigrationImportStatus(fileToImport, DigestUtils.md5Hex(new FileInputStream(fileToImport))); } catch (IOException e) { LOG.error("Not able to create Migration status", e); @@ -66,9 +75,13 @@ public void init(String fileToImport) { getCreate(fileToImport); } - public MigrationImportStatus getCreate(String fileName) { + + public MigrationImportStatus getCreate(String fileName) throws AtlasBaseException { MigrationImportStatus create = null; try { + if(!validateFilePath(fileName)){ + throw new AtlasBaseException(AtlasErrorCode.INVALID_PARAMETERS, "File Path is invalid"); + } create = getCreate(new MigrationImportStatus(fileName, DigestUtils.md5Hex(new FileInputStream(fileName)))); } catch (IOException e) { LOG.error("Exception occurred while creating migration import", e); diff --git a/repository/src/main/java/org/apache/atlas/repository/ogm/DataAccess.java b/repository/src/main/java/org/apache/atlas/repository/ogm/DataAccess.java index ea345885ec..7fb04ca9d1 100644 --- a/repository/src/main/java/org/apache/atlas/repository/ogm/DataAccess.java +++ 
b/repository/src/main/java/org/apache/atlas/repository/ogm/DataAccess.java
@@ -28,6 +28,7 @@
 import org.apache.atlas.repository.store.graph.v2.AtlasEntityStream;
 import org.apache.atlas.DeleteType;
 import org.apache.atlas.utils.AtlasPerfTracer;
+import org.apache.atlas.utils.AtlasPerfMetrics;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
@@ -136,13 +137,17 @@ public Iterable load(final Iterable objec
         }
     }

+    public T loadWithMinInfo(T obj, boolean isMinExtInfo, boolean ignoreRelationShip) throws AtlasBaseException {
+        return load(obj, false, true, true);
+    }
     public T load(T obj) throws AtlasBaseException {
-        return load(obj, false);
+        return load(obj, false, false, false);
     }

-    public T load(T obj, boolean loadDeleted) throws AtlasBaseException {
+    public T load(T obj, boolean loadDeleted, boolean isMinExtInfo, boolean ignoreRelationShip) throws AtlasBaseException {
         Objects.requireNonNull(obj, "Can't load a null object");
+        AtlasPerfMetrics.MetricRecorder metric = RequestContext.get().startMetricRecord("DataAccess.load()");

         AtlasPerfTracer perf = null;

         try {
@@ -160,16 +165,24 @@ public T load(T obj, boolean loadDeleted) throw
                 if (LOG.isDebugEnabled()) {
                     LOG.debug("Load using GUID");
                 }
-                entityWithExtInfo = entityStore.getById(guid);
+                if (isMinExtInfo && ignoreRelationShip) {
+                    entityWithExtInfo = entityStore.getById(guid, true, true);
+                } else {
+                    entityWithExtInfo = entityStore.getById(guid);
+                }
             } else {
                 if (LOG.isDebugEnabled()) {
                     LOG.debug("Load using unique attributes");
                 }
-                entityWithExtInfo = entityStore.getByUniqueAttributes(dto.getEntityType(), dto.getUniqueAttributes(obj));
+                if (isMinExtInfo && ignoreRelationShip) {
+                    entityWithExtInfo = entityStore.getByUniqueAttributes(dto.getEntityType(), dto.getUniqueAttributes(obj), true, true);
+                } else {
+                    entityWithExtInfo = entityStore.getByUniqueAttributes(dto.getEntityType(), dto.getUniqueAttributes(obj));
+                }
             }

             // Since GUID alone can't be used to determine what ENTITY TYPE is loaded from the graph
-            String actualTypeName   = entityWithExtInfo.getEntity().getTypeName();
+            String actualTypeName = entityWithExtInfo.getEntity().getTypeName();
             String expectedTypeName = dto.getEntityType().getTypeName();
             if (!actualTypeName.equals(expectedTypeName)) {
                 throw new AtlasBaseException(AtlasErrorCode.UNEXPECTED_TYPE, expectedTypeName, actualTypeName);
@@ -182,9 +195,9 @@ public T load(T obj, boolean loadDeleted) throw
             return dto.from(entityWithExtInfo);
         } finally {
+            RequestContext.get().endMetricRecord(metric);
             AtlasPerfTracer.log(perf);
         }
-
     }

     public T load(String guid, Class clazz) throws AtlasBaseException {
diff --git a/repository/src/main/java/org/apache/atlas/repository/patches/AtlasPatchRegistry.java b/repository/src/main/java/org/apache/atlas/repository/patches/AtlasPatchRegistry.java
index d9ae5800e4..1a90968b83 100644
--- a/repository/src/main/java/org/apache/atlas/repository/patches/AtlasPatchRegistry.java
+++ b/repository/src/main/java/org/apache/atlas/repository/patches/AtlasPatchRegistry.java
@@ -41,6 +41,7 @@
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;

 import static org.apache.atlas.model.patches.AtlasPatch.PatchStatus.FAILED;
 import static org.apache.atlas.model.patches.AtlasPatch.PatchStatus.UNKNOWN;
@@ -66,9 +67,9 @@ public AtlasPatchRegistry(AtlasGraph graph) {
         LOG.info("AtlasPatchRegistry: found {} patches",
patchNameStatusMap.size()); - for (Map.Entry entry : patchNameStatusMap.entrySet()) { - LOG.info("AtlasPatchRegistry: patchId={}, status={}", entry.getKey(), entry.getValue()); - } +// for (Map.Entry entry : patchNameStatusMap.entrySet()) { +// LOG.info("AtlasPatchRegistry: patchId={}, status={}", entry.getKey(), entry.getValue()); +// } } public boolean isApplicable(String incomingId, String patchFile, int index) { @@ -146,13 +147,12 @@ private void createOrUpdatePatchVertex(AtlasGraph graph, String patchId, String setEncodedProperty(patchVertex, MODIFIED_BY_KEY, AtlasTypeDefGraphStoreV2.getCurrentUser()); } finally { graph.commit(); - patchNameStatusMap.put(patchId, patchStatus); } } private static Map getPatchNameStatusForAllRegistered(AtlasGraph graph) { - Map ret = new HashMap<>(); + Map ret = new ConcurrentHashMap<>(); AtlasPatches patches = getAllPatches(graph); for (AtlasPatch patch : patches.getPatches()) { diff --git a/repository/src/main/java/org/apache/atlas/repository/store/aliasstore/ESAliasStore.java b/repository/src/main/java/org/apache/atlas/repository/store/aliasstore/ESAliasStore.java index cd53a5c279..2d272cb8fc 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/aliasstore/ESAliasStore.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/aliasstore/ESAliasStore.java @@ -38,12 +38,7 @@ import org.springframework.stereotype.Component; import javax.inject.Inject; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.util.*; import static org.apache.atlas.ESAliasRequestBuilder.ESAliasAction.ADD; import static org.apache.atlas.repository.Constants.PERSONA_ENTITY_TYPE; @@ -70,6 +65,7 @@ @Component public class ESAliasStore implements IndexAliasStore { private static final Logger LOG = LoggerFactory.getLogger(ESAliasStore.class); + public static final String NEW_WILDCARD_DOMAIN_SUPER = "default/domain/*/super"; private final AtlasGraph graph; private final EntityGraphRetriever entityRetriever; @@ -90,7 +86,7 @@ public boolean createAlias(AtlasEntity entity) throws AtlasBaseException { ESAliasRequestBuilder requestBuilder = new ESAliasRequestBuilder(); if (PERSONA_ENTITY_TYPE.equals(entity.getTypeName())) { - requestBuilder.addAction(ADD, new AliasAction(getIndexNameFromAliasIfExists(VERTEX_INDEX_NAME), aliasName)); + requestBuilder.addAction(ADD, new AliasAction(getIndexNameFromAliasIfExists(VERTEX_INDEX_NAME), aliasName, getFilterForPersona(null, null))); } else { requestBuilder.addAction(ADD, new AliasAction(getIndexNameFromAliasIfExists(VERTEX_INDEX_NAME), aliasName, getFilterForPurpose(entity))); } @@ -129,6 +125,9 @@ public boolean updateAlias(AtlasEntity.AtlasEntityWithExtInfo accessControl, Atl if (PERSONA_ENTITY_TYPE.equals(accessControl.getEntity().getTypeName())) { filter = getFilterForPersona(accessControl, policy); + if (filter == null || filter.isEmpty()) { + filter = getEmptyFilter(); + } } else { filter = getFilterForPurpose(accessControl.getEntity()); } @@ -150,6 +149,10 @@ public boolean deleteAlias(String aliasName) throws AtlasBaseException { private Map getFilterForPersona(AtlasEntity.AtlasEntityWithExtInfo persona, AtlasEntity policy) throws AtlasBaseException { List> allowClauseList = new ArrayList<>(); + if (policy == null && persona == null){ + return getEmptyFilter(); + } + List policies = getPolicies(persona); if (policy != null) { policies.add(policy); @@ -212,8 +215,12 @@ private void 
personaPolicyToESDslClauses(List policies, } else if (getPolicyActions(policy).contains(ACCESS_READ_PERSONA_DOMAIN)) { for (String asset : assets) { - terms.add(asset); - allowClauseList.add(mapOf("wildcard", mapOf(QUALIFIED_NAME, asset + "/*"))); + if(!isAllDomain(asset)) { + terms.add(asset); + } else { + asset = NEW_WILDCARD_DOMAIN_SUPER; + } + allowClauseList.add(mapOf("wildcard", mapOf(QUALIFIED_NAME, asset + "*"))); } } else if (getPolicyActions(policy).contains(ACCESS_READ_PERSONA_SUB_DOMAIN)) { @@ -244,6 +251,9 @@ private void personaPolicyToESDslClauses(List policies, allowClauseList.add(mapOf("terms", mapOf(QUALIFIED_NAME, terms))); } + private boolean isAllDomain(String asset) { + return asset.equals("*/super") || asset.equals("*") || asset.equals(NEW_WILDCARD_DOMAIN_SUPER); + } private Map esClausesToFilter(List> allowClauseList) { if (CollectionUtils.isNotEmpty(allowClauseList)) { return mapOf("bool", mapOf("should", allowClauseList)); diff --git a/repository/src/main/java/org/apache/atlas/repository/store/bootstrap/AtlasTypeDefStoreInitializer.java b/repository/src/main/java/org/apache/atlas/repository/store/bootstrap/AtlasTypeDefStoreInitializer.java index 2a867452b6..0526a2466a 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/bootstrap/AtlasTypeDefStoreInitializer.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/bootstrap/AtlasTypeDefStoreInitializer.java @@ -26,7 +26,6 @@ import org.apache.atlas.RequestContext; import org.apache.atlas.authorize.AtlasAuthorizerFactory; import org.apache.atlas.exception.AtlasBaseException; -import org.apache.atlas.featureflag.FeatureFlagStore; import org.apache.atlas.ha.HAConfiguration; import org.apache.atlas.listener.ActiveStateChangeHandler; import org.apache.atlas.model.TypeCategory; @@ -74,7 +73,11 @@ import java.io.File; import java.nio.charset.StandardCharsets; import java.nio.file.Files; +import java.time.Duration; +import java.time.Instant; import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentHashMap; import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE; import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.PUBLIC_ONLY; @@ -147,7 +150,7 @@ private void loadBootstrapTypeDefs() { AtlasPatchRegistry patchRegistry = new AtlasPatchRegistry(graph); if (modelsDirContents != null && modelsDirContents.length > 0) { - Arrays.sort(modelsDirContents); + Arrays.sort(modelsDirContents); for (File folder : modelsDirContents) { if (folder.isFile()) { @@ -181,7 +184,7 @@ private void loadModelsInFolder(File typesDir, AtlasPatchRegistry patchRegistry) LOG.info("Types directory {} does not exist or not readable or has no typedef files", typesDirName ); } else { // sort the files by filename - Arrays.sort(typeDefFiles); + Arrays.sort(typeDefFiles); for (File typeDefFile : typeDefFiles) { if (typeDefFile.isFile()) { @@ -217,6 +220,62 @@ private void loadModelsInFolder(File typesDir, AtlasPatchRegistry patchRegistry) LOG.info("<== AtlasTypeDefStoreInitializer({})", typesDir); } +// /** +// * Load all the model files in the supplied folder followed by the contents of the patches folder. +// * @param typesDir + // */ +// private void loadModelsInFolder(File typesDir, AtlasPatchRegistry patchRegistry) { +// LOG.info("==> AtlasTypeDefStoreInitializer({})", typesDir); +// +// String typesDirName = typesDir.getName(); +// File[] typeDefFiles = typesDir.exists() ? 
typesDir.listFiles() : null; +// +// if (typeDefFiles == null || typeDefFiles.length == 0) { +// LOG.info("Types directory {} does not exist or not readable or has no typedef files", typesDirName); +// } else { +// // sort the files by filename +// Arrays.sort(typeDefFiles); +// +// List> futures = new ArrayList<>(); +// +// for (File typeDefFile : typeDefFiles) { +// if (!typeDefFile.isFile()) { +// continue; +// } +// +// CompletableFuture future = CompletableFuture.runAsync(() -> { +// try { +// String jsonStr = new String(Files.readAllBytes(typeDefFile.toPath()), StandardCharsets.UTF_8); +// AtlasTypesDef typesDef = AtlasType.fromJson(jsonStr, AtlasTypesDef.class); +// +// if (typesDef == null || typesDef.isEmpty()) { +// LOG.info("No type in file {}", typeDefFile.getAbsolutePath()); +// return; +// } +// +// AtlasTypesDef typesToCreate = getTypesToCreate(typesDef, typeRegistry); +// AtlasTypesDef typesToUpdate = getTypesToUpdate(typesDef, typeRegistry, true); +// +// if (!typesToCreate.isEmpty() || !typesToUpdate.isEmpty()) { +// typeDefStore.createUpdateTypesDef(typesToCreate, typesToUpdate); +// LOG.info("Created/Updated types defined in file {}", typeDefFile.getAbsolutePath()); +// } else { +// LOG.info("No new type in file {}", typeDefFile.getAbsolutePath()); +// } +// } catch (Throwable t) { +// LOG.error("error while registering types in file {}", typeDefFile.getAbsolutePath(), t); +// } +// }); +// +// futures.add(future); +// } +// // Wait for all futures to complete +// CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join(); +// applyTypePatches(typesDir.getPath(), patchRegistry); +// } +// LOG.info("<== AtlasTypeDefStoreInitializer({})", typesDir); +// } + public static AtlasTypesDef getTypesToCreate(AtlasTypesDef typesDef, AtlasTypeRegistry typeRegistry) { AtlasTypesDef typesToCreate = new AtlasTypesDef(); @@ -436,6 +495,72 @@ private static boolean isTypeUpdateApplicable(AtlasBaseTypeDef oldTypeDef, Atlas return ret; } +// private void applyTypePatches(String typesDirName, AtlasPatchRegistry patchRegistry) { +// String typePatchesDirName = typesDirName + File.separator + PATCHES_FOLDER_NAME; +// File typePatchesDir = new File(typePatchesDirName); +// File[] typePatchFiles = typePatchesDir.exists() ? 
typePatchesDir.listFiles() : null; +// +// if (typePatchFiles == null || typePatchFiles.length == 0) { +// LOG.info("Type patches directory {} does not exist or not readable or has no patches", typePatchesDirName); +// } else { +// LOG.info("Type patches directory {} is being processed", typePatchesDirName); +// +// // sort the files by filename +// Arrays.sort(typePatchFiles); +// +// PatchHandler[] patchHandlers = new PatchHandler[]{ +// new UpdateEnumDefPatchHandler(typeDefStore, typeRegistry), +// new AddAttributePatchHandler(typeDefStore, typeRegistry), +// new UpdateAttributePatchHandler(typeDefStore, typeRegistry), +// new RemoveLegacyRefAttributesPatchHandler(typeDefStore, typeRegistry), +// new UpdateTypeDefOptionsPatchHandler(typeDefStore, typeRegistry), +// new SetServiceTypePatchHandler(typeDefStore, typeRegistry), +// new UpdateAttributeMetadataHandler(typeDefStore, typeRegistry), +// new AddSuperTypePatchHandler(typeDefStore, typeRegistry), +// new AddMandatoryAttributePatchHandler(typeDefStore, typeRegistry) +// }; +// +// Map patchHandlerRegistry = new ConcurrentHashMap<>(); +// +// for (PatchHandler patchHandler : patchHandlers) { +// for (String supportedAction : patchHandler.getSupportedActions()) { +// patchHandlerRegistry.put(supportedAction, patchHandler); +// } +// } +// +// List> futures = new ArrayList<>(); +// +// for (File typePatchFile : typePatchFiles) { +// if (!typePatchFile.isFile()) { +// continue; +// } +// +// CompletableFuture future = CompletableFuture.runAsync(() -> { +// String patchFile = typePatchFile.getAbsolutePath(); +// LOG.info("Applying patches in file {}", patchFile); +// +// try { +// String jsonStr = new String(Files.readAllBytes(typePatchFile.toPath()), StandardCharsets.UTF_8); +// TypeDefPatches patches = AtlasType.fromJson(jsonStr, TypeDefPatches.class); +// +// if (patches == null || patches.getPatches().isEmpty()) { +// LOG.info("No patches in file {}", patchFile); +// return; +// } +// +// applyPatches(patchFile, patches, patchRegistry, patchHandlerRegistry); +// } catch (Throwable t) { +// LOG.error("Failed to apply patches in file {}. Ignored", patchFile, t); +// } +// }); +// +// futures.add(future); +// } +// +// // Wait for all futures to complete +// CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join(); +// } +// } private void applyTypePatches(String typesDirName, AtlasPatchRegistry patchRegistry) { String typePatchesDirName = typesDirName + File.separator + PATCHES_FOLDER_NAME; @@ -499,12 +624,14 @@ private void applyTypePatches(String typesDirName, AtlasPatchRegistry patchRegis PatchStatus status; try { + Instant start = Instant.now(); status = patchHandler.applyPatch(patch); + LOG.info("Patch applied for handler {} : {}", patch.getId(), Duration.between(start, Instant.now()).toMillis()); } catch (AtlasBaseException ex) { status = FAILED; LOG.error("Failed to apply {} (status: {}; action: {}) in file: {}. 
Ignored.", - patch.getId(), status.toString(), patch.getAction(), patchFile); + patch.getId(), status.toString(), patch.getAction(), patchFile); } patchRegistry.register(patch.id, patch.description, TYPEDEF_PATCH_TYPE, patch.action, status); @@ -521,6 +648,38 @@ private void applyTypePatches(String typesDirName, AtlasPatchRegistry patchRegis } } +// +// private void applyPatches(String patchFile, TypeDefPatches patches, AtlasPatchRegistry patchRegistry, Map patchHandlerRegistry) { +// int patchIndex = 0; +// for (TypeDefPatch patch : patches.getPatches()) { +// PatchHandler patchHandler = patchHandlerRegistry.get(patch.getAction()); +// +// if (patchHandler == null) { +// LOG.error("Unknown patch action {} in file {}. Ignored", patch.getAction(), patchFile); +// continue; +// } +// +// if (!patchRegistry.isApplicable(patch.getId(), patchFile, patchIndex++)) { +// LOG.info("{} in file: {} already {}. Ignoring.", patch.getId(), patchFile, patchRegistry.getStatus(patch.getId()).toString()); +// continue; +// } +// +// PatchStatus status = applyPatch(patchHandler, patch, patchFile); +// patchRegistry.register(patch.id, patch.description, TYPEDEF_PATCH_TYPE, patch.action, status); +// LOG.info("{} (status: {}; action: {}) in file: {}", patch.getId(), status.toString(), patch.getAction(), patchFile); +// } +// } +// +// private PatchStatus applyPatch(PatchHandler patchHandler, TypeDefPatch patch, String patchFile) { +// try { +// return patchHandler.applyPatch(patch); +// } catch (AtlasBaseException ex) { +// LOG.error("Failed to apply {} (status: FAILED; action: {}) in file: {}. Ignored.", +// patch.getId(), patch.getAction(), patchFile); +// return FAILED; +// } +// } + /** * typedef patch details */ diff --git a/repository/src/main/java/org/apache/atlas/repository/store/bootstrap/AuthPoliciesBootstrapper.java b/repository/src/main/java/org/apache/atlas/repository/store/bootstrap/AuthPoliciesBootstrapper.java index 9b1327ba1f..5f4015d100 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/bootstrap/AuthPoliciesBootstrapper.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/bootstrap/AuthPoliciesBootstrapper.java @@ -68,6 +68,11 @@ private void startInternal() { if ("atlas".equalsIgnoreCase(authorizer)) { loadBootstrapAuthPolicies(); + + boolean overridePolicies = ApplicationProperties.get().getBoolean("atlas.authorizer.policy.override", false); + if (overridePolicies) { + overrideBootstrapAuthPolicies(); + } } else { LOG.info("AuthPoliciesBootstrapper: startInternal: Skipping as not needed"); } @@ -96,6 +101,20 @@ private void loadBootstrapAuthPolicies() { LOG.info("<== AuthPoliciesBootstrapper.loadBootstrapAuthPolicies()"); } + private void overrideBootstrapAuthPolicies() { + LOG.info("==> AuthPoliciesBootstrapper.overrideBootstrapAuthPolicies()"); + RequestContext.get().setSkipAuthorizationCheck(true); + try { + String atlasHomeDir = System.getProperty("atlas.home"); + String policiesDirName = (StringUtils.isEmpty(atlasHomeDir) ? "." 
: atlasHomeDir) + File.separator + "override-policies";
+            File topPoliciesDir = new File(policiesDirName);
+            loadPoliciesInFolder(topPoliciesDir);
+        } finally {
+            RequestContext.get().setSkipAuthorizationCheck(false);
+        }
+        LOG.info("<== AuthPoliciesBootstrapper.overrideBootstrapAuthPolicies()");
+    }
+
     private void loadPoliciesInFolder (File folder) {
         LOG.info("==> AuthPoliciesBootstrapper.loadPoliciesInFolder({})", folder);
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasEntityStore.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasEntityStore.java
index 02022fe040..5fd7680281 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasEntityStore.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasEntityStore.java
@@ -25,7 +25,6 @@
 import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo;
 import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
 import org.apache.atlas.model.instance.AtlasEntityHeader;
-import org.apache.atlas.model.instance.AtlasEntityHeaders;
 import org.apache.atlas.model.instance.AtlasObjectId;
 import org.apache.atlas.model.instance.AtlasHasLineageRequests;
 import org.apache.atlas.model.instance.EntityMutationResponse;
@@ -68,7 +67,6 @@ public interface AtlasEntityStore {
      */
     AtlasEntityWithExtInfo getById(String guid, boolean isMinExtInfo, boolean ignoreRelationships) throws AtlasBaseException;

-
     /**
      * Get entity header for the given GUID
      * @param guid
@@ -266,6 +264,11 @@ EntityMutationResponse deleteByUniqueAttributes(List objectIds)
      */
     EntityMutationResponse deleteByIds(List<String> guid) throws AtlasBaseException;

+    /*
+     * Repair classification mappings
+     */
+    public void repairClassificationMappings(final String guid) throws AtlasBaseException;
+
     /*
      * Return list of deleted entity guids
      */
@@ -293,6 +296,10 @@ EntityMutationResponse deleteByUniqueAttributes(List objectIds)
      */
     void deleteClassification(String guid, String classificationName) throws AtlasBaseException;

+    void deleteClassifications(String guid, List<AtlasClassification> classificationName) throws AtlasBaseException;
+
+    public void deleteClassifications(final String guid, final List<AtlasClassification> classifications, final String associatedEntityGuid) throws AtlasBaseException;
+
     void deleteClassification(String guid, String classificationName, String associatedEntityGuid) throws AtlasBaseException;

     List<AtlasClassification> getClassifications(String guid) throws AtlasBaseException;
@@ -359,4 +366,6 @@ EntityMutationResponse deleteByUniqueAttributes(List objectIds)

     void repairMeaningAttributeForTerms(List<String> termGuids) throws AtlasBaseException;

+    void repairAccesscontrolAlias(String guid) throws AtlasBaseException;
+
 }
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasTypeDefGraphStore.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasTypeDefGraphStore.java
index a4f818980f..f359aab553 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasTypeDefGraphStore.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasTypeDefGraphStore.java
@@ -43,12 +43,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.Arrays;
+import java.util.*;

 import static org.apache.atlas.model.discovery.SearchParameters.ALL_ENTITY_TYPES;
 import static
org.apache.atlas.model.discovery.SearchParameters.ALL_CLASSIFICATION_TYPES; @@ -878,42 +873,31 @@ private void rectifyTypeErrorsIfAny(AtlasTypesDef typesDef) { } private void removeDuplicateTypeIfAny(List defList) { + if (defList == null || defList.isEmpty()) { + return; + } + final Set entityDefNames = new HashSet<>(); + Iterator iterator = defList.iterator(); - for (int i = 0; i < defList.size(); i++) { - if (!entityDefNames.add((defList.get(i)).getName())) { - LOG.warn(" Found Duplicate Type => " + defList.get(i).getName()); - defList.remove(i); - i--; + while (iterator.hasNext()) { + T def = iterator.next(); + if (!entityDefNames.add(def.getName())) { + LOG.warn("Found Duplicate Type => " + def.getName()); + iterator.remove(); } } } - private void rectifyAttributesIfNeeded(final Set entityNames, AtlasStructDef structDef) { List attributeDefs = structDef.getAttributeDefs(); - if (CollectionUtils.isNotEmpty(attributeDefs)) { - for (AtlasAttributeDef attributeDef : attributeDefs) { - if (!hasOwnedReferenceConstraint(attributeDef.getConstraints())) { - continue; - } - - Set referencedTypeNames = AtlasTypeUtil.getReferencedTypeNames(attributeDef.getTypeName()); - - boolean valid = false; - - for (String referencedTypeName : referencedTypeNames) { - if (entityNames.contains(referencedTypeName)) { - valid = true; - break; - } - } - - if (!valid) { - rectifyOwnedReferenceError(structDef, attributeDef); - } - } + if (attributeDefs != null) { + attributeDefs.stream() + .filter(attributeDef -> hasOwnedReferenceConstraint(attributeDef.getConstraints())) + .filter(attributeDef -> AtlasTypeUtil.getReferencedTypeNames(attributeDef.getTypeName()).stream() + .noneMatch(entityNames::contains)) + .forEach(attributeDef -> rectifyOwnedReferenceError(structDef, attributeDef)); } } @@ -932,16 +916,18 @@ private boolean hasOwnedReferenceConstraint(List constraints private void rectifyOwnedReferenceError(AtlasStructDef structDef, AtlasAttributeDef attributeDef) { List constraints = attributeDef.getConstraints(); - if (CollectionUtils.isNotEmpty(constraints)) { - for (int i = 0; i < constraints.size(); i++) { - AtlasConstraintDef constraint = constraints.get(i); + if (constraints == null || constraints.isEmpty()) { + return; + } - if (constraint.isConstraintType(AtlasConstraintDef.CONSTRAINT_TYPE_OWNED_REF)) { - LOG.warn("Invalid constraint ownedRef for attribute {}.{}", structDef.getName(), attributeDef.getName()); + Iterator iterator = constraints.iterator(); - constraints.remove(i); - i--; - } + while (iterator.hasNext()) { + AtlasConstraintDef constraint = iterator.next(); + + if (constraint.isConstraintType(AtlasConstraintDef.CONSTRAINT_TYPE_OWNED_REF)) { + LOG.warn("Invalid constraint ownedRef for attribute {}.{}", structDef.getName(), attributeDef.getName()); + iterator.remove(); } } } diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/DeleteHandlerV1.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/DeleteHandlerV1.java index a4358e2967..bf0fb2f8af 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/DeleteHandlerV1.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/DeleteHandlerV1.java @@ -53,6 +53,7 @@ import org.apache.atlas.type.AtlasStructType.AtlasAttribute.AtlasRelationshipEdgeDirection; import org.apache.atlas.utils.AtlasEntityUtil; import org.apache.atlas.utils.AtlasPerfMetrics; +import org.apache.atlas.utils.AtlasPerfMetrics.MetricRecorder; import 
org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.MapUtils; import org.apache.commons.lang.StringUtils; @@ -118,6 +119,7 @@ public DeleteHandlerV1(AtlasGraph graph, AtlasTypeRegistry typeRegistry, boolean * @throws AtlasException */ public void deleteEntities(Collection instanceVertices) throws AtlasBaseException { + MetricRecorder metric = RequestContext.get().startMetricRecord("deleteEntities"); final RequestContext requestContext = RequestContext.get(); final Set deletionCandidateVertices = new HashSet<>(); @@ -160,6 +162,7 @@ public void deleteEntities(Collection instanceVertices) throws Atla } } } + RequestContext.get().endMetricRecord(metric); } /** @@ -754,6 +757,7 @@ protected void deleteEdge(AtlasEdge edge, boolean updateInverseAttribute, boolea } protected void deleteTypeVertex(AtlasVertex instanceVertex, TypeCategory typeCategory, boolean force) throws AtlasBaseException { + MetricRecorder metric = RequestContext.get().startMetricRecord("deleteTypeVertex"); switch (typeCategory) { case STRUCT: deleteTypeVertex(instanceVertex, force); @@ -771,6 +775,7 @@ protected void deleteTypeVertex(AtlasVertex instanceVertex, TypeCategory typeCat default: throw new IllegalStateException("Type category " + typeCategory + " not handled"); } + RequestContext.get().endMetricRecord(metric); } /** @@ -779,6 +784,7 @@ protected void deleteTypeVertex(AtlasVertex instanceVertex, TypeCategory typeCat * @throws AtlasException */ protected void deleteTypeVertex(AtlasVertex instanceVertex, boolean force) throws AtlasBaseException { + MetricRecorder metric = RequestContext.get().startMetricRecord("deleteTypeVertex"); if (LOG.isDebugEnabled()) { LOG.debug("Deleting {}, force={}", string(instanceVertex), force); } @@ -852,6 +858,7 @@ protected void deleteTypeVertex(AtlasVertex instanceVertex, boolean force) throw } deleteVertex(instanceVertex, force); + RequestContext.get().endMetricRecord(metric); } protected AtlasAttribute getAttributeForEdge(AtlasEdge edge) throws AtlasBaseException { @@ -888,6 +895,7 @@ protected void deleteEdgeBetweenVertices(AtlasVertex outVertex, AtlasVertex inVe if (skipVertexForDelete(outVertex)) { return; } + MetricRecorder metric = RequestContext.get().startMetricRecord("deleteEdgeBetweenVertices"); AtlasStructType parentType = (AtlasStructType) typeRegistry.getType(GraphHelper.getTypeName(outVertex)); String propertyName = getQualifiedAttributePropertyKey(parentType, attribute.getName()); @@ -988,9 +996,11 @@ protected void deleteEdgeBetweenVertices(AtlasVertex outVertex, AtlasVertex inVe requestContext.recordEntityUpdate(entityRetriever.toAtlasEntityHeader(outVertex)); } } + RequestContext.get().endMetricRecord(metric); } protected void deleteVertex(AtlasVertex instanceVertex, boolean force) throws AtlasBaseException { + MetricRecorder metric = RequestContext.get().startMetricRecord("deleteVertex"); if (LOG.isDebugEnabled()) { LOG.debug("Setting the external references to {} to null(removing edges)", string(instanceVertex)); } @@ -1020,7 +1030,6 @@ protected void deleteVertex(AtlasVertex instanceVertex, boolean force) throws At if (!isDeletedEntity(outVertex)) { AtlasVertex inVertex = edge.getInVertex(); AtlasAttribute attribute = getAttributeForEdge(edge); - deleteEdgeBetweenVertices(outVertex, inVertex, attribute); } } @@ -1028,6 +1037,7 @@ protected void deleteVertex(AtlasVertex instanceVertex, boolean force) throws At } _deleteVertex(instanceVertex, force); + RequestContext.get().endMetricRecord(metric); } private boolean 
isDeletedEntity(AtlasVertex entityVertex) { @@ -1110,6 +1120,7 @@ private void deleteAllClassifications(AtlasVertex instanceVertex) throws AtlasBa if (!ACTIVE.equals(getState(instanceVertex))) return; + MetricRecorder metric = RequestContext.get().startMetricRecord("deleteAllClassifications"); List classificationEdges = getAllClassificationEdges(instanceVertex); for (AtlasEdge edge : classificationEdges) { @@ -1127,6 +1138,7 @@ private void deleteAllClassifications(AtlasVertex instanceVertex) throws AtlasBa deleteEdgeReference(edge, CLASSIFICATION, false, false, instanceVertex); } + RequestContext.get().endMetricRecord(metric); } private boolean skipVertexForDelete(AtlasVertex vertex) { @@ -1336,12 +1348,13 @@ public void createAndQueueTask(String taskType, AtlasEdge relationshipEdge, Atla } public void createAndQueueClassificationRefreshPropagationTask(AtlasEdge edge) throws AtlasBaseException{ - if (taskManagement==null) { LOG.warn("Task management is null, can't schedule task now"); return; } + MetricRecorder metric = RequestContext.get().startMetricRecord("createAndQueueClassificationRefreshPropagationTask"); + String currentUser = RequestContext.getCurrentUser(); boolean isRelationshipEdge = isRelationshipEdge(edge); boolean isTermEntityEdge = GraphHelper.isTermEntityEdge(edge); @@ -1377,7 +1390,7 @@ public void createAndQueueClassificationRefreshPropagationTask(AtlasEdge edge) t RequestContext.get().queueTask(task); } - + RequestContext.get().endMetricRecord(metric); } private boolean skipClassificationTaskCreation(String classificationId) throws AtlasBaseException { diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/SoftDeleteHandlerV1.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/SoftDeleteHandlerV1.java index ed103e2402..18cdf30949 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/SoftDeleteHandlerV1.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/SoftDeleteHandlerV1.java @@ -30,6 +30,7 @@ import org.apache.atlas.repository.store.graph.v2.AtlasRelationshipStoreV2; import org.apache.atlas.tasks.TaskManagement; import org.apache.atlas.type.AtlasTypeRegistry; +import org.apache.atlas.utils.AtlasPerfMetrics.MetricRecorder; import org.apache.commons.collections.CollectionUtils; import javax.inject.Inject; @@ -68,6 +69,7 @@ protected void _deleteVertex(AtlasVertex instanceVertex, boolean force) { @Override protected void deleteEdge(AtlasEdge edge, boolean force) throws AtlasBaseException { + MetricRecorder metric = RequestContext.get().startMetricRecord("deleteEdge"); try { if (LOG.isDebugEnabled()) { LOG.debug("==> SoftDeleteHandlerV1.deleteEdge({}, {})", GraphHelper.string(edge), force); @@ -103,7 +105,8 @@ protected void deleteEdge(AtlasEdge edge, boolean force) throws AtlasBaseExcepti } catch (Exception e) { LOG.error("Error while deleting edge {}", GraphHelper.string(edge), e); throw new AtlasBaseException(e); + } finally { + RequestContext.get().endMetricRecord(metric); } - } } diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasEntityChangeNotifier.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasEntityChangeNotifier.java index 7fc95c3981..e0133148ea 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasEntityChangeNotifier.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasEntityChangeNotifier.java @@ -171,6 +171,7 @@ public void 
onClassificationAddedToEntity(AtlasEntity entity, List entities, List addedClassifications, boolean forceInline) throws AtlasBaseException { if (isV2EntityNotificationEnabled) { diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasEntityStoreV2.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasEntityStoreV2.java index 6e3487f8d3..5e627dfa07 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasEntityStoreV2.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasEntityStoreV2.java @@ -49,6 +49,7 @@ import org.apache.atlas.repository.graphdb.AtlasVertex; import org.apache.atlas.repository.patches.PatchContext; import org.apache.atlas.repository.patches.ReIndexPatch; +import org.apache.atlas.repository.store.aliasstore.ESAliasStore; import org.apache.atlas.repository.store.graph.AtlasEntityStore; import org.apache.atlas.repository.store.graph.AtlasRelationshipStore; import org.apache.atlas.repository.store.graph.EntityGraphDiscovery; @@ -58,10 +59,15 @@ import org.apache.atlas.repository.store.graph.v1.RestoreHandlerV1; import org.apache.atlas.repository.store.graph.v2.preprocessor.AuthPolicyPreProcessor; import org.apache.atlas.repository.store.graph.v2.preprocessor.ConnectionPreProcessor; +import org.apache.atlas.repository.store.graph.v2.preprocessor.accesscontrol.StakeholderPreProcessor; +import org.apache.atlas.repository.store.graph.v2.preprocessor.contract.ContractPreProcessor; +import org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh.StakeholderTitlePreProcessor; import org.apache.atlas.repository.store.graph.v2.preprocessor.resource.LinkPreProcessor; import org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessor; import org.apache.atlas.repository.store.graph.v2.preprocessor.accesscontrol.PersonaPreProcessor; import org.apache.atlas.repository.store.graph.v2.preprocessor.accesscontrol.PurposePreProcessor; +import org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh.DataProductPreProcessor; +import org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh.DataDomainPreProcessor; import org.apache.atlas.repository.store.graph.v2.preprocessor.glossary.CategoryPreProcessor; import org.apache.atlas.repository.store.graph.v2.preprocessor.glossary.GlossaryPreProcessor; import org.apache.atlas.repository.store.graph.v2.preprocessor.glossary.TermPreProcessor; @@ -110,6 +116,7 @@ import static org.apache.atlas.repository.graph.GraphHelper.getStatus; import static org.apache.atlas.repository.store.graph.v2.EntityGraphMapper.validateLabels; import static org.apache.atlas.repository.store.graph.v2.tasks.MeaningsTaskFactory.*; +import static org.apache.atlas.repository.util.AccessControlUtils.REL_ATTR_POLICIES; import static org.apache.atlas.type.Constants.HAS_LINEAGE; import static org.apache.atlas.type.Constants.HAS_LINEAGE_VALID; import static org.apache.atlas.type.Constants.MEANINGS_TEXT_PROPERTY_KEY; @@ -142,6 +149,8 @@ public class AtlasEntityStoreV2 implements AtlasEntityStore { private final AtlasRelationshipStore atlasRelationshipStore; private final FeatureFlagStore featureFlagStore; + private final ESAliasStore esAliasStore; + @Inject public AtlasEntityStoreV2(AtlasGraph graph, DeleteHandlerDelegate deleteDelegate, RestoreHandlerV1 restoreHandlerV1, AtlasTypeRegistry typeRegistry, IAtlasEntityChangeNotifier entityChangeNotifier, EntityGraphMapper entityGraphMapper, TaskManagement taskManagement, @@ -158,6 +167,7 @@ 
public AtlasEntityStoreV2(AtlasGraph graph, DeleteHandlerDelegate deleteDelegate this.taskManagement = taskManagement; this.atlasRelationshipStore = atlasRelationshipStore; this.featureFlagStore = featureFlagStore; + this.esAliasStore = new ESAliasStore(graph, entityRetriever); try { this.discovery = new EntityDiscoveryService(typeRegistry, graph, null, null, null, null); @@ -576,6 +586,8 @@ public EntityMutationResponse deleteById(final String guid) throws AtlasBaseExce throw new AtlasBaseException(AtlasErrorCode.INSTANCE_GUID_NOT_FOUND, guid); } + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("deleteById"); + Collection deletionCandidates = new ArrayList<>(); AtlasVertex vertex = AtlasGraphUtilsV2.findByGuid(graph, guid); @@ -601,6 +613,7 @@ public EntityMutationResponse deleteById(final String guid) throws AtlasBaseExce // Notify the change listeners entityChangeNotifier.onEntitiesMutated(ret, false); atlasRelationshipStore.onRelationshipsMutated(RequestContext.get().getRelationshipMutationMap()); + RequestContext.get().endMetricRecord(metricRecorder); return ret; } @@ -763,10 +776,12 @@ public EntityMutationResponse deleteByUniqueAttributes(AtlasEntityType entityTyp @Override @GraphTransaction public EntityMutationResponse deleteByUniqueAttributes(List objectIds) throws AtlasBaseException { + if (CollectionUtils.isEmpty(objectIds)) { throw new AtlasBaseException(AtlasErrorCode.INVALID_PARAMETERS); } + MetricRecorder metric = RequestContext.get().startMetricRecord("deleteByUniqueAttributes"); EntityMutationResponse ret = new EntityMutationResponse(); Collection deletionCandidates = new ArrayList<>(); try { @@ -810,10 +825,11 @@ public EntityMutationResponse deleteByUniqueAttributes(List objec // Notify the change listeners entityChangeNotifier.onEntitiesMutated(ret, false); atlasRelationshipStore.onRelationshipsMutated(RequestContext.get().getRelationshipMutationMap()); - } catch (Exception e) { LOG.error("Failed to delete objects:{}", objectIds.stream().map(AtlasObjectId::getUniqueAttributes).collect(Collectors.toList()), e); throw new AtlasBaseException(e); + } finally { + RequestContext.get().endMetricRecord(metric); } return ret; } @@ -913,6 +929,30 @@ public String getGuidByUniqueAttributes(AtlasEntityType entityType, Map repairClassificationMappings({})", guid); + } + + if (StringUtils.isEmpty(guid)) { + throw new AtlasBaseException(AtlasErrorCode.INSTANCE_GUID_NOT_FOUND, guid); + } + + AtlasVertex entityVertex = AtlasGraphUtilsV2.findByGuid(graph, guid); + + if (entityVertex == null) { + throw new AtlasBaseException(AtlasErrorCode.INSTANCE_GUID_NOT_FOUND, guid); + } + + entityGraphMapper.repairClassificationMappings(entityVertex); + + if (LOG.isDebugEnabled()) { + LOG.debug("<== repairClassificationMappings({})", guid); + } + } + @Override @GraphTransaction public void addClassifications(final String guid, final List classifications) throws AtlasBaseException { @@ -1071,6 +1111,42 @@ public void deleteClassification(final String guid, final String classificationN deleteClassification(guid, classificationName, null); } + @Override + @GraphTransaction + public void deleteClassifications(final String guid, final List classifications) throws AtlasBaseException { + deleteClassifications(guid, classifications, null); + } + + @Override + @GraphTransaction + public void deleteClassifications(final String guid, final List classifications, final String associatedEntityGuid) throws AtlasBaseException { + if (StringUtils.isEmpty(guid)) { + throw new 
AtlasBaseException(AtlasErrorCode.INVALID_PARAMETERS, "Guid(s) not specified"); + } + if (CollectionUtils.isEmpty(classifications)) { + throw new AtlasBaseException(AtlasErrorCode.INVALID_PARAMETERS, "classifications not specified"); + } + + GraphTransactionInterceptor.lockObjectAndReleasePostCommit(guid); + + AtlasEntityHeader entityHeader = entityRetriever.toAtlasEntityHeaderWithClassifications(guid); + + // verify authorization only for removal of directly associated classification and not propagated one. + for (AtlasClassification classification: classifications){ + if (StringUtils.isEmpty(associatedEntityGuid) || guid.equals(associatedEntityGuid)) { + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_REMOVE_CLASSIFICATION, + entityHeader, new AtlasClassification(classification.getTypeName())), + "remove classification: guid=", guid, ", classification=", classification.getDisplayName()); + } + + if (LOG.isDebugEnabled()) { + LOG.debug("Deleting classification={} from entity={}", classification.getDisplayName(), guid); + } + } + entityGraphMapper.deleteClassifications(guid, classifications, associatedEntityGuid); + } + + @Override @GraphTransaction public void deleteClassification(final String guid, final String classificationName, final String associatedEntityGuid) throws AtlasBaseException { @@ -1095,8 +1171,6 @@ entityHeader, new AtlasClassification(classificationName)), if (LOG.isDebugEnabled()) { LOG.debug("Deleting classification={} from entity={}", classificationName, guid); } - - entityGraphMapper.deleteClassification(guid, classificationName, associatedEntityGuid); } @@ -1409,6 +1483,33 @@ public void addLabels(String guid, Set labels) throws AtlasBaseException } } + private Map getMap(String key, Object value) { + Map map = new HashMap<>(); + map.put(key, value); + return map; + } + +// private Boolean isAccessAllowed(AtlasEntity entity, String action) { +// Map entityAttr = entity.getAttributes(); +// Map entityForAuth = getMap("objects", getMap("assetCriteria", getMap("attributes", entityAttr)));; +// +// String[] assetQualifiedNames = new String[1]; +// assetQualifiedNames[0] = (String) entity.getAttribute("qualifiedName"); +// ((Map) entityForAuth.get("objects")).put("assetQualifiedNames", assetQualifiedNames); +// +// +// String[] userArray = new String[1]; +// userArray[0] = RequestContext.getCurrentUser(); +// entityForAuth.put("subjects", getMap("users", userArray)); +// +// String[] actionArray = new String[1]; +// actionArray[0] = action; +// entityForAuth.put("actions", actionArray); +// +// Boolean accessAllowed = this.atlasAuthorization.isAccessAllowed(entityForAuth, RequestContext.getCurrentUser()); +// return accessAllowed; +// } + private EntityMutationResponse createOrUpdate(EntityStream entityStream, boolean isPartialUpdate, boolean replaceClassifications, boolean replaceBusinessAttributes, boolean isOverwriteBusinessAttribute) throws AtlasBaseException { if (LOG.isDebugEnabled()) { LOG.debug("==> createOrUpdate()"); @@ -1485,21 +1586,23 @@ private EntityMutationResponse createOrUpdate(EntityStream entityStream, boolean // Check if authorized to update entities if (!reqContext.isImportInProgress()) { for (AtlasEntity entity : context.getUpdatedEntities()) { - AtlasEntityHeader entityHeaderWithClassifications = entityRetriever.toAtlasEntityHeaderWithClassifications(entity.getGuid()); - AtlasEntityHeader entityHeader = new AtlasEntityHeader(entity); + 
if(!PreProcessor.skipUpdateAuthCheckTypes.contains(entity.getTypeName())){ + AtlasEntityHeader entityHeaderWithClassifications = entityRetriever.toAtlasEntityHeaderWithClassifications(entity.getGuid()); + AtlasEntityHeader entityHeader = new AtlasEntityHeader(entity); - if(CollectionUtils.isNotEmpty(entityHeaderWithClassifications.getClassifications())) { - entityHeader.setClassifications(entityHeaderWithClassifications.getClassifications()); - } + if(CollectionUtils.isNotEmpty(entityHeaderWithClassifications.getClassifications())) { + entityHeader.setClassifications(entityHeaderWithClassifications.getClassifications()); + } - AtlasEntity diffEntity = reqContext.getDifferentialEntity(entity.getGuid()); - boolean skipAuthBaseConditions = diffEntity != null && MapUtils.isEmpty(diffEntity.getCustomAttributes()) && MapUtils.isEmpty(diffEntity.getBusinessAttributes()) && CollectionUtils.isEmpty(diffEntity.getClassifications()) && CollectionUtils.isEmpty(diffEntity.getLabels()); - boolean skipAuthMeaningsUpdate = diffEntity != null && MapUtils.isNotEmpty(diffEntity.getRelationshipAttributes()) && diffEntity.getRelationshipAttributes().containsKey("meanings") && diffEntity.getRelationshipAttributes().size() == 1 && MapUtils.isEmpty(diffEntity.getAttributes()); - boolean skipAuthStarredDetailsUpdate = diffEntity != null && MapUtils.isEmpty(diffEntity.getRelationshipAttributes()) && MapUtils.isNotEmpty(diffEntity.getAttributes()) && diffEntity.getAttributes().size() == 3 && diffEntity.getAttributes().containsKey(ATTR_STARRED_BY) && diffEntity.getAttributes().containsKey(ATTR_STARRED_COUNT) && diffEntity.getAttributes().containsKey(ATTR_STARRED_DETAILS_LIST); - if (skipAuthBaseConditions && (skipAuthMeaningsUpdate || skipAuthStarredDetailsUpdate)) { - //do nothing, only diff is relationshipAttributes.meanings or starred, allow update - } else { - AtlasAuthorizationUtils.verifyUpdateEntityAccess(typeRegistry, entityHeader,"update entity: type=" + entity.getTypeName()); + AtlasEntity diffEntity = reqContext.getDifferentialEntity(entity.getGuid()); + boolean skipAuthBaseConditions = diffEntity != null && MapUtils.isEmpty(diffEntity.getCustomAttributes()) && MapUtils.isEmpty(diffEntity.getBusinessAttributes()) && CollectionUtils.isEmpty(diffEntity.getClassifications()) && CollectionUtils.isEmpty(diffEntity.getLabels()); + boolean skipAuthMeaningsUpdate = diffEntity != null && MapUtils.isNotEmpty(diffEntity.getRelationshipAttributes()) && diffEntity.getRelationshipAttributes().containsKey("meanings") && diffEntity.getRelationshipAttributes().size() == 1 && MapUtils.isEmpty(diffEntity.getAttributes()); + boolean skipAuthStarredDetailsUpdate = diffEntity != null && MapUtils.isEmpty(diffEntity.getRelationshipAttributes()) && MapUtils.isNotEmpty(diffEntity.getAttributes()) && diffEntity.getAttributes().size() == 3 && diffEntity.getAttributes().containsKey(ATTR_STARRED_BY) && diffEntity.getAttributes().containsKey(ATTR_STARRED_COUNT) && diffEntity.getAttributes().containsKey(ATTR_STARRED_DETAILS_LIST); + if (skipAuthBaseConditions && (skipAuthMeaningsUpdate || skipAuthStarredDetailsUpdate)) { + //do nothing, only diff is relationshipAttributes.meanings or starred, allow update + } else { + AtlasAuthorizationUtils.verifyUpdateEntityAccess(typeRegistry, entityHeader,"update entity: type=" + entity.getTypeName()); + } } } } @@ -1535,8 +1638,7 @@ private void executePreProcessor(EntityMutationContext context) throws AtlasBase PreProcessor preProcessor; List copyOfCreated = new 
ArrayList<>(context.getCreatedEntities()); - for (int i = 0; i < copyOfCreated.size() ; i++) { - AtlasEntity entity = ((List) context.getCreatedEntities()).get(i); + for (AtlasEntity entity : copyOfCreated) { entityType = context.getType(entity.getGuid()); preProcessor = getPreProcessor(entityType.getTypeName()); @@ -1546,8 +1648,7 @@ private void executePreProcessor(EntityMutationContext context) throws AtlasBase } List copyOfUpdated = new ArrayList<>(context.getUpdatedEntities()); - for (int i = 0; i < copyOfUpdated.size() ; i++) { - AtlasEntity entity = ((List) context.getUpdatedEntities()).get(i); + for (AtlasEntity entity: copyOfUpdated) { entityType = context.getType(entity.getGuid()); preProcessor = getPreProcessor(entityType.getTypeName()); @@ -1559,7 +1660,7 @@ private void executePreProcessor(EntityMutationContext context) throws AtlasBase private EntityMutationContext preCreateOrUpdate(EntityStream entityStream, EntityGraphMapper entityGraphMapper, boolean isPartialUpdate) throws AtlasBaseException { MetricRecorder metric = RequestContext.get().startMetricRecord("preCreateOrUpdate"); - this.graph.setEnableCache(RequestContext.get().isCacheEnabled()); + EntityGraphDiscovery graphDiscoverer = new AtlasEntityGraphDiscoveryV2(graph, typeRegistry, entityStream, entityGraphMapper); EntityGraphDiscoveryContext discoveryContext = graphDiscoverer.discoverEntities(); EntityMutationContext context = new EntityMutationContext(discoveryContext); @@ -1789,7 +1890,7 @@ public PreProcessor getPreProcessor(String typeName) { switch (typeName) { case ATLAS_GLOSSARY_ENTITY_TYPE: - preProcessor = new GlossaryPreProcessor(typeRegistry, entityRetriever); + preProcessor = new GlossaryPreProcessor(typeRegistry, entityRetriever, graph); break; case ATLAS_GLOSSARY_TERM_ENTITY_TYPE: @@ -1800,6 +1901,14 @@ public PreProcessor getPreProcessor(String typeName) { preProcessor = new CategoryPreProcessor(typeRegistry, entityRetriever, graph, taskManagement, entityGraphMapper); break; + case DATA_DOMAIN_ENTITY_TYPE: + preProcessor = new DataDomainPreProcessor(typeRegistry, entityRetriever, graph, this); + break; + + case DATA_PRODUCT_ENTITY_TYPE: + preProcessor = new DataProductPreProcessor(typeRegistry, entityRetriever, graph, this); + break; + case QUERY_ENTITY_TYPE: preProcessor = new QueryPreProcessor(typeRegistry, entityRetriever); break; @@ -1821,7 +1930,11 @@ public PreProcessor getPreProcessor(String typeName) { break; case POLICY_ENTITY_TYPE: - preProcessor = new AuthPolicyPreProcessor(graph, typeRegistry, entityRetriever, featureFlagStore); + preProcessor = new AuthPolicyPreProcessor(graph, typeRegistry, entityRetriever); + break; + + case STAKEHOLDER_ENTITY_TYPE: + preProcessor = new StakeholderPreProcessor(graph, typeRegistry, entityRetriever, this); break; case CONNECTION_ENTITY_TYPE: @@ -1835,6 +1948,14 @@ public PreProcessor getPreProcessor(String typeName) { case README_ENTITY_TYPE: preProcessor = new ReadmePreProcessor(typeRegistry, entityRetriever); break; + + case CONTRACT_ENTITY_TYPE: + preProcessor = new ContractPreProcessor(graph, typeRegistry, entityRetriever, storeDifferentialAudits, discovery); + break; + + case STAKEHOLDER_TITLE_ENTITY_TYPE: + preProcessor = new StakeholderTitlePreProcessor(graph, typeRegistry, entityRetriever); + break; } return preProcessor; @@ -1879,7 +2000,7 @@ private EntityMutationResponse deleteVertices(Collection deletionCa Collection categories = new ArrayList<>(); Collection others = new ArrayList<>(); - MetricRecorder metric = 
RequestContext.get().startMetricRecord("filterCategoryVertices"); + MetricRecorder metric = RequestContext.get().startMetricRecord("deleteVertices_filterCategoryVertices"); for (AtlasVertex vertex : deletionCandidates) { String typeName = getTypeName(vertex); @@ -1895,7 +2016,7 @@ private EntityMutationResponse deleteVertices(Collection deletionCa } } RequestContext.get().endMetricRecord(metric); - + MetricRecorder metric2 = RequestContext.get().startMetricRecord("deleteVertices"); if (CollectionUtils.isNotEmpty(categories)) { entityGraphMapper.removeAttrForCategoryDelete(categories); deleteDelegate.getHandler(DeleteType.HARD).deleteEntities(categories); @@ -1924,11 +2045,11 @@ private EntityMutationResponse deleteVertices(Collection deletionCa for (AtlasEntityHeader entity : req.getUpdatedEntities()) { response.addEntity(UPDATE, entity); } + RequestContext.get().endMetricRecord(metric2); } catch (Exception e) { LOG.error("Delete vertices request failed", e); throw new AtlasBaseException(e); } - return response; } @@ -2656,6 +2777,35 @@ private void repairMeanings(AtlasVertex assetVertex) { } } + @Override + public void repairAccesscontrolAlias(String guid) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metric = RequestContext.get().startMetricRecord("repairAlias"); + // Fetch accesscontrolEntity with extInfo + AtlasEntity.AtlasEntityWithExtInfo accesscontrolEntity = entityRetriever.toAtlasEntityWithExtInfo(guid); + + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_UPDATE, new AtlasEntityHeader(accesscontrolEntity.getEntity()))); + + // Validate accesscontrolEntity status + if (accesscontrolEntity.getEntity().getStatus() != ACTIVE) { + throw new AtlasBaseException(AtlasErrorCode.INSTANCE_GUID_DELETED, guid); + } + + // Validate accesscontrolEntity type + String entityType = accesscontrolEntity.getEntity().getTypeName(); + if (!PERSONA_ENTITY_TYPE.equals(entityType)) { + throw new AtlasBaseException(AtlasErrorCode.OPERATION_NOT_SUPPORTED, entityType); + } + + List policies = (List) accesscontrolEntity.getEntity().getRelationshipAttribute(REL_ATTR_POLICIES); + for (AtlasObjectId policy : policies) { + accesscontrolEntity.addReferredEntity(entityRetriever.toAtlasEntity(policy)); + } + + // Rebuild alias + this.esAliasStore.updateAlias(accesscontrolEntity, null); + + RequestContext.get().endMetricRecord(metric); + } } diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasGraphUtilsV2.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasGraphUtilsV2.java index 5f97d5645c..42d30d39ca 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasGraphUtilsV2.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasGraphUtilsV2.java @@ -35,8 +35,9 @@ import org.apache.atlas.type.AtlasEntityType; import org.apache.atlas.type.AtlasEnumType; import org.apache.atlas.type.AtlasStructType; -import org.apache.atlas.type.AtlasStructType.AtlasAttribute; +import org.apache.atlas.type.AtlasTypeRegistry; import org.apache.atlas.type.AtlasType; +import org.apache.atlas.type.AtlasStructType.AtlasAttribute; import org.apache.atlas.util.FileUtils; import org.apache.atlas.utils.AtlasPerfMetrics; import org.apache.atlas.utils.AtlasPerfMetrics.MetricRecorder; @@ -349,14 +350,16 @@ public static AtlasVertex findByUniqueAttributes(AtlasGraph graph, AtlasEntityTy vertex = findByTypeAndUniquePropertyName(graph, typeName, uniqAttrValues); // if 
no instance of given typeName is found, try to find an instance of type's sub-type
-            if (vertex == null && !entitySubTypes.isEmpty()) {
+            // Added exception for few types to solve https://atlanhq.atlassian.net/browse/PLT-1638
+            if (vertex == null && !entitySubTypes.isEmpty() && !AtlasTypeRegistry.TYPENAMES_TO_SKIP_SUPER_TYPE_CHECK.contains(typeName)) {
                 vertex = findBySuperTypeAndUniquePropertyName(graph, typeName, uniqAttrValues);
             }
         } else {
             vertex = findByTypeAndPropertyName(graph, typeName, attrNameValues);

             // if no instance of given typeName is found, try to find an instance of type's sub-type
-            if (vertex == null && !entitySubTypes.isEmpty()) {
+            // Added exception for few types to solve https://atlanhq.atlassian.net/browse/PLT-1638
+            if (vertex == null && !entitySubTypes.isEmpty() && !AtlasTypeRegistry.TYPENAMES_TO_SKIP_SUPER_TYPE_CHECK.contains(typeName)) {
                 vertex = findBySuperTypeAndPropertyName(graph, typeName, attrNameValues);
             }
         }
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasRelationshipStoreV2.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasRelationshipStoreV2.java
index 3e8c8b9e42..afdf2825f1 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasRelationshipStoreV2.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasRelationshipStoreV2.java
@@ -68,12 +68,8 @@
 import static org.apache.atlas.model.typedef.AtlasRelationshipDef.PropagateTags.NONE;
 import static org.apache.atlas.model.typedef.AtlasRelationshipDef.PropagateTags.ONE_TO_TWO;
 import static org.apache.atlas.model.typedef.AtlasRelationshipDef.PropagateTags.TWO_TO_ONE;
-import static org.apache.atlas.repository.Constants.ENTITY_TYPE_PROPERTY_KEY;
-import static org.apache.atlas.repository.Constants.HOME_ID_KEY;
-import static org.apache.atlas.repository.Constants.PROVENANCE_TYPE_KEY;
-import static org.apache.atlas.repository.Constants.RELATIONSHIPTYPE_TAG_PROPAGATION_KEY;
-import static org.apache.atlas.repository.Constants.RELATIONSHIP_GUID_PROPERTY_KEY;
-import static org.apache.atlas.repository.Constants.VERSION_PROPERTY_KEY;
+import static org.apache.atlas.repository.Constants.*;
+import static org.apache.atlas.repository.graph.GraphHelper.getTypeName;
 import static org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2.*;
 import static org.apache.atlas.repository.store.graph.v2.tasks.ClassificationPropagateTaskFactory.CLASSIFICATION_PROPAGATION_RELATIONSHIP_UPDATE;

@@ -104,6 +100,16 @@ public class AtlasRelationshipStoreV2 implements AtlasRelationshipStore {
     private static final String END_2_DOC_ID_KEY = "end2DocId";
     private static final String ES_DOC_ID_MAP_KEY = "esDocIdMap";

+    private static Set<String> EXCLUDE_MUTATION_REL_TYPE_NAMES = new HashSet<String>() {{
+        add(REL_DOMAIN_TO_DOMAINS);
+        add(REL_DOMAIN_TO_PRODUCTS);
+        add(REL_DOMAIN_TO_STAKEHOLDERS);
+        add(REL_STAKEHOLDER_TITLE_TO_STAKEHOLDERS);
+        add(REL_POLICY_TO_ACCESS_CONTROL);
+        add(REL_DATA_PRODUCT_TO_OUTPUT_PORTS);
+        add(REL_DATA_PRODUCT_TO_INPUT_PORTS);
+    }};
+
     public enum RelationshipMutation {
         RELATIONSHIP_CREATE,
         RELATIONSHIP_UPDATE,
@@ -129,6 +135,8 @@ public AtlasRelationship create(AtlasRelationship relationship) throws AtlasBase
             LOG.debug("==> create({})", relationship);
         }

+        validateRelationshipType(relationship.getTypeName());
+
         AtlasVertex end1Vertex = getVertexFromEndPoint(relationship.getEnd1());
         AtlasVertex end2Vertex = getVertexFromEndPoint(relationship.getEnd2());

@@ -161,6 +169,8 @@ public AtlasRelationship
update(AtlasRelationship relationship) throws AtlasBase AtlasVertex end1Vertex = edge.getOutVertex(); AtlasVertex end2Vertex = edge.getInVertex(); + validateRelationshipType(edgeType); + // update shouldn't change endType if (StringUtils.isNotEmpty(relationship.getTypeName()) && !StringUtils.equalsIgnoreCase(edgeType, relationship.getTypeName())) { throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIP_UPDATE_TYPE_CHANGE_NOT_ALLOWED, guid, edgeType, relationship.getTypeName()); @@ -320,6 +330,8 @@ public void deleteByIds(List guids) throws AtlasBaseException { throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIP_ALREADY_DELETED, guid); } + validateRelationshipType(getTypeName(edge)); + edgesToDelete.add(edge); AtlasRelationship relationshipToDelete = entityRetriever.mapEdgeToAtlasRelationship(edge); deletedRelationships.add(relationshipToDelete); @@ -368,6 +380,9 @@ public void deleteById(String guid, boolean forceDelete) throws AtlasBaseExcepti if (getState(edge) == DELETED) { throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIP_ALREADY_DELETED, guid); } + + validateRelationshipType(getTypeName(edge)); + deleteDelegate.getHandler().resetHasLineageOnInputOutputDelete(Collections.singleton(edge), null); deleteDelegate.getHandler().deleteRelationships(Collections.singleton(edge), forceDelete); @@ -999,4 +1014,11 @@ private static void setEdgeVertexIdsInContext(AtlasEdge edge) { RequestContext.get().addRelationshipEndToVertexIdMapping(GraphHelper.getAtlasObjectIdForOutVertex(edge), edge.getOutVertex().getId()); RequestContext.get().addRelationshipEndToVertexIdMapping(GraphHelper.getAtlasObjectIdForInVertex(edge), edge.getInVertex().getId()); } + + private static void validateRelationshipType(String relationshipTypeName) throws AtlasBaseException { + if (EXCLUDE_MUTATION_REL_TYPE_NAMES.contains(relationshipTypeName)) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, + String.format("Mutating relationship of type %s is not supported via relationship APIs, please use entity APIs", relationshipTypeName)); + } + } } \ No newline at end of file diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/ClassificationAssociator.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/ClassificationAssociator.java index 6cd9ac4e4b..8bea44fca7 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/ClassificationAssociator.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/ClassificationAssociator.java @@ -193,45 +193,21 @@ public void setClassifications(Map map) throws AtlasB AtlasPerfMetrics.MetricRecorder recorder = RequestContext.get().startMetricRecord("commitChanges.notify"); Map> deleted = RequestContext.get().getDeletedClassificationAndVertices(); - Set allVertices = new HashSet<>(); - if (MapUtils.isNotEmpty(deleted)) { - for (AtlasClassification deletedClassification: deleted.keySet()) { - Collection vertices = deleted.get(deletedClassification); - List propagatedEntities = new ArrayList<>(); - - for (Object obj: vertices) { - AtlasVertex vertex = (AtlasVertex) obj; - AtlasEntity entity = instanceConverter.getAndCacheEntity(GraphHelper.getGuid(vertex), IGNORE_REL); - - allVertices.add(vertex); - propagatedEntities.add(entity); - } - - entityChangeNotifier.onClassificationsDeletedFromEntities(propagatedEntities, Collections.singletonList(deletedClassification)); + Map> entityClassification = getEntityClassificationsMapping(deleted); + for (Map.Entry> atlasEntityListEntry : 
entityClassification.entrySet()) { + entityChangeNotifier.onClassificationDeletedFromEntity(atlasEntityListEntry.getKey(), atlasEntityListEntry.getValue()); } } Map> added = RequestContext.get().getAddedClassificationAndVertices(); if (MapUtils.isNotEmpty(added)) { - for (AtlasClassification addedClassification: added.keySet()) { - Collection vertices = added.get(addedClassification); - List propagatedEntities = new ArrayList<>(); - - for (Object obj: vertices) { - AtlasVertex vertex = (AtlasVertex) obj; - AtlasEntity entity = instanceConverter.getAndCacheEntity(GraphHelper.getGuid(vertex), IGNORE_REL); - - allVertices.add(vertex); - propagatedEntities.add(entity); - } - - entityChangeNotifier.onClassificationsAddedToEntities(propagatedEntities, Collections.singletonList(addedClassification), false); + Map> entityClassification = getEntityClassificationsMapping(added); + for (Map.Entry> atlasEntityListEntry : entityClassification.entrySet()) { + entityChangeNotifier.onClassificationAddedToEntity(atlasEntityListEntry.getKey(), atlasEntityListEntry.getValue()); } } - entityGraphMapper.updateClassificationText(null, allVertices); transactionInterceptHelper.intercept(); - RequestContext.get().endMetricRecord(recorder); RequestContext.get().setDelayTagNotifications(false); } @@ -310,14 +286,12 @@ private void deleteClassifications(String entityGuid, String typeName, List> getEntityClassificationsMapping(Map> classificationVertices) throws AtlasBaseException { + Map> entityClassifications = new HashMap<>(); + Set vertices = new HashSet<>(); + for (AtlasClassification classification : classificationVertices.keySet()) { + for (Object obj : classificationVertices.get(classification)) { + AtlasVertex vertex = (AtlasVertex) obj; + vertices.add(vertex); + } + List propagatedEntities = entityGraphMapper.updateClassificationText(null, vertices); + propagatedEntities.forEach(entity -> entityClassifications.computeIfAbsent(entity, key -> new ArrayList<>()).add(classification)); + } + return entityClassifications; + } } private static class ListOps { diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/DataMeshQNMigrationService.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/DataMeshQNMigrationService.java new file mode 100644 index 0000000000..7341e0703f --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/DataMeshQNMigrationService.java @@ -0,0 +1,464 @@ +package org.apache.atlas.repository.store.graph.v2; + +import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.RequestContext; +import org.apache.atlas.discovery.EntityDiscoveryService; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.discovery.IndexSearchParams; +import org.apache.atlas.model.instance.AtlasEntity; +import org.apache.atlas.model.instance.AtlasEntityHeader; +import org.apache.atlas.repository.graph.GraphHelper; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.atlas.repository.store.graph.AtlasEntityStore; +import org.apache.atlas.service.redis.RedisService; +import org.apache.atlas.type.AtlasEntityType; +import org.apache.atlas.type.AtlasType; +import org.apache.atlas.type.AtlasTypeRegistry; +import org.apache.atlas.util.NanoIdUtils; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections.MapUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +import 
static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.Constants.POLICY_ENTITY_TYPE; +import static org.apache.atlas.repository.graph.GraphHelper.getAllChildrenVertices; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.*; +import static org.apache.atlas.repository.util.AccessControlUtils.ATTR_POLICY_CATEGORY; +import static org.apache.atlas.repository.util.AccessControlUtils.ATTR_POLICY_RESOURCES; + +public class DataMeshQNMigrationService implements MigrationService { + + private static final Logger LOG = LoggerFactory.getLogger(DataMeshQNMigrationService.class); + + private final AtlasEntityStore entityStore; + private final EntityDiscoveryService discovery; + private final EntityGraphRetriever entityRetriever; + + private final AtlasTypeRegistry typeRegistry; + private final RedisService redisService; + private Map updatedPolicyResources; + + private final int BATCH_SIZE = 20; + + boolean errorOccured = false; + + boolean skipSuperDomain = false; + + private int counter; + private boolean forceRegen; + private final TransactionInterceptHelper transactionInterceptHelper; + + public DataMeshQNMigrationService(AtlasEntityStore entityStore, EntityDiscoveryService discovery, EntityGraphRetriever entityRetriever, AtlasTypeRegistry typeRegistry, TransactionInterceptHelper transactionInterceptHelper, RedisService redisService, boolean forceRegen) { + this.entityRetriever = entityRetriever; + this.entityStore = entityStore; + this.discovery = discovery; + this.typeRegistry = typeRegistry; + this.redisService = redisService; + this.transactionInterceptHelper = transactionInterceptHelper; + this.forceRegen = forceRegen; + + this.updatedPolicyResources = new HashMap<>(); + this.counter = 0; + } + + public void startMigration() throws Exception { + try { + redisService.putValue(DATA_MESH_QN, MigrationStatus.IN_PROGRESS.name()); + + Set attributes = new HashSet<>(Arrays.asList(SUPER_DOMAIN_QN_ATTR, PARENT_DOMAIN_QN_ATTR, "__customAttributes")); + + List entities = getEntity(DATA_DOMAIN_ENTITY_TYPE, attributes, null); + + for (AtlasEntityHeader superDomain: entities) { + skipSuperDomain = false; + updateChunk(superDomain); + } + } catch (Exception e) { + LOG.error("Migration failed", e); + redisService.putValue(DATA_MESH_QN, MigrationStatus.FAILED.name()); + throw e; + } + + redisService.putValue(DATA_MESH_QN, MigrationStatus.SUCCESSFUL.name()); + } + + private void updateChunk(AtlasEntityHeader atlasEntity) throws AtlasBaseException { + AtlasVertex vertex = entityRetriever.getEntityVertex(atlasEntity.getGuid()); + String qualifiedName = (String) atlasEntity.getAttribute(QUALIFIED_NAME); + + try{ + migrateDomainAttributes(vertex, "", ""); + + if (counter > 0) { + commitChanges(); + } + + } catch (AtlasBaseException e){ + this.errorOccured = true; + LOG.error("Error while migrating qualified name for entity: {}", qualifiedName, e); + } + } + + private void migrateDomainAttributes(AtlasVertex vertex, String parentDomainQualifiedName, String superDomainQualifiedName) throws AtlasBaseException { + if(skipSuperDomain) { + return; + } + + String currentQualifiedName = vertex.getProperty(QUALIFIED_NAME,String.class); + String updatedQualifiedName = createDomainQualifiedName(parentDomainQualifiedName); + + Map updatedAttributes = new HashMap<>(); + + Map customAttributes = GraphHelper.getCustomAttributes(vertex); + if(!this.forceRegen && customAttributes != null && customAttributes.get(MIGRATION_CUSTOM_ATTRIBUTE) != null && 
customAttributes.get(MIGRATION_CUSTOM_ATTRIBUTE).equals("true")){ + LOG.info("Entity already migrated: {}", currentQualifiedName); + + updatedQualifiedName = vertex.getProperty(QUALIFIED_NAME,String.class); + + if (StringUtils.isEmpty(superDomainQualifiedName)) { + superDomainQualifiedName = vertex.getProperty(QUALIFIED_NAME,String.class); + } + + } else { + counter++; + LOG.info("Migrating qualified name for Domain: {} to {}", currentQualifiedName, updatedQualifiedName); + superDomainQualifiedName = commitChangesInMemory(currentQualifiedName, updatedQualifiedName, parentDomainQualifiedName, superDomainQualifiedName, vertex, updatedAttributes); + } + + if (!skipSuperDomain) { + Iterator products = getAllChildrenVertices(vertex, DATA_PRODUCT_EDGE_LABEL); + List productsList = new ArrayList<>(); + products.forEachRemaining(productsList::add); + + for (AtlasVertex productVertex : productsList) { + if (Objects.nonNull(productVertex)) { + migrateDataProductAttributes(productVertex, updatedQualifiedName, superDomainQualifiedName); + } else { + LOG.warn("Found null product vertex"); + } + + if (skipSuperDomain) { + break; + } + } + + // Get all children domains of current domain + Iterator childDomains = getAllChildrenVertices(vertex, DOMAIN_PARENT_EDGE_LABEL); + List childDomainsList = new ArrayList<>(); + childDomains.forEachRemaining(childDomainsList::add); + + for (AtlasVertex childVertex : childDomainsList) { + if (Objects.nonNull(childVertex)) { + migrateDomainAttributes(childVertex, updatedQualifiedName, superDomainQualifiedName); + } else { + LOG.warn("Found null sub-domain vertex"); + } + + if (skipSuperDomain) { + break; + } + } + + recordUpdatedChildEntities(vertex, updatedAttributes); + if (counter >= BATCH_SIZE) { + commitChanges(); + } + } + } + + public void commitChanges() throws AtlasBaseException { + try { + updatePolicy(this.updatedPolicyResources); + } catch (AtlasBaseException e) { + this.errorOccured = true; + this.skipSuperDomain = true; + LOG.error("Failed to update set of policies: ", e); + LOG.error("Failed policies: {}", AtlasType.toJson(this.updatedPolicyResources)); + throw e; + } finally { + this.updatedPolicyResources.clear(); + } + + try { + transactionInterceptHelper.intercept(); + LOG.info("Committed a batch to the graph"); + } catch (Exception e){ + this.skipSuperDomain = true; + this.errorOccured = true; + LOG.error("Failed to commit set of assets: ", e); + throw e; + } finally { + this.counter = 0; + } + } + + public String commitChangesInMemory(String currentQualifiedName, String updatedQualifiedName, String parentDomainQualifiedName, String superDomainQualifiedName, AtlasVertex vertex, Map updatedAttributes) { + + if(skipSuperDomain) { + return ""; + } + + vertex.setProperty(QUALIFIED_NAME, updatedQualifiedName); + + if (StringUtils.isEmpty(parentDomainQualifiedName) && StringUtils.isEmpty(superDomainQualifiedName)){ + superDomainQualifiedName = updatedQualifiedName; + } else{ + vertex.setProperty(PARENT_DOMAIN_QN_ATTR, parentDomainQualifiedName); + vertex.setProperty(SUPER_DOMAIN_QN_ATTR, superDomainQualifiedName); + } + + updatedAttributes.put(QUALIFIED_NAME, updatedQualifiedName); + + //Store domainPolicies and resources to be updated + String currentResource = "entity:"+ currentQualifiedName; + String updatedResource = "entity:"+ updatedQualifiedName; + this.updatedPolicyResources.put(currentResource, updatedResource); + + Map customAttributes = GraphHelper.getCustomAttributes(vertex); + if(Objects.isNull(customAttributes) || 
MapUtils.isEmpty(customAttributes)) { + customAttributes = new HashMap<>(); + } + customAttributes.put(MIGRATION_CUSTOM_ATTRIBUTE, "true"); + vertex.setProperty(CUSTOM_ATTRIBUTES_PROPERTY_KEY, AtlasEntityType.toJson(customAttributes)); + + return superDomainQualifiedName; + } + + + private void migrateDataProductAttributes(AtlasVertex vertex, String parentDomainQualifiedName, String superDomainQualifiedName) throws AtlasBaseException { + if(skipSuperDomain) { + return; + } + + String currentQualifiedName = vertex.getProperty(QUALIFIED_NAME,String.class); + String updatedQualifiedName = createProductQualifiedName(parentDomainQualifiedName); + + Map customAttributes = GraphHelper.getCustomAttributes(vertex); + + if(!this.forceRegen && customAttributes != null && customAttributes.get(MIGRATION_CUSTOM_ATTRIBUTE) != null && customAttributes.get(MIGRATION_CUSTOM_ATTRIBUTE).equals("true")) { + LOG.info("Product already migrated: {}", currentQualifiedName); + + } else { + counter++; + LOG.info("Migrating qualified name for Product: {} to {}", currentQualifiedName, updatedQualifiedName); + vertex.setProperty(QUALIFIED_NAME, updatedQualifiedName); + + //Store domainPolicies and resources to be updated + String currentResource = "entity:" + currentQualifiedName; + String updatedResource = "entity:" + updatedQualifiedName; + this.updatedPolicyResources.put(currentResource, updatedResource); + + vertex.setProperty(PARENT_DOMAIN_QN_ATTR, parentDomainQualifiedName); + vertex.setProperty(SUPER_DOMAIN_QN_ATTR, superDomainQualifiedName); + + if(Objects.isNull(customAttributes) || MapUtils.isEmpty(customAttributes)) { + customAttributes = new HashMap<>(); + } + customAttributes.put(MIGRATION_CUSTOM_ATTRIBUTE, "true"); + vertex.setProperty(CUSTOM_ATTRIBUTES_PROPERTY_KEY, AtlasEntityType.toJson(customAttributes)); + } + + if(counter >= BATCH_SIZE){ + commitChanges(); + } + } + + protected void updatePolicy(Map updatedPolicyResources) throws AtlasBaseException { + if(skipSuperDomain) { + return; + } + + List currentResources = new ArrayList<>(updatedPolicyResources.keySet()); + LOG.info("Updating policies for entities {}", currentResources); + Map updatedAttributes = new HashMap<>(); + + List policies = getEntity(POLICY_ENTITY_TYPE,new HashSet<>(Arrays.asList(ATTR_POLICY_RESOURCES, ATTR_POLICY_CATEGORY)), currentResources); + if (CollectionUtils.isNotEmpty(policies)) { + int batchSize = BATCH_SIZE; + int totalPolicies = policies.size(); + + for (int i = 0; i < totalPolicies; i += batchSize) { + List entityList = new ArrayList<>(); + List batch = policies.subList(i, Math.min(i + batchSize, totalPolicies)); + + for (AtlasEntityHeader policy : batch) { + AtlasVertex policyVertex = entityRetriever.getEntityVertex(policy.getGuid()); + AtlasEntity policyEntity = entityRetriever.toAtlasEntity(policyVertex); + + List policyResources = (List) policyEntity.getAttribute(ATTR_POLICY_RESOURCES); + List updatedPolicyResourcesList = new ArrayList<>(); + + for (String resource : policyResources) { + if (updatedPolicyResources.containsKey(resource)) { + updatedPolicyResourcesList.add(updatedPolicyResources.get(resource)); + } else { + updatedPolicyResourcesList.add(resource); + } + } + updatedAttributes.put(ATTR_POLICY_RESOURCES, updatedPolicyResourcesList); + + policyEntity.setAttribute(ATTR_POLICY_RESOURCES, updatedPolicyResourcesList); + entityList.add(policyEntity); + recordUpdatedChildEntities(policyVertex, updatedAttributes); + } + + EntityStream entityStream = new AtlasEntityStream(entityList); + 
entityStore.createOrUpdate(entityStream, false); + } + } + } + + private static String createDomainQualifiedName(String parentDomainQualifiedName) { + if (StringUtils.isNotEmpty(parentDomainQualifiedName)) { + return parentDomainQualifiedName + "/domain/" + getUUID(); + } else{ + return "default/domain" + "/" + getUUID() + "/super"; + } + } + + private static String createProductQualifiedName(String parentDomainQualifiedName) throws AtlasBaseException { + if (StringUtils.isEmpty(parentDomainQualifiedName)) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Parent Domain Qualified Name cannot be empty or null"); + } + return parentDomainQualifiedName + "/product/" + getUUID(); + } + + public static String getUUID(){ + return NanoIdUtils.randomNanoId(); + } + + public List getEntity(String entityType, Set attributes, List resource) throws AtlasBaseException { + + List> mustClauseList = new ArrayList<>(); + mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", entityType))); + + if(entityType.equals(DATA_DOMAIN_ENTITY_TYPE)){ + Map childBool = new HashMap<>(); + List > mustNotClauseList = new ArrayList<>(); + mustNotClauseList.add(mapOf("exists", mapOf("field", PARENT_DOMAIN_QN_ATTR))); + + Map shouldBool = new HashMap<>(); + shouldBool.put("must_not", mustNotClauseList); + + List > shouldClauseList = new ArrayList<>(); + shouldClauseList.add(mapOf("bool", shouldBool)); + + childBool.put("should", shouldClauseList); + mustClauseList.add(mapOf("bool", childBool)); + } + + if(entityType.equals(POLICY_ENTITY_TYPE)){ + mustClauseList.add(mapOf("term", mapOf("__state", "ACTIVE"))); + mustClauseList.add(mapOf("terms", mapOf("policyResources", resource))); + } + + Map bool = new HashMap<>(); + bool.put("must", mustClauseList); + + Map dsl = mapOf("query", mapOf("bool", bool)); + + List> sortList = new ArrayList<>(); + Map sortField = new HashMap<>(); + sortField.put("__timestamp", mapOf("order", "DESC")); + sortList.add(sortField); + dsl.put("sort", sortList); + + + List entities = indexSearchPaginated(dsl, attributes, discovery); + + return entities; + } + + public static List indexSearchPaginated(Map dsl, Set attributes, EntityDiscoveryService discovery) throws AtlasBaseException { + IndexSearchParams searchParams = new IndexSearchParams(); + List ret = new ArrayList<>(); + + List sortList = new ArrayList<>(0); + sortList.add(mapOf("__timestamp", mapOf("order", "asc"))); + sortList.add(mapOf("__guid", mapOf("order", "asc"))); + dsl.put("sort", sortList); + + int from = 0; + int size = 100; + boolean hasMore = true; + do { + dsl.put("from", from); + dsl.put("size", size); + searchParams.setDsl(dsl); + + if (CollectionUtils.isNotEmpty(attributes)) { + searchParams.setAttributes(attributes); + } + + List headers = discovery.directIndexSearch(searchParams).getEntities(); + + if (CollectionUtils.isNotEmpty(headers)) { + ret.addAll(headers); + } else { + hasMore = false; + } + + from += size; + + } while (hasMore); + + return ret; + } + + /** + * Record the updated child entities, it will be used to send notification and store audit logs + * @param entityVertex Child entity vertex + * @param updatedAttributes Updated attributes while updating required attributes on updating collection + */ + protected void recordUpdatedChildEntities(AtlasVertex entityVertex, Map updatedAttributes) { + RequestContext requestContext = RequestContext.get(); + + AtlasEntity entity = new AtlasEntity(); + entity = entityRetriever.mapSystemAttributes(entityVertex, entity); + 
entity.setAttributes(updatedAttributes); + requestContext.cacheDifferentialEntity(new AtlasEntity(entity)); + + AtlasEntityType entityType = typeRegistry.getEntityTypeByName(entity.getTypeName()); + + //Add the min info attributes to entity header to be sent as part of notification + if(entityType != null) { + AtlasEntity finalEntity = entity; + entityType.getMinInfoAttributes().values().stream().filter(attribute -> !updatedAttributes.containsKey(attribute.getName())).forEach(attribute -> { + Object attrValue = null; + try { + attrValue = entityRetriever.getVertexAttribute(entityVertex, attribute); + } catch (AtlasBaseException e) { + this.errorOccured = true; + LOG.error("Error while getting vertex attribute", e); + } + if(attrValue != null) { + finalEntity.setAttribute(attribute.getName(), attrValue); + } + }); + requestContext.recordEntityUpdate(new AtlasEntityHeader(finalEntity)); + } + + } + + public static Map mapOf(String key, Object value) { + Map map = new HashMap<>(); + map.put(key, value); + return map; + } + + @Override + public void run() { + try { + LOG.info("Starting migration: {}", DATA_MESH_QN); + startMigration(); + LOG.info("Finished migration: {}", DATA_MESH_QN); + } catch (Exception e) { + LOG.error("Error running migration : {}",e.toString()); + throw new RuntimeException(e); + } + } +} diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/DataProductInputsOutputsMigrationService.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/DataProductInputsOutputsMigrationService.java new file mode 100644 index 0000000000..2f33a32481 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/DataProductInputsOutputsMigrationService.java @@ -0,0 +1,101 @@ +package org.apache.atlas.repository.store.graph.v2; + +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.repository.graph.GraphHelper; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.commons.collections.CollectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +import static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.*; + +public class DataProductInputsOutputsMigrationService { + + private static final Logger LOG = LoggerFactory.getLogger(DataProductInputsOutputsMigrationService.class); + + private final EntityGraphRetriever entityRetriever; + + + private String productGuid; + private final TransactionInterceptHelper transactionInterceptHelper; + + public DataProductInputsOutputsMigrationService(EntityGraphRetriever entityRetriever, String productGuid, TransactionInterceptHelper transactionInterceptHelper) { + this.entityRetriever = entityRetriever; + this.transactionInterceptHelper = transactionInterceptHelper; + this.productGuid = productGuid; + } + + public void migrateProduct() throws Exception { + try { + AtlasVertex productVertex = entityRetriever.getEntityVertex(this.productGuid); + + boolean isCommitRequired = migrateAttr(productVertex); + if (isCommitRequired){ + LOG.info("Committing changes for Product: {}", this.productGuid); + commitChanges(); + } + else { + LOG.info("No changes to commit for Product: {} as no migration needed", this.productGuid); + } + + } catch (Exception e) { + LOG.error("Error while migration inputs/outputs for Dataproduct: {}", this.productGuid, e); + throw e; + } + } + + private boolean migrateAttr(AtlasVertex vertex) throws 
AtlasBaseException { + boolean isCommitRequired = false; + + List outputPortsRelationGuids = getAssetGuids(vertex, OUTPUT_PORT_PRODUCT_EDGE_LABEL); + List outputPortGuidsAttr = vertex.getMultiValuedProperty(OUTPUT_PORT_GUIDS_ATTR, String.class); + + + List inputPortsRelationGuids = getAssetGuids(vertex, INPUT_PORT_PRODUCT_EDGE_LABEL); + List inputPortGuidsAttr = vertex.getMultiValuedProperty(INPUT_PORT_GUIDS_ATTR, String.class); + + if(!CollectionUtils.isEqualCollection(outputPortsRelationGuids, outputPortGuidsAttr)) { + LOG.info("Migrating outputPort guid attribute: {} for Product: {}", OUTPUT_PORT_GUIDS_ATTR, this.productGuid); + addInternalAttr(vertex, OUTPUT_PORT_GUIDS_ATTR, outputPortsRelationGuids); + isCommitRequired = true; + } + + if(!CollectionUtils.isEqualCollection(inputPortsRelationGuids, inputPortGuidsAttr)) { + LOG.info("Migrating inputPort guid attribute: {} for Product: {}", INPUT_PORT_GUIDS_ATTR, this.productGuid); + addInternalAttr(vertex, INPUT_PORT_GUIDS_ATTR, inputPortsRelationGuids); + isCommitRequired = true; + } + + return isCommitRequired; + } + + public void commitChanges() throws AtlasBaseException { + try { + transactionInterceptHelper.intercept(); + LOG.info("Committed a entity to the graph"); + } catch (Exception e){ + LOG.error("Failed to commit asset: ", e); + throw e; + } + } + + private List getAssetGuids(AtlasVertex vertex, String edgeLabel) throws AtlasBaseException { + List guids = new ArrayList<>(); + Iterator activeParent = GraphHelper.getActiveParentVertices(vertex, edgeLabel); + while(activeParent.hasNext()) { + AtlasVertex child = activeParent.next(); + guids.add(child.getProperty(GUID_PROPERTY_KEY, String.class)); + } + return guids; + } + + private void addInternalAttr(AtlasVertex productVertex, String internalAttr, List currentGuids){ + productVertex.removeProperty(internalAttr); + if (CollectionUtils.isNotEmpty(currentGuids)) { + currentGuids.forEach(guid -> AtlasGraphUtilsV2.addEncodedProperty(productVertex, internalAttr , guid)); + } + } +} \ No newline at end of file diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityGraphMapper.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityGraphMapper.java index a95ca20f0c..782b6847c8 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityGraphMapper.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityGraphMapper.java @@ -137,8 +137,9 @@ import static org.apache.atlas.repository.graph.GraphHelper.getPropagatableClassifications; import static org.apache.atlas.repository.graph.GraphHelper.getClassificationEntityGuid; import static org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2.*; -import static org.apache.atlas.repository.store.graph.v2.tasks.ClassificationPropagateTaskFactory.CLASSIFICATION_PROPAGATION_ADD; -import static org.apache.atlas.repository.store.graph.v2.tasks.ClassificationPropagateTaskFactory.CLASSIFICATION_PROPAGATION_DELETE; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.INPUT_PORT_GUIDS_ATTR; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.OUTPUT_PORT_GUIDS_ATTR; +import static org.apache.atlas.repository.store.graph.v2.tasks.ClassificationPropagateTaskFactory.*; import static org.apache.atlas.type.AtlasStructType.AtlasAttribute.AtlasRelationshipEdgeDirection.IN; import static org.apache.atlas.type.AtlasStructType.AtlasAttribute.AtlasRelationshipEdgeDirection.OUT; 
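Note: the DataProductInputsOutputsMigrationService above commits only when the stored inputPortGuids/outputPortGuids internal attributes have drifted from the GUIDs reachable through the active port relationship edges. Below is a minimal, self-contained Java sketch of that drift check; the class and names are illustrative only and are not part of the patch.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class PortGuidReconciliationSketch {

    // Order-insensitive comparison; the patch uses CollectionUtils.isEqualCollection,
    // which for unique GUIDs behaves the same as comparing the two sets.
    static boolean needsRepair(List<String> guidsFromEdges, List<String> guidsFromAttr) {
        return !new HashSet<>(guidsFromEdges).equals(new HashSet<>(guidsFromAttr));
    }

    public static void main(String[] args) {
        List<String> fromEdges = Arrays.asList("guid-1", "guid-2"); // derived from relationship edges
        List<String> fromAttr  = Arrays.asList("guid-2");           // stale internal attribute
        if (needsRepair(fromEdges, fromAttr)) {
            // In the service above this is where addInternalAttr() clears the property
            // and re-adds one encoded value per current GUID before committing.
            System.out.println("attribute would be rewritten to: " + fromEdges);
        } else {
            System.out.println("no migration needed");
        }
    }
}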
import static org.apache.atlas.type.Constants.PENDING_TASKS_PROPERTY_KEY; @@ -170,6 +171,7 @@ public class EntityGraphMapper { private static final String TYPE_GLOSSARY= "AtlasGlossary"; private static final String TYPE_CATEGORY= "AtlasGlossaryCategory"; private static final String TYPE_TERM = "AtlasGlossaryTerm"; + private static final String TYPE_PRODUCT = "DataProduct"; private static final String TYPE_PROCESS = "Process"; private static final String ATTR_MEANINGS = "meanings"; private static final String ATTR_ANCHOR = "anchor"; @@ -222,8 +224,7 @@ public EntityGraphMapper(DeleteHandlerDelegate deleteDelegate, RestoreHandlerV1 this.entityRetriever = new EntityGraphRetriever(graph, typeRegistry); this.fullTextMapperV2 = fullTextMapperV2; this.taskManagement = taskManagement; - this.transactionInterceptHelper = transactionInterceptHelper; - } + this.transactionInterceptHelper = transactionInterceptHelper;} @VisibleForTesting public void setTasksUseFlag(boolean value) { @@ -1906,7 +1907,7 @@ public List mapArrayValue(AttributeMutationContext ctx, EntityMutationContext co AtlasAttribute inverseRefAttribute = attribute.getInverseRefAttribute(); Cardinality cardinality = attribute.getAttributeDef().getCardinality(); List removedElements = new ArrayList<>(); - List newElementsCreated = new ArrayList<>(); + List newElementsCreated = new ArrayList<>(); List allArrayElements = null; List currentElements; boolean deleteExistingRelations = shouldDeleteExistingRelations(ctx, attribute); @@ -2004,6 +2005,11 @@ public List mapArrayValue(AttributeMutationContext ctx, EntityMutationContext co case PROCESS_INPUTS: case PROCESS_OUTPUTS: addEdgesToContext(GraphHelper.getGuid(ctx.referringVertex), newElementsCreated, removedElements); break; + + case INPUT_PORT_PRODUCT_EDGE_LABEL: + case OUTPUT_PORT_PRODUCT_EDGE_LABEL: + addInternalProductAttr(ctx, newElementsCreated, removedElements); + break; } if (LOG.isDebugEnabled()) { @@ -2089,6 +2095,11 @@ public List appendArrayValue(AttributeMutationContext ctx, EntityMutationContext case PROCESS_INPUTS: case PROCESS_OUTPUTS: addEdgesToContext(GraphHelper.getGuid(ctx.referringVertex), newElementsCreated, new ArrayList<>(0)); break; + + case INPUT_PORT_PRODUCT_EDGE_LABEL: + case OUTPUT_PORT_PRODUCT_EDGE_LABEL: + addInternalProductAttr(ctx, newElementsCreated, null); + break; } if (LOG.isDebugEnabled()) { @@ -2156,6 +2167,11 @@ public List removeArrayValue(AttributeMutationContext ctx, EntityMutationContext case PROCESS_INPUTS: case PROCESS_OUTPUTS: addEdgesToContext(GraphHelper.getGuid(ctx.referringVertex), new ArrayList<>(0), removedElements); break; + + case INPUT_PORT_PRODUCT_EDGE_LABEL: + case OUTPUT_PORT_PRODUCT_EDGE_LABEL: + addInternalProductAttr(ctx, null , removedElements); + break; } if (LOG.isDebugEnabled()) { @@ -2193,6 +2209,40 @@ private void addEdgesToContext(String guid, List newElementsCreated, Lis } } + private void addInternalProductAttr(AttributeMutationContext ctx, List createdElements, List deletedElements) throws AtlasBaseException { + MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("addInternalProductAttrForAppend"); + AtlasVertex toVertex = ctx.getReferringVertex(); + String toVertexType = getTypeName(toVertex); + + if (CollectionUtils.isEmpty(createdElements) && CollectionUtils.isEmpty(deletedElements)){ + RequestContext.get().endMetricRecord(metricRecorder); + return; + } + + if (TYPE_PRODUCT.equals(toVertexType)) { + String attrName = 
ctx.getAttribute().getRelationshipEdgeLabel().equals(OUTPUT_PORT_PRODUCT_EDGE_LABEL) + ? OUTPUT_PORT_GUIDS_ATTR + : INPUT_PORT_GUIDS_ATTR; + + addOrRemoveDaapInternalAttr(toVertex, attrName, createdElements, deletedElements); + }else{ + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Can not update product relations while updating any asset"); + } + RequestContext.get().endMetricRecord(metricRecorder); + } + + private void addOrRemoveDaapInternalAttr(AtlasVertex toVertex, String internalAttr, List createdElements, List deletedElements) { + if (CollectionUtils.isNotEmpty(createdElements)) { + List addedGuids = createdElements.stream().map(x -> ((AtlasEdge) x).getOutVertex().getProperty("__guid", String.class)).collect(Collectors.toList()); + addedGuids.forEach(guid -> AtlasGraphUtilsV2.addEncodedProperty(toVertex, internalAttr, guid)); + } + + if (CollectionUtils.isNotEmpty(deletedElements)) { + List removedGuids = deletedElements.stream().map(x -> x.getOutVertex().getProperty("__guid", String.class)).collect(Collectors.toList()); + removedGuids.forEach(guid -> AtlasGraphUtilsV2.removeItemFromListPropertyValue(toVertex, internalAttr, guid)); + } + } + private boolean shouldDeleteExistingRelations(AttributeMutationContext ctx, AtlasAttribute attribute) { boolean ret = false; AtlasEntityType entityType = typeRegistry.getEntityTypeByName(AtlasGraphUtilsV2.getTypeName(ctx.getReferringVertex())); @@ -2981,6 +3031,87 @@ private void updateInConsistentOwnedMapVertices(AttributeMutationContext ctx, At } } + public void cleanUpClassificationPropagation(String classificationName) throws AtlasBaseException { + List vertices = GraphHelper.getAllAssetsWithClassificationAttached(graph, classificationName); + int totalVertexSize = vertices.size(); + LOG.info("To clean up tag {} from {} entities", classificationName, totalVertexSize); + int toIndex; + int offset = 0; + do { + toIndex = Math.min((offset + CHUNK_SIZE), totalVertexSize); + List entityVertices = vertices.subList(offset, toIndex); + List impactedGuids = entityVertices.stream().map(GraphHelper::getGuid).collect(Collectors.toList()); + try { + GraphTransactionInterceptor.lockObjectAndReleasePostCommit(impactedGuids); + for (AtlasVertex vertex : entityVertices) { + List deletedClassifications = new ArrayList<>(); + List classificationEdges = GraphHelper.getClassificationEdges(vertex, null, classificationName); + for (AtlasEdge edge : classificationEdges) { + AtlasClassification classification = entityRetriever.toAtlasClassification(edge.getInVertex()); + deletedClassifications.add(classification); + deleteDelegate.getHandler().deleteEdgeReference(edge, TypeCategory.CLASSIFICATION, false, true, null, vertex); + } + + AtlasEntity entity = repairClassificationMappings(vertex); + + entityChangeNotifier.onClassificationDeletedFromEntity(entity, deletedClassifications); + } + offset += CHUNK_SIZE; + } finally { + transactionInterceptHelper.intercept(); + LOG.info("Cleaned up {} entities for classification {}", offset, classificationName); + } + + } while (offset < totalVertexSize); + // Fetch all classificationVertex by classificationName and delete them if remaining + List classificationVertices = GraphHelper.getAllClassificationVerticesByClassificationName(graph, classificationName); + for (AtlasVertex classificationVertex : classificationVertices) { + deleteDelegate.getHandler().deleteClassificationVertex(classificationVertex, true); + } + transactionInterceptHelper.intercept(); + LOG.info("Completed cleaning up classification {}", 
classificationName); + } + + public AtlasEntity repairClassificationMappings(AtlasVertex entityVertex) throws AtlasBaseException { + String guid = GraphHelper.getGuid(entityVertex); + AtlasEntity entity = instanceConverter.getEntity(guid, ENTITY_CHANGE_NOTIFY_IGNORE_RELATIONSHIP_ATTRIBUTES); + + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_UPDATE_CLASSIFICATION, new AtlasEntityHeader(entity)), "repair classification mappings: guid=", guid); + List classificationNames = new ArrayList<>(); + List propagatedClassificationNames = new ArrayList<>(); + + if (entity.getClassifications() != null) { + List classifications = entity.getClassifications(); + for (AtlasClassification classification : classifications) { + if (isPropagatedClassification(classification, guid)) { + propagatedClassificationNames.add(classification.getTypeName()); + } else { + classificationNames.add(classification.getTypeName()); + } + } + } + //Delete array/set properties first + entityVertex.removeProperty(TRAIT_NAMES_PROPERTY_KEY); + entityVertex.removeProperty(PROPAGATED_TRAIT_NAMES_PROPERTY_KEY); + + + //Update classificationNames and propagatedClassificationNames in entityVertex + entityVertex.setProperty(CLASSIFICATION_NAMES_KEY, getDelimitedClassificationNames(classificationNames)); + entityVertex.setProperty(PROPAGATED_CLASSIFICATION_NAMES_KEY, getDelimitedClassificationNames(propagatedClassificationNames)); + entityVertex.setProperty(CLASSIFICATION_TEXT_KEY, fullTextMapperV2.getClassificationTextForEntity(entity)); + // Make classificationNames unique list as it is of type SET + classificationNames = classificationNames.stream().distinct().collect(Collectors.toList()); + //Update classificationNames and propagatedClassificationNames in entityHeader + for(String classificationName : classificationNames) { + AtlasGraphUtilsV2.addEncodedProperty(entityVertex, TRAIT_NAMES_PROPERTY_KEY, classificationName); + } + for (String classificationName : propagatedClassificationNames) { + entityVertex.addListProperty(PROPAGATED_TRAIT_NAMES_PROPERTY_KEY, classificationName); + } + + return entity; + } + public void addClassifications(final EntityMutationContext context, String guid, List classifications) throws AtlasBaseException { if (CollectionUtils.isNotEmpty(classifications)) { MetricRecorder metric = RequestContext.get().startMetricRecord("addClassifications"); @@ -3028,7 +3159,7 @@ public void addClassifications(final EntityMutationContext context, String guid, } if (restrictPropagationThroughHierarchy == null) { - classification.setRestrictPropagationThroughLineage(RESTRICT_PROPAGATION_THROUGH_HIERARCHY_DEFAULT); + classification.setRestrictPropagationThroughHierarchy(RESTRICT_PROPAGATION_THROUGH_HIERARCHY_DEFAULT); } // set associated entity id to classification @@ -3110,17 +3241,21 @@ public void addClassifications(final EntityMutationContext context, String guid, if (CollectionUtils.isNotEmpty(entitiesToPropagateTo)) { notificationVertices.addAll(entitiesToPropagateTo); } - - - for (AtlasClassification classification : addedClassifications.keySet()) { - Set vertices = addedClassifications.get(classification); - - if (RequestContext.get().isDelayTagNotifications()) { + if (RequestContext.get().isDelayTagNotifications()) { + for (AtlasClassification classification : addedClassifications.keySet()) { + Set vertices = addedClassifications.get(classification); RequestContext.get().addAddedClassificationAndVertices(classification, new ArrayList<>(vertices)); - } else { + 
} + } else { + Map> entityClassification = new HashMap<>(); + for (AtlasClassification classification : addedClassifications.keySet()) { + Set vertices = addedClassifications.get(classification); List propagatedEntities = updateClassificationText(classification, vertices); + propagatedEntities.forEach(entity -> entityClassification.computeIfAbsent(entity, key -> new ArrayList<>()).add(classification)); + } - entityChangeNotifier.onClassificationsAddedToEntities(propagatedEntities, Collections.singletonList(classification), false); + for (Map.Entry> atlasEntityListEntry : entityClassification.entrySet()) { + entityChangeNotifier.onClassificationAddedToEntity(atlasEntityListEntry.getKey(), atlasEntityListEntry.getValue()); } } @@ -3373,18 +3508,195 @@ public void deleteClassification(String entityGuid, String classificationName) t entityVertex.setProperty(CLASSIFICATION_NAMES_KEY, getClassificationNamesString(traitNames)); updateModificationMetadata(entityVertex); + Map> entityClassification = new HashMap<>(); if (RequestContext.get().isDelayTagNotifications()) { RequestContext.get().addDeletedClassificationAndVertices(classification, new ArrayList<>(entityVertices)); } else if (CollectionUtils.isNotEmpty(entityVertices)) { List propagatedEntities = updateClassificationText(classification, entityVertices); - + propagatedEntities.forEach(entity -> entityClassification.computeIfAbsent(entity, key -> new ArrayList<>()).add(classification)); //Sending audit request for all entities at once - entityChangeNotifier.onClassificationsDeletedFromEntities(propagatedEntities, Collections.singletonList(classification)); + for (Map.Entry> atlasEntityListEntry : entityClassification.entrySet()) { + entityChangeNotifier.onClassificationDeletedFromEntity(atlasEntityListEntry.getKey(), atlasEntityListEntry.getValue()); + } } AtlasPerfTracer.log(perf); } + public void deleteClassifications(String entityGuid, List classifications, String associatedEntityGuid) throws AtlasBaseException { + if (StringUtils.isEmpty(associatedEntityGuid) || associatedEntityGuid.equals(entityGuid)) { + deleteClassifications(entityGuid, classifications); + } else { + for (AtlasClassification classification : classifications) { + deletePropagatedClassifications(entityGuid, classification.getTypeName(), associatedEntityGuid); + } + } + } + + private void deletePropagatedClassifications(String entityGuid, String classificationName, String associatedEntityGuid) throws AtlasBaseException { + if (StringUtils.isEmpty(classificationName)) { + throw new AtlasBaseException(AtlasErrorCode.INVALID_CLASSIFICATION_PARAMS, "delete", entityGuid); + } + + AtlasVertex entityVertex = AtlasGraphUtilsV2.findByGuid(this.graph, entityGuid); + + if (entityVertex == null) { + throw new AtlasBaseException(AtlasErrorCode.INSTANCE_GUID_NOT_FOUND, entityGuid); + } + + deleteDelegate.getHandler().deletePropagatedClassification(entityVertex, classificationName, associatedEntityGuid); + } + + public void deleteClassifications(String entityGuid, List classifications) throws AtlasBaseException { + if (CollectionUtils.isEmpty(classifications)){ + return; + } + + AtlasPerfTracer perf = null; + + if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { + perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntityGraphMapper.deleteClassification"); + } + Map> deletedClassifications = new HashMap<>(); + + for (AtlasClassification classificationn : classifications) { + + if (StringUtils.isEmpty(classificationn.getTypeName())) { + throw new 
AtlasBaseException(AtlasErrorCode.INVALID_CLASSIFICATION_PARAMS, "delete", entityGuid); + } + + AtlasVertex entityVertex = AtlasGraphUtilsV2.findByGuid(this.graph, entityGuid); + + if (entityVertex == null) { + throw new AtlasBaseException(AtlasErrorCode.INSTANCE_GUID_NOT_FOUND, entityGuid); + } + + List traitNames = getTraitNames(entityVertex); + + if (CollectionUtils.isEmpty(traitNames)) { + throw new AtlasBaseException(AtlasErrorCode.NO_CLASSIFICATIONS_FOUND_FOR_ENTITY, entityGuid); + } + + String classificationName = classificationn.getTypeName(); + validateClassificationExists(traitNames, classificationName); + + AtlasVertex classificationVertex = getClassificationVertex(entityVertex, classificationName); + + // Get in progress task to see if there already is a propagation for this particular vertex + List inProgressTasks = taskManagement.getInProgressTasks(); + for (AtlasTask task : inProgressTasks) { + if (isTaskMatchingWithVertexIdAndEntityGuid(task, classificationVertex.getIdForDisplay(), entityGuid)) { + throw new AtlasBaseException(AtlasErrorCode.CLASSIFICATION_CURRENTLY_BEING_PROPAGATED, classificationName); + } + } + + AtlasClassification classification = entityRetriever.toAtlasClassification(classificationVertex); + + if (classification == null) { + throw new AtlasBaseException(AtlasErrorCode.CLASSIFICATION_NOT_FOUND, classificationName); + } + + // remove classification from propagated entities if propagation is turned on + final List entityVertices; + + if (isPropagationEnabled(classificationVertex)) { + if (taskManagement != null && DEFERRED_ACTION_ENABLED) { + boolean propagateDelete = true; + String classificationVertexId = classificationVertex.getIdForDisplay(); + + List entityTaskGuids = (List) entityVertex.getPropertyValues(PENDING_TASKS_PROPERTY_KEY, String.class); + + if (CollectionUtils.isNotEmpty(entityTaskGuids)) { + List entityPendingTasks = taskManagement.getByGuidsES(entityTaskGuids); + + boolean pendingTaskExists = entityPendingTasks.stream() + .anyMatch(x -> isTaskMatchingWithVertexIdAndEntityGuid(x, classificationVertexId, entityGuid)); + + if (pendingTaskExists) { + List entityClassificationPendingTasks = entityPendingTasks.stream() + .filter(t -> t.getParameters().containsKey("entityGuid") + && t.getParameters().containsKey("classificationVertexId")) + .filter(t -> t.getParameters().get("entityGuid").equals(entityGuid) + && t.getParameters().get("classificationVertexId").equals(classificationVertexId) + && t.getType().equals(CLASSIFICATION_PROPAGATION_ADD)) + .collect(Collectors.toList()); + for (AtlasTask entityClassificationPendingTask : entityClassificationPendingTasks) { + String taskGuid = entityClassificationPendingTask.getGuid(); + taskManagement.deleteByGuid(taskGuid, TaskManagement.DeleteType.SOFT); + AtlasGraphUtilsV2.deleteProperty(entityVertex, PENDING_TASKS_PROPERTY_KEY, taskGuid); +// propagateDelete = false; TODO: Uncomment when all unnecessary ADD tasks are resolved + } + } + } + + if (propagateDelete) { + createAndQueueTask(CLASSIFICATION_PROPAGATION_DELETE, entityVertex, classificationVertex.getIdForDisplay()); + } + + entityVertices = new ArrayList<>(); + } else { + entityVertices = deleteDelegate.getHandler().removeTagPropagation(classificationVertex); + + if (LOG.isDebugEnabled()) { + LOG.debug("Number of propagations to delete -> {}", entityVertices.size()); + } + } + } else { + entityVertices = new ArrayList<>(); + } + + // add associated entity to entityVertices list + if (!entityVertices.contains(entityVertex)) { + 
entityVertices.add(entityVertex); + } + + // remove classifications from associated entity + if (LOG.isDebugEnabled()) { + LOG.debug("Removing classification: [{}] from: [{}][{}] with edge label: [{}]", classificationName, + getTypeName(entityVertex), entityGuid, CLASSIFICATION_LABEL); + } + + AtlasEdge edge = getClassificationEdge(entityVertex, classificationVertex); + + deleteDelegate.getHandler().deleteEdgeReference(edge, CLASSIFICATION, false, true, entityVertex); + + traitNames.remove(classificationName); + + // update 'TRAIT_NAMES_PROPERTY_KEY' property + entityVertex.removePropertyValue(TRAIT_NAMES_PROPERTY_KEY, classificationName); + + // update 'CLASSIFICATION_NAMES_KEY' property + entityVertex.removeProperty(CLASSIFICATION_NAMES_KEY); + + entityVertex.setProperty(CLASSIFICATION_NAMES_KEY, getClassificationNamesString(traitNames)); + + updateModificationMetadata(entityVertex); + + if(deletedClassifications.get(classification) == null) { + deletedClassifications.put(classification, new HashSet<>()); + } + //Add current Vertex to be notified + deletedClassifications.get(classification).add(entityVertex); + } + + Map> entityClassification = new HashMap<>(); + + for (AtlasClassification classification : deletedClassifications.keySet()) { + Set vertices = deletedClassifications.get(classification); + if (CollectionUtils.isNotEmpty(vertices)) { + List propagatedEntities = updateClassificationText(classification, vertices); + propagatedEntities.forEach(entity -> entityClassification.computeIfAbsent(entity, key -> new ArrayList<>()).add(classification)); + } + } + + //Sending audit request for all entities at once + for (Map.Entry> atlasEntityListEntry : entityClassification.entrySet()) { + entityChangeNotifier.onClassificationDeletedFromEntity(atlasEntityListEntry.getKey(), atlasEntityListEntry.getValue()); + } + + AtlasPerfTracer.log(perf); + + } private boolean isTaskMatchingWithVertexIdAndEntityGuid(AtlasTask task, String classificationVertexId, String entityGuid) { try { if (CLASSIFICATION_PROPAGATION_ADD.equals(task.getType())) { @@ -3459,6 +3771,7 @@ public void updateClassifications(EntityMutationContext context, String guid, Li if (CollectionUtils.isEmpty(classifications)) { throw new AtlasBaseException(AtlasErrorCode.INVALID_CLASSIFICATION_PARAMS, "update", guid); } + entityRetriever.verifyClassificationsPropagationMode(classifications); AtlasVertex entityVertex = AtlasGraphUtilsV2.findByGuid(this.graph, guid); @@ -3580,6 +3893,20 @@ public void updateClassifications(EntityMutationContext context, String guid, Li Boolean updatedRestrictPropagationThroughLineage = classification.getRestrictPropagationThroughLineage(); Boolean currentRestrictPropagationThroughHierarchy = currentClassification.getRestrictPropagationThroughHierarchy(); Boolean updatedRestrictPropagationThroughHierarchy = classification.getRestrictPropagationThroughHierarchy(); + if (updatedRestrictPropagationThroughLineage == null) { + updatedRestrictPropagationThroughLineage = currentRestrictPropagationThroughLineage; + classification.setRestrictPropagationThroughLineage(updatedRestrictPropagationThroughLineage); + } + if (updatedRestrictPropagationThroughHierarchy == null) { + updatedRestrictPropagationThroughHierarchy = currentRestrictPropagationThroughHierarchy; + classification.setRestrictPropagationThroughHierarchy(updatedRestrictPropagationThroughHierarchy); + } + + String propagationMode = CLASSIFICATION_PROPAGATION_MODE_DEFAULT; + if (updatedTagPropagation) { + // determinePropagationMode also validates the 
propagation restriction option values + propagationMode = entityRetriever.determinePropagationMode(updatedRestrictPropagationThroughLineage, updatedRestrictPropagationThroughHierarchy); + } if ((!Objects.equals(updatedRemovePropagations, currentRemovePropagations) || !Objects.equals(currentTagPropagation, updatedTagPropagation) || @@ -3587,11 +3914,14 @@ public void updateClassifications(EntityMutationContext context, String guid, Li taskManagement != null && DEFERRED_ACTION_ENABLED) { String propagationType = CLASSIFICATION_PROPAGATION_ADD; - if (removePropagation || !updatedTagPropagation) - { + if(currentRestrictPropagationThroughLineage != updatedRestrictPropagationThroughLineage || currentRestrictPropagationThroughHierarchy != updatedRestrictPropagationThroughHierarchy){ + propagationType = CLASSIFICATION_REFRESH_PROPAGATION; + } + if (removePropagation || !updatedTagPropagation) { propagationType = CLASSIFICATION_PROPAGATION_DELETE; } createAndQueueTask(propagationType, entityVertex, classificationVertex.getIdForDisplay(), currentRestrictPropagationThroughLineage,currentRestrictPropagationThroughHierarchy); + updatedTagPropagation = null; } // compute propagatedEntityVertices once and use it for subsequent iterations and notifications @@ -3599,17 +3929,12 @@ public void updateClassifications(EntityMutationContext context, String guid, Li if (updatedTagPropagation) { if (updatedRestrictPropagationThroughLineage != null && !currentRestrictPropagationThroughLineage && updatedRestrictPropagationThroughLineage) { deleteDelegate.getHandler().removeTagPropagation(classificationVertex); - } if (updatedRestrictPropagationThroughHierarchy != null && !currentRestrictPropagationThroughHierarchy && updatedRestrictPropagationThroughHierarchy) { deleteDelegate.getHandler().removeTagPropagation(classificationVertex); } if (CollectionUtils.isEmpty(entitiesToPropagateTo)) { - String propagationMode; - if (updatedRemovePropagations !=null) { - propagationMode = entityRetriever.determinePropagationMode(updatedRestrictPropagationThroughLineage, updatedRestrictPropagationThroughHierarchy); - } - else{ + if (updatedRemovePropagations ==null) { propagationMode = CLASSIFICATION_PROPAGATION_MODE_DEFAULT; } Boolean toExclude = propagationMode == CLASSIFICATION_VERTEX_RESTRICT_PROPAGATE_THROUGH_LINEAGE ? 
true : false; diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityGraphRetriever.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityGraphRetriever.java index 691b5ab75f..90e041b473 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityGraphRetriever.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityGraphRetriever.java @@ -1019,8 +1019,13 @@ private AtlasEntityHeader mapVertexToAtlasEntityHeader(AtlasVertex entityVertex, ret.setTypeName(typeName); ret.setGuid(guid); ret.setStatus(GraphHelper.getStatus(entityVertex)); - if(RequestContext.get().includeClassifications()){ - ret.setClassificationNames(getAllTraitNames(entityVertex)); + RequestContext context = RequestContext.get(); + boolean includeClassifications = context.includeClassifications(); + boolean includeClassificationNames = context.isIncludeClassificationNames(); + if(includeClassifications){ + ret.setClassificationNames(getAllTraitNamesFromAttribute(entityVertex)); + } else if (!includeClassifications && includeClassificationNames) { + ret.setClassificationNames(getAllTraitNamesFromAttribute(entityVertex)); } ret.setIsIncomplete(isIncomplete); ret.setLabels(getLabels(entityVertex)); diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/MigrationService.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/MigrationService.java new file mode 100644 index 0000000000..9e93999cd9 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/MigrationService.java @@ -0,0 +1,5 @@ +package org.apache.atlas.repository.store.graph.v2; + +public interface MigrationService extends Runnable { + void startMigration() throws Exception; +} diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/bulkimport/MigrationImport.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/bulkimport/MigrationImport.java index 0cc7c4a318..9edbfc1cc3 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/bulkimport/MigrationImport.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/bulkimport/MigrationImport.java @@ -86,7 +86,7 @@ public EntityMutationResponse run(EntityImportStream entityStream, AtlasImportRe return ret; } - private DataMigrationStatusService createMigrationStatusService(AtlasImportResult importResult) { + private DataMigrationStatusService createMigrationStatusService(AtlasImportResult importResult) throws AtlasBaseException { DataMigrationStatusService dataMigrationStatusService = new DataMigrationStatusService(); dataMigrationStatusService.init(importResult.getRequest().getOptions().get(AtlasImportRequest.OPTION_KEY_MIGRATION_FILE_NAME)); return dataMigrationStatusService; diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/AuthPolicyPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/AuthPolicyPreProcessor.java index acc28ba5fe..53c78ded9a 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/AuthPolicyPreProcessor.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/AuthPolicyPreProcessor.java @@ -24,7 +24,6 @@ import org.apache.atlas.authorize.AtlasEntityAccessRequest; import org.apache.atlas.authorize.AtlasPrivilege; import org.apache.atlas.exception.AtlasBaseException; 
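Note: the updateClassifications() changes above pick the follow-up task type from the classification's propagation flags: a change in either restrict-propagation flag queues CLASSIFICATION_REFRESH_PROPAGATION, while turning propagation off (or an explicit removePropagation) queues CLASSIFICATION_PROPAGATION_DELETE; otherwise the default CLASSIFICATION_PROPAGATION_ADD stands. The following standalone sketch only illustrates that decision order; it is a hypothetical helper, not the Atlas implementation.

final class PropagationTaskChooserSketch {
    static final String ADD     = "CLASSIFICATION_PROPAGATION_ADD";
    static final String DELETE  = "CLASSIFICATION_PROPAGATION_DELETE";
    static final String REFRESH = "CLASSIFICATION_REFRESH_PROPAGATION";

    // Mirrors the ordering in the patch: the delete check runs last, so switching
    // propagation off wins over a refresh triggered by changed restriction flags.
    static String choose(boolean restrictLineageChanged,
                         boolean restrictHierarchyChanged,
                         boolean removePropagation,
                         boolean tagPropagationEnabled) {
        String type = ADD;
        if (restrictLineageChanged || restrictHierarchyChanged) {
            type = REFRESH;
        }
        if (removePropagation || !tagPropagationEnabled) {
            type = DELETE;
        }
        return type;
    }

    public static void main(String[] args) {
        System.out.println(choose(true,  false, false, true));  // REFRESH
        System.out.println(choose(true,  false, false, false)); // DELETE: propagation turned off wins
        System.out.println(choose(false, false, false, true));  // ADD
    }
}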
-import org.apache.atlas.featureflag.FeatureFlagStore; import org.apache.atlas.model.instance.AtlasEntity; import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; import org.apache.atlas.model.instance.AtlasEntityHeader; @@ -43,44 +42,39 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Set; +import java.util.*; import java.util.stream.Collectors; import static org.apache.atlas.AtlasErrorCode.BAD_REQUEST; import static org.apache.atlas.AtlasErrorCode.INSTANCE_BY_UNIQUE_ATTRIBUTE_NOT_FOUND; import static org.apache.atlas.AtlasErrorCode.INSTANCE_GUID_NOT_FOUND; +import static org.apache.atlas.AtlasErrorCode.OPERATION_NOT_SUPPORTED; import static org.apache.atlas.AtlasErrorCode.RESOURCE_NOT_FOUND; import static org.apache.atlas.AtlasErrorCode.UNAUTHORIZED_CONNECTION_ADMIN; import static org.apache.atlas.authorize.AtlasAuthorizationUtils.getCurrentUserName; import static org.apache.atlas.authorize.AtlasAuthorizationUtils.verifyAccess; import static org.apache.atlas.model.instance.EntityMutations.EntityOperation.CREATE; import static org.apache.atlas.model.instance.EntityMutations.EntityOperation.UPDATE; -import static org.apache.atlas.repository.Constants.ATTR_ADMIN_ROLES; -import static org.apache.atlas.repository.Constants.KEYCLOAK_ROLE_ADMIN; -import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; +import static org.apache.atlas.repository.Constants.*; import static org.apache.atlas.repository.util.AccessControlUtils.*; +import static org.apache.atlas.repository.util.AccessControlUtils.POLICY_SERVICE_NAME_ABAC; import static org.apache.atlas.repository.util.AccessControlUtils.getPolicySubCategory; public class AuthPolicyPreProcessor implements PreProcessor { private static final Logger LOG = LoggerFactory.getLogger(AuthPolicyPreProcessor.class); + public static final String ENTITY_DEFAULT_DOMAIN_SUPER = "entity:default/domain/*/super"; private final AtlasGraph graph; private final AtlasTypeRegistry typeRegistry; private final EntityGraphRetriever entityRetriever; - private final FeatureFlagStore featureFlagStore ; private IndexAliasStore aliasStore; public AuthPolicyPreProcessor(AtlasGraph graph, AtlasTypeRegistry typeRegistry, - EntityGraphRetriever entityRetriever, - FeatureFlagStore featureFlagStore) { + EntityGraphRetriever entityRetriever) { this.graph = graph; this.typeRegistry = typeRegistry; this.entityRetriever = entityRetriever; - this.featureFlagStore = featureFlagStore; aliasStore = new ESAliasStore(graph, entityRetriever); } @@ -108,7 +102,35 @@ private void processCreatePolicy(AtlasStruct entity) throws AtlasBaseException { AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processCreatePolicy"); AtlasEntity policy = (AtlasEntity) entity; + AtlasEntityWithExtInfo parent = getAccessControlEntity(policy); + AtlasEntity parentEntity = null; + if (parent != null) { + parentEntity = parent.getEntity(); + verifyParentTypeName(parentEntity); + } + + String policyServiceName = getPolicyServiceName(policy); String policyCategory = getPolicyCategory(policy); + + if (POLICY_SERVICE_NAME_ABAC.equals(policyServiceName) && + (POLICY_CATEGORY_PERSONA.equals(policyCategory) || POLICY_CATEGORY_PURPOSE.equals(policyCategory))) { + + policy.setAttribute(QUALIFIED_NAME, String.format("%s/%s", getEntityQualifiedName(parentEntity), getUUID())); + + //extract role + String roleName = getPersonaRoleName(parentEntity); + List 
roles = Arrays.asList(roleName); + policy.setAttribute(ATTR_POLICY_ROLES, roles); + + policy.setAttribute(ATTR_POLICY_USERS, new ArrayList<>()); + policy.setAttribute(ATTR_POLICY_GROUPS, new ArrayList<>()); + + //aliasStore.updateAlias(parentEntity, policy); + + return; + } + + if (StringUtils.isEmpty(policyCategory)) { throw new AtlasBaseException(BAD_REQUEST, "Please provide attribute " + ATTR_POLICY_CATEGORY); } @@ -117,14 +139,13 @@ private void processCreatePolicy(AtlasStruct entity) throws AtlasBaseException { AuthPolicyValidator validator = new AuthPolicyValidator(entityRetriever); if (POLICY_CATEGORY_PERSONA.equals(policyCategory)) { - AtlasEntityWithExtInfo parent = getAccessControlEntity(policy); - AtlasEntity parentEntity = parent.getEntity(); - String policySubCategory = getPolicySubCategory(policy); if (!POLICY_SUB_CATEGORY_DOMAIN.equals(policySubCategory)) { validator.validate(policy, null, parentEntity, CREATE); validateConnectionAdmin(policy); + } else { + validateAndReduce(policy); } policy.setAttribute(QUALIFIED_NAME, String.format("%s/%s", getEntityQualifiedName(parentEntity), getUUID())); @@ -142,9 +163,6 @@ private void processCreatePolicy(AtlasStruct entity) throws AtlasBaseException { aliasStore.updateAlias(parent, policy); } else if (POLICY_CATEGORY_PURPOSE.equals(policyCategory)) { - AtlasEntityWithExtInfo parent = getAccessControlEntity(policy); - AtlasEntity parentEntity = parent.getEntity(); - policy.setAttribute(QUALIFIED_NAME, String.format("%s/%s", getEntityQualifiedName(parentEntity), getUUID())); validator.validate(policy, null, parentEntity, CREATE); @@ -166,9 +184,30 @@ private void processCreatePolicy(AtlasStruct entity) throws AtlasBaseException { RequestContext.get().endMetricRecord(metricRecorder); } + + private void validateAndReduce(AtlasEntity policy) { + List resources = (List) policy.getAttribute(ATTR_POLICY_RESOURCES); + boolean hasAllDomainPattern = resources.stream().anyMatch(resource -> + resource.equals("entity:*") || + resource.equals("entity:*/super") || + resource.equals(ENTITY_DEFAULT_DOMAIN_SUPER) + ); + + if (hasAllDomainPattern) { + policy.setAttribute(ATTR_POLICY_RESOURCES, Collections.singletonList(ENTITY_DEFAULT_DOMAIN_SUPER)); + } + } + + private void processUpdatePolicy(AtlasStruct entity, AtlasVertex vertex) throws AtlasBaseException { AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processUpdatePolicy"); AtlasEntity policy = (AtlasEntity) entity; + + String policyServiceName = getPolicyServiceName(policy); + if (POLICY_SERVICE_NAME_ABAC.equals(policyServiceName)) { + return; + } + AtlasEntity existingPolicy = entityRetriever.toAtlasEntityWithExtInfo(vertex).getEntity(); String policyCategory = policy.hasAttribute(ATTR_POLICY_CATEGORY) ? 
getPolicyCategory(policy) : getPolicyCategory(existingPolicy); @@ -183,6 +222,8 @@ private void processUpdatePolicy(AtlasStruct entity, AtlasVertex vertex) throws if (!POLICY_SUB_CATEGORY_DOMAIN.equals(policySubCategory)) { validator.validate(policy, existingPolicy, parentEntity, UPDATE); validateConnectionAdmin(policy); + } else { + validateAndReduce(policy); } String qName = getEntityQualifiedName(existingPolicy); @@ -238,6 +279,11 @@ public void processDelete(AtlasVertex vertex) throws AtlasBaseException { try { AtlasEntity policy = entityRetriever.toAtlasEntity(vertex); + String policyServiceName = getPolicyServiceName(policy); + if (POLICY_SERVICE_NAME_ABAC.equals(policyServiceName)) { + return; + } + authorizeDeleteAuthPolicy(policy); if(!policy.getStatus().equals(AtlasEntity.Status.ACTIVE)) { @@ -323,4 +369,10 @@ private AtlasEntityWithExtInfo getAccessControlEntity(AtlasEntity entity) throws RequestContext.get().endMetricRecord(metricRecorder); return ret; } + + private void verifyParentTypeName(AtlasEntity parentEntity) throws AtlasBaseException { + if (parentEntity.getTypeName().equals(STAKEHOLDER_ENTITY_TYPE)) { + throw new AtlasBaseException(OPERATION_NOT_SUPPORTED, "Updating policies for " + STAKEHOLDER_ENTITY_TYPE); + } + } } diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/ConnectionPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/ConnectionPreProcessor.java index 02fb63bbc8..9b0b83e8fd 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/ConnectionPreProcessor.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/ConnectionPreProcessor.java @@ -6,9 +6,7 @@ * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * + * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
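// Standalone sketch (illustrative, not from this patch) of the reduction performed by
// validateAndReduce above: if any policy resource grants access to every domain, the whole
// resource list collapses to the single canonical super-domain resource. The constant value and
// the wildcard patterns are copied from the diff; the class itself is hypothetical.
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public final class DomainPolicyReductionSketch {
    static final String ENTITY_DEFAULT_DOMAIN_SUPER = "entity:default/domain/*/super";

    // Collapse to the canonical "all domains" resource when an all-domain pattern is present,
    // otherwise keep the caller's resource list unchanged.
    static List<String> reduce(List<String> resources) {
        boolean hasAllDomainPattern = resources.stream().anyMatch(r ->
                r.equals("entity:*")
                        || r.equals("entity:*/super")
                        || r.equals(ENTITY_DEFAULT_DOMAIN_SUPER));

        return hasAllDomainPattern
                ? Collections.singletonList(ENTITY_DEFAULT_DOMAIN_SUPER)
                : resources;
    }

    public static void main(String[] args) {
        System.out.println(reduce(Arrays.asList("entity:*", "entity:default/domain/d1")));
        // -> [entity:default/domain/*/super]
        System.out.println(reduce(Arrays.asList("entity:default/domain/d1")));
        // -> [entity:default/domain/d1]
    }
}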
@@ -17,6 +15,7 @@ */ package org.apache.atlas.repository.store.graph.v2.preprocessor; +import org.apache.atlas.AtlasErrorCode; import org.apache.atlas.DeleteType; import org.apache.atlas.RequestContext; import org.apache.atlas.discovery.EntityDiscoveryService; @@ -47,22 +46,15 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.stream.Collectors; +import java.util.stream.Stream; +import static org.apache.atlas.auth.client.keycloak.AtlasKeycloakClient.getKeycloakClient; import static org.apache.atlas.authorize.AtlasAuthorizerFactory.ATLAS_AUTHORIZER_IMPL; import static org.apache.atlas.authorize.AtlasAuthorizerFactory.CURRENT_AUTHORIZER_IMPL; -import static org.apache.atlas.repository.Constants.ATTR_ADMIN_GROUPS; -import static org.apache.atlas.repository.Constants.ATTR_ADMIN_ROLES; -import static org.apache.atlas.repository.Constants.ATTR_ADMIN_USERS; -import static org.apache.atlas.repository.Constants.CREATED_BY_KEY; -import static org.apache.atlas.repository.Constants.POLICY_ENTITY_TYPE; -import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; +import static org.apache.atlas.repository.Constants.*; import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; -import static org.apache.atlas.auth.client.keycloak.AtlasKeycloakClient.getKeycloakClient; public class ConnectionPreProcessor implements PreProcessor { private static final Logger LOG = LoggerFactory.getLogger(ConnectionPreProcessor.class); @@ -154,60 +146,87 @@ private void processCreateConnection(AtlasStruct struct) throws AtlasBaseExcepti } } - private void processUpdateConnection(EntityMutationContext context, - AtlasStruct entity) throws AtlasBaseException { - + private void processUpdateConnection(EntityMutationContext context, AtlasStruct entity) throws AtlasBaseException { AtlasEntity connection = (AtlasEntity) entity; - if (ATLAS_AUTHORIZER_IMPL.equalsIgnoreCase(CURRENT_AUTHORIZER_IMPL)) { AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processUpdateConnection"); - AtlasVertex vertex = context.getVertex(connection.getGuid()); AtlasEntity existingConnEntity = entityRetriever.toAtlasEntity(vertex); - String roleName = String.format(CONN_NAME_PATTERN, connection.getGuid()); - String vertexQName = vertex.getProperty(QUALIFIED_NAME, String.class); entity.setAttribute(QUALIFIED_NAME, vertexQName); - RoleRepresentation representation = getKeycloakClient().getRoleByName(roleName); - String creatorUser = vertex.getProperty(CREATED_BY_KEY, String.class); - - if (connection.hasAttribute(ATTR_ADMIN_USERS)) { - List newAdminUsers = (List) connection.getAttribute(ATTR_ADMIN_USERS); - List currentAdminUsers = (List) existingConnEntity.getAttribute(ATTR_ADMIN_USERS); - if (StringUtils.isNotEmpty(creatorUser) && !newAdminUsers.contains(creatorUser)) { - newAdminUsers.add(creatorUser); - } - - connection.setAttribute(ATTR_ADMIN_USERS, newAdminUsers); - if (CollectionUtils.isNotEmpty(newAdminUsers) || CollectionUtils.isNotEmpty(currentAdminUsers)) { - keycloakStore.updateRoleUsers(roleName, currentAdminUsers, newAdminUsers, representation); - } - } + //optional is used here to distinguish if the admin related attributes are set in request body or not (else part) + //if set, check for empty list so that appropriate error can be thrown + List newAdminUsers = getAttributeList(connection, ATTR_ADMIN_USERS).orElse(null); + List 
currentAdminUsers = getAttributeList(existingConnEntity, ATTR_ADMIN_USERS).orElseGet(ArrayList::new); + + List newAdminGroups = getAttributeList(connection, ATTR_ADMIN_GROUPS).orElse(null); + List currentAdminGroups = getAttributeList(existingConnEntity, ATTR_ADMIN_GROUPS).orElseGet(ArrayList::new); + + List newAdminRoles = getAttributeList(connection, ATTR_ADMIN_ROLES).orElse(null); + List currentAdminRoles = getAttributeList(existingConnEntity, ATTR_ADMIN_ROLES).orElseGet(ArrayList::new); - if (connection.hasAttribute(ATTR_ADMIN_GROUPS)) { - List newAdminGroups = (List) connection.getAttribute(ATTR_ADMIN_GROUPS); - List currentAdminGroups = (List) existingConnEntity.getAttribute(ATTR_ADMIN_GROUPS); + // Check conditions and throw exceptions as necessary - if (CollectionUtils.isNotEmpty(newAdminGroups) || CollectionUtils.isNotEmpty(currentAdminGroups)) { - keycloakStore.updateRoleGroups(roleName, currentAdminGroups, newAdminGroups, representation); - } + // If all new admin attributes are null, no action required as these are not meant to update in the request + if (newAdminUsers == null && newAdminGroups == null && newAdminRoles == null) { + RequestContext.get().endMetricRecord(metricRecorder); + return; } - if (connection.hasAttribute(ATTR_ADMIN_ROLES)) { - List newAdminRoles = (List) connection.getAttribute(ATTR_ADMIN_ROLES); - List currentAdminRoles = (List) existingConnEntity.getAttribute(ATTR_ADMIN_ROLES); + // Throw exception if all new admin attributes are empty but not null + boolean emptyName = newAdminUsers != null && newAdminUsers.isEmpty(); + boolean emptyGroup = newAdminGroups != null && newAdminGroups.isEmpty(); + boolean emptyRole = newAdminRoles != null && newAdminRoles.isEmpty(); - if (CollectionUtils.isNotEmpty(newAdminRoles) || CollectionUtils.isNotEmpty(currentAdminRoles)) { - keycloakStore.updateRoleRoles(roleName, currentAdminRoles, newAdminRoles, representation); - } + if (emptyName && emptyGroup && emptyRole) { + throw new AtlasBaseException(AtlasErrorCode.ADMIN_LIST_SHOULD_NOT_BE_EMPTY, existingConnEntity.getTypeName()); + } + // Update Keycloak roles + RoleRepresentation representation = getKeycloakClient().getRoleByName(roleName); + List finalStateUsers = determineFinalState(newAdminUsers, currentAdminUsers); + List finalStateGroups = determineFinalState(newAdminGroups, currentAdminGroups); + List finalStateRoles = determineFinalState(newAdminRoles, currentAdminRoles); + //this is the case where the final state after comparison with current and new value of all the attributes become empty + if (allEmpty(finalStateUsers, finalStateGroups, finalStateRoles)) { + throw new AtlasBaseException(AtlasErrorCode.ADMIN_LIST_SHOULD_NOT_BE_EMPTY, existingConnEntity.getTypeName()); } + keycloakStore.updateRoleUsers(roleName, currentAdminUsers, finalStateUsers, representation); + keycloakStore.updateRoleGroups(roleName, currentAdminGroups, finalStateGroups, representation); + keycloakStore.updateRoleRoles(roleName, currentAdminRoles, finalStateRoles, representation); + + RequestContext.get().endMetricRecord(metricRecorder); } } + // if the list is null -> we don't want to change + // if the list is empty -> we want to remove all elements + // if the list is non-empty -> we want to replace + private List determineFinalState(List newAdmins, List currentAdmins) { + return newAdmins == null ? currentAdmins : newAdmins; + } + + private boolean allEmpty(List... 
lists) { + if (lists == null || lists.length == 0) { + return true; + } + return Stream.of(lists).allMatch(list -> list != null && list.isEmpty()); + } + + + private Optional> getAttributeList(AtlasEntity entity, String attributeName) { + if (entity.hasAttribute(attributeName)) { + if (Objects.isNull(entity.getAttribute(attributeName))) { + return Optional.of(new ArrayList<>(0)); + } + return Optional.of((List) entity.getAttribute(attributeName)); + } + return Optional.empty(); + } + @Override public void processDelete(AtlasVertex vertex) throws AtlasBaseException { // Process Delete connection role and policies in case of hard delete or purge @@ -242,7 +261,7 @@ private boolean isDeleteTypeSoft() { private List getConnectionPolicies(String guid, String roleName) throws AtlasBaseException { List ret = new ArrayList<>(); - + IndexSearchParams indexSearchParams = new IndexSearchParams(); Map dsl = new HashMap<>(); diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/PreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/PreProcessor.java index 0ed5191004..f0544abad4 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/PreProcessor.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/PreProcessor.java @@ -9,8 +9,7 @@ import java.util.HashSet; import java.util.Set; -import static org.apache.atlas.repository.Constants.ATLAS_GLOSSARY_CATEGORY_ENTITY_TYPE; -import static org.apache.atlas.repository.Constants.ATLAS_GLOSSARY_TERM_ENTITY_TYPE; +import static org.apache.atlas.repository.Constants.*; public interface PreProcessor { @@ -18,6 +17,15 @@ public interface PreProcessor { Set skipInitialAuthCheckTypes = new HashSet() {{ add(ATLAS_GLOSSARY_TERM_ENTITY_TYPE); add(ATLAS_GLOSSARY_CATEGORY_ENTITY_TYPE); + add(STAKEHOLDER_ENTITY_TYPE); + add(STAKEHOLDER_TITLE_ENTITY_TYPE); + add(DATA_DOMAIN_ENTITY_TYPE); + add(DATA_PRODUCT_ENTITY_TYPE); + }}; + + Set skipUpdateAuthCheckTypes = new HashSet() {{ + add(DATA_DOMAIN_ENTITY_TYPE); + add(DATA_PRODUCT_ENTITY_TYPE); }}; void processAttributes(AtlasStruct entity, EntityMutationContext context, EntityMutations.EntityOperation operation) throws AtlasBaseException; diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/PreProcessorUtils.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/PreProcessorUtils.java index 6c84900460..b36c8babe6 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/PreProcessorUtils.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/PreProcessorUtils.java @@ -1,7 +1,12 @@ package org.apache.atlas.repository.store.graph.v2.preprocessor; +import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.RequestContext; +import org.apache.atlas.discovery.EntityDiscoveryService; import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.discovery.IndexSearchParams; import org.apache.atlas.model.instance.AtlasEntity; +import org.apache.atlas.model.instance.AtlasEntityHeader; import org.apache.atlas.model.instance.AtlasObjectId; import org.apache.atlas.repository.graphdb.AtlasVertex; import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; @@ -10,14 +15,22 @@ import org.apache.atlas.type.AtlasStructType; import org.apache.atlas.type.AtlasTypeRegistry; import 
org.apache.atlas.util.NanoIdUtils; +import org.apache.atlas.util.lexoRank.LexoRank; import org.apache.atlas.utils.AtlasEntityUtil; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.atlas.repository.Constants.QUERY_COLLECTION_ENTITY_TYPE; -import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; -import static org.apache.atlas.repository.Constants.ENTITY_TYPE_PROPERTY_KEY; +import java.util.*; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import static org.apache.atlas.glossary.GlossaryUtils.ATLAS_GLOSSARY_CATEGORY_TYPENAME; +import static org.apache.atlas.glossary.GlossaryUtils.ATLAS_GLOSSARY_TERM_TYPENAME; +import static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; +import static org.apache.atlas.type.Constants.LEXICOGRAPHICAL_SORT_ORDER; public class PreProcessorUtils { private static final Logger LOG = LoggerFactory.getLogger(PreProcessorUtils.class); @@ -31,6 +44,44 @@ public class PreProcessorUtils { public static final String CATEGORY_CHILDREN = "childrenCategories"; public static final String GLOSSARY_TERM_REL_TYPE = "AtlasGlossaryTermAnchor"; public static final String GLOSSARY_CATEGORY_REL_TYPE = "AtlasGlossaryCategoryAnchor"; + public static final String INIT_LEXORANK_OFFSET = "0|100000:"; + public static final String INIT_TERM_LEXORANK_OFFSET = "0|500000:"; + + //DataMesh models constants + public static final String PARENT_DOMAIN_REL_TYPE = "parentDomain"; + public static final String SUB_DOMAIN_REL_TYPE = "subDomains"; + public static final String DATA_PRODUCT_REL_TYPE = "dataProducts"; + public static final String MIGRATION_CUSTOM_ATTRIBUTE = "isQualifiedNameMigrated"; + public static final String DATA_DOMAIN_REL_TYPE = "dataDomain"; + public static final String STAKEHOLDER_REL_TYPE = "stakeholders"; + + public static final String MESH_POLICY_CATEGORY = "datamesh"; + + public static final String DATA_PRODUCT_EDGE_LABEL = "__DataDomain.dataProducts"; + public static final String DOMAIN_PARENT_EDGE_LABEL = "__DataDomain.subDomains"; + public static final String STAKEHOLDER_EDGE_LABEL = "__DataDomain.stakeholders"; + public static final String STAKEHOLDER_TITLE_EDGE_LABEL = "__StakeholderTitle.stakeholders"; + + + public static final String PARENT_DOMAIN_QN_ATTR = "parentDomainQualifiedName"; + public static final String SUPER_DOMAIN_QN_ATTR = "superDomainQualifiedName"; + public static final String DAAP_VISIBILITY_ATTR = "daapVisibility"; + public static final String DAAP_VISIBILITY_USERS_ATTR = "daapVisibilityUsers"; + public static final String DAAP_VISIBILITY_GROUPS_ATTR = "daapVisibilityGroups"; + public static final String OUTPUT_PORT_GUIDS_ATTR = "daapOutputPortGuids"; + public static final String INPUT_PORT_GUIDS_ATTR = "daapInputPortGuids"; + public static final String DAAP_STATUS_ATTR = "daapStatus"; + public static final String DAAP_ARCHIVED_STATUS = "Archived"; + + //Migration Constants + public static final String MIGRATION_TYPE_PREFIX = "MIGRATION:"; + public static final String DATA_MESH_QN = MIGRATION_TYPE_PREFIX + "DATA_MESH_QN"; + + public enum MigrationStatus { + IN_PROGRESS, + SUCCESSFUL, + FAILED; + } //Query models constants public static final String PREFIX_QUERY_QN = "default/collection/"; @@ -44,6 +95,13 @@ public class PreProcessorUtils { public static final String CHILDREN_QUERIES = "__Namespace.childrenQueries"; public 
static final String CHILDREN_FOLDERS = "__Namespace.childrenFolders"; + public static final int REBALANCING_TRIGGER = 119; + public static final int PRE_DELIMITER_LENGTH = 9; + public static final String LEXORANK_HARD_LIMIT = "" + (256 - PRE_DELIMITER_LENGTH); + public static final String LEXORANK_VALID_REGEX = "^0\\|[0-9a-z]{6}:(?:[0-9a-z]{0," + LEXORANK_HARD_LIMIT + "})?$"; + public static final Set ATTRIBUTES = new HashSet<>(Arrays.asList("lexicographicalSortOrder")); + + public static final Pattern LEXORANK_VALIDITY_PATTERN = Pattern.compile(LEXORANK_VALID_REGEX); public static String getUUID(){ return NanoIdUtils.randomNanoId(); @@ -107,4 +165,228 @@ public static String updateQueryResourceAttributes(AtlasTypeRegistry typeRegistr return newCollectionQualifiedName; } + + public static List indexSearchPaginated(Map dsl, Set attributes, EntityDiscoveryService discovery) throws AtlasBaseException { + IndexSearchParams searchParams = new IndexSearchParams(); + List ret = new ArrayList<>(); + + if (CollectionUtils.isNotEmpty(attributes)) { + searchParams.setAttributes(attributes); + } + + List sortList = new ArrayList<>(0); + sortList.add(mapOf("__timestamp", mapOf("order", "asc"))); + sortList.add(mapOf("__guid", mapOf("order", "asc"))); + dsl.put("sort", sortList); + + int from = 0; + int size = 100; + boolean hasMore = true; + do { + dsl.put("from", from); + dsl.put("size", size); + searchParams.setDsl(dsl); + + List headers = discovery.directIndexSearch(searchParams).getEntities(); + + if (CollectionUtils.isNotEmpty(headers)) { + ret.addAll(headers); + } else { + hasMore = false; + } + + from += size; + + } while (hasMore); + + return ret; + } + + public static void verifyDuplicateAssetByName(String typeName, String assetName, EntityDiscoveryService discovery, String errorMessage) throws AtlasBaseException { + List> mustClauseList = new ArrayList(); + mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", typeName))); + mustClauseList.add(mapOf("term", mapOf("__state", "ACTIVE"))); + mustClauseList.add(mapOf("term", mapOf("name.keyword", assetName))); + + + Map bool = mapOf("must", mustClauseList); + + Map dsl = mapOf("query", mapOf("bool", bool)); + + List assets = indexSearchPaginated(dsl, null, discovery); + + if (CollectionUtils.isNotEmpty(assets)) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, errorMessage); + } + } + + public static void isValidLexoRank(String inputLexorank, String glossaryQualifiedName, String parentQualifiedName, EntityDiscoveryService discovery) throws AtlasBaseException { + + Matcher matcher = LEXORANK_VALIDITY_PATTERN.matcher(inputLexorank); + + if(!matcher.matches() || StringUtils.isEmpty(inputLexorank)){ + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Invalid value for lexicographicalSortOrder attribute"); + } + // TODO : Need to discuss either to remove this after migration is successful on all tenants and custom-sort is successfully GA or keep it for re-balancing WF + Boolean requestFromMigration = RequestContext.get().getRequestContextHeaders().getOrDefault("x-atlan-request-id", "").contains("custom-sort-migration"); + if(requestFromMigration) { + return; + } + Map lexoRankCache = RequestContext.get().getLexoRankCache(); + if(Objects.isNull(lexoRankCache)) { + lexoRankCache = new HashMap<>(); + } + String cacheKey = glossaryQualifiedName + "-" + parentQualifiedName; + if(lexoRankCache.containsKey(cacheKey) && lexoRankCache.get(cacheKey).equals(inputLexorank)){ + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, 
"Duplicate value for the attribute :" + LEXICOGRAPHICAL_SORT_ORDER +" found"); + } + Map dslQuery = createDSLforCheckingPreExistingLexoRank(inputLexorank, glossaryQualifiedName, parentQualifiedName); + List assetsWithDuplicateRank = new ArrayList<>(); + try { + IndexSearchParams searchParams = new IndexSearchParams(); + searchParams.setDsl(dslQuery); + assetsWithDuplicateRank = discovery.directIndexSearch(searchParams).getEntities(); + } catch (AtlasBaseException e) { + LOG.error("IndexSearch Error Occured : " + e.getMessage()); + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Something went wrong with IndexSearch"); + } + + if (!CollectionUtils.isEmpty(assetsWithDuplicateRank)) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Duplicate Lexorank found"); + } + + lexoRankCache.put(cacheKey, inputLexorank); + RequestContext.get().setLexoRankCache(lexoRankCache); + // TODO : Add the rebalancing logic here +// int colonIndex = inputLexorank.indexOf(":"); +// if (colonIndex != -1 && inputLexorank.substring(colonIndex + 1).length() >= REBALANCING_TRIGGER) { + // Rebalancing trigger +// } + } + + public static void assignNewLexicographicalSortOrder(AtlasEntity entity, String glossaryQualifiedName, String parentQualifiedName, EntityDiscoveryService discovery) throws AtlasBaseException{ + Map lexoRankCache = RequestContext.get().getLexoRankCache(); + + if(Objects.isNull(lexoRankCache)) { + lexoRankCache = new HashMap<>(); + } + String lexoRank = ""; + String lastLexoRank = ""; + String cacheKey = glossaryQualifiedName + "-" + parentQualifiedName; + + if(lexoRankCache.containsKey(cacheKey)) { + lastLexoRank = lexoRankCache.get(cacheKey); + } else { + + List categories = null; + Map dslQuery = generateDSLQueryForLastChild(glossaryQualifiedName, parentQualifiedName); + try { + IndexSearchParams searchParams = new IndexSearchParams(); + searchParams.setAttributes(ATTRIBUTES); + searchParams.setDsl(dslQuery); + categories = discovery.directIndexSearch(searchParams).getEntities(); + } catch (AtlasBaseException e) { + e.printStackTrace(); + throw new AtlasBaseException("Something went wrong in assigning lexicographicalSortOrder"); + } + + if (CollectionUtils.isNotEmpty(categories)) { + AtlasEntityHeader category = categories.get(0); + String lexicographicalSortOrder = (String) category.getAttribute(LEXICOGRAPHICAL_SORT_ORDER); + if (StringUtils.isNotEmpty(lexicographicalSortOrder)) { + lastLexoRank = lexicographicalSortOrder; + } else { + lastLexoRank = INIT_LEXORANK_OFFSET; + } + } else { + lastLexoRank = INIT_LEXORANK_OFFSET; + } + } + + LexoRank parsedLexoRank = LexoRank.parse(lastLexoRank); + LexoRank nextLexoRank = parsedLexoRank.genNext().genNext(); + lexoRank = nextLexoRank.toString(); + + entity.setAttribute(LEXICOGRAPHICAL_SORT_ORDER, lexoRank); + lexoRankCache.put(cacheKey, lexoRank); + RequestContext.get().setLexoRankCache(lexoRankCache); + } + + public static Map createDSLforCheckingPreExistingLexoRank(String lexoRank, String glossaryQualifiedName, String parentQualifiedName) { + + Map boolMap = buildBoolQueryDuplicateLexoRank(lexoRank, glossaryQualifiedName, parentQualifiedName); + + Map dsl = new HashMap<>(); + dsl.put("from", 0); + dsl.put("size", 1); + dsl.put("query", mapOf("bool", boolMap)); + + return dsl; + } + + private static Map buildBoolQueryDuplicateLexoRank(String lexoRank, String glossaryQualifiedName, String parentQualifiedName) { + Map boolFilter = new HashMap<>(); + List> mustArray = new ArrayList<>(); + mustArray.add(mapOf("term", mapOf("__state", 
"ACTIVE"))); + mustArray.add(mapOf("term", mapOf(LEXICOGRAPHICAL_SORT_ORDER, lexoRank))); + if(StringUtils.isNotEmpty(glossaryQualifiedName)) { + mustArray.add(mapOf("terms", mapOf("__typeName.keyword", Arrays.asList(ATLAS_GLOSSARY_TERM_TYPENAME, ATLAS_GLOSSARY_CATEGORY_TYPENAME)))); + mustArray.add(mapOf("term", mapOf("__glossary", glossaryQualifiedName))); + if(StringUtils.isEmpty(parentQualifiedName)) { + boolFilter.put("must_not", Arrays.asList(mapOf("exists", mapOf("field", "__categories")),mapOf("exists", mapOf("field", "__parentCategory")))); + } else { + List> shouldParentArray = new ArrayList<>(); + shouldParentArray.add(mapOf("term", mapOf("__categories", parentQualifiedName))); + shouldParentArray.add(mapOf("term", mapOf("__parentCategory", parentQualifiedName))); + mustArray.add(mapOf("bool",mapOf("should", shouldParentArray))); + } + } else{ + mustArray.add(mapOf("terms", mapOf("__typeName.keyword", Arrays.asList(ATLAS_GLOSSARY_ENTITY_TYPE)))); + } + + boolFilter.put("must", mustArray); + + return boolFilter; + } + + public static Map generateDSLQueryForLastChild(String glossaryQualifiedName, String parentQualifiedName) { + + Map sortKeyOrder = mapOf(LEXICOGRAPHICAL_SORT_ORDER, mapOf("order", "desc")); + + Object[] sortArray = {sortKeyOrder}; + + Map boolMap = buildBoolQuery(glossaryQualifiedName, parentQualifiedName); + + Map dsl = new HashMap<>(); + dsl.put("from", 0); + dsl.put("size", 1); + dsl.put("sort", sortArray); + dsl.put("query", mapOf("bool", boolMap)); + + return dsl; + } + + private static Map buildBoolQuery(String glossaryQualifiedName, String parentQualifiedName) { + Map boolFilter = new HashMap<>(); + List> mustArray = new ArrayList<>(); + mustArray.add(mapOf("term", mapOf("__state", "ACTIVE"))); + if(StringUtils.isNotEmpty(glossaryQualifiedName)) { + mustArray.add(mapOf("terms", mapOf("__typeName.keyword", Arrays.asList("AtlasGlossaryTerm", "AtlasGlossaryCategory")))); + mustArray.add(mapOf("term", mapOf("__glossary", glossaryQualifiedName))); + if(StringUtils.isEmpty(parentQualifiedName)) { + boolFilter.put("must_not", Arrays.asList(mapOf("exists", mapOf("field", "__categories")),mapOf("exists", mapOf("field", "__parentCategory")))); + } else { + List> shouldParentArray = new ArrayList<>(); + shouldParentArray.add(mapOf("term", mapOf("__categories", parentQualifiedName))); + shouldParentArray.add(mapOf("term", mapOf("__parentCategory", parentQualifiedName))); + mustArray.add(mapOf("bool",mapOf("should", shouldParentArray))); + } + } else{ + mustArray.add(mapOf("terms", mapOf("__typeName.keyword", Arrays.asList("AtlasGlossary")))); + } + + boolFilter.put("must", mustArray); + + return boolFilter; + } } diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/accesscontrol/PersonaPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/accesscontrol/PersonaPreProcessor.java index 3541e3e4a7..222bca0962 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/accesscontrol/PersonaPreProcessor.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/accesscontrol/PersonaPreProcessor.java @@ -19,6 +19,7 @@ import org.apache.atlas.RequestContext; +import org.apache.atlas.discovery.EntityDiscoveryService; import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.auth.client.keycloak.AtlasKeycloakClient; import org.apache.atlas.model.instance.AtlasEntity; @@ -73,12 +74,12 @@ public class 
PersonaPreProcessor implements PreProcessor { private static final Logger LOG = LoggerFactory.getLogger(PersonaPreProcessor.class); - private final AtlasGraph graph; - private final AtlasTypeRegistry typeRegistry; - private final EntityGraphRetriever entityRetriever; - private IndexAliasStore aliasStore; - private AtlasEntityStore entityStore; - private KeycloakStore keycloakStore; + protected final AtlasGraph graph; + protected AtlasTypeRegistry typeRegistry; + protected final EntityGraphRetriever entityRetriever; + protected IndexAliasStore aliasStore; + protected AtlasEntityStore entityStore; + protected KeycloakStore keycloakStore; public PersonaPreProcessor(AtlasGraph graph, AtlasTypeRegistry typeRegistry, @@ -209,7 +210,7 @@ private void updatePoliciesIsEnabledAttr(EntityMutationContext context, AtlasEnt } } - private String createKeycloakRole(AtlasEntity entity) throws AtlasBaseException { + protected String createKeycloakRole(AtlasEntity entity) throws AtlasBaseException { String roleName = getPersonaRoleName(entity); List users = getPersonaUsers(entity); List groups = getPersonaGroups(entity); @@ -228,7 +229,7 @@ private String createKeycloakRole(AtlasEntity entity) throws AtlasBaseException return role.getId(); } - private void updateKeycloakRole(AtlasEntity newPersona, AtlasEntity existingPersona) throws AtlasBaseException { + protected void updateKeycloakRole(AtlasEntity newPersona, AtlasEntity existingPersona) throws AtlasBaseException { String roleId = getPersonaRoleId(existingPersona); String roleName = getPersonaRoleName(existingPersona); diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/accesscontrol/StakeholderPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/accesscontrol/StakeholderPreProcessor.java new file mode 100644 index 0000000000..1cba29f935 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/accesscontrol/StakeholderPreProcessor.java @@ -0,0 +1,321 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
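// Standalone sketch (illustrative, not from this patch) referring back to
// ConnectionPreProcessor.processUpdateConnection earlier in this diff: the admin attributes follow
// a three-way convention -- a null list means "leave unchanged", an empty list means "remove all",
// and a non-empty list means "replace". determineFinalState and allEmpty mirror the method names
// in the diff; everything else here (element type, sample data) is assumed for illustration.
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Stream;

public final class AdminListSemanticsSketch {

    // null -> keep current, empty -> clear, non-empty -> replace
    static List<String> determineFinalState(List<String> newAdmins, List<String> currentAdmins) {
        return newAdmins == null ? currentAdmins : newAdmins;
    }

    // true only when every supplied list is non-null and empty,
    // i.e. the update would leave the connection with no admins at all
    @SafeVarargs
    static boolean allEmpty(List<String>... lists) {
        if (lists == null || lists.length == 0) {
            return true;
        }
        return Stream.of(lists).allMatch(list -> list != null && list.isEmpty());
    }

    public static void main(String[] args) {
        List<String> current = Arrays.asList("alice");

        System.out.println(determineFinalState(null, current));                         // [alice] (unchanged)
        System.out.println(determineFinalState(Collections.<String>emptyList(), current)); // []    (cleared)
        System.out.println(allEmpty(Collections.<String>emptyList(), Collections.<String>emptyList())); // true
    }
}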
+ */ +package org.apache.atlas.repository.store.graph.v2.preprocessor.accesscontrol; + + +import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.AtlasException; +import org.apache.atlas.RequestContext; +import org.apache.atlas.authorize.AtlasAuthorizationUtils; +import org.apache.atlas.authorize.AtlasEntityAccessRequest; +import org.apache.atlas.authorize.AtlasPrivilege; +import org.apache.atlas.discovery.EntityDiscoveryService; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.instance.AtlasEntity; +import org.apache.atlas.model.instance.AtlasEntityHeader; +import org.apache.atlas.model.instance.AtlasObjectId; +import org.apache.atlas.model.instance.AtlasStruct; +import org.apache.atlas.model.instance.EntityMutations; +import org.apache.atlas.repository.graphdb.AtlasGraph; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.atlas.repository.store.graph.AtlasEntityStore; +import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; +import org.apache.atlas.repository.store.graph.v2.EntityMutationContext; +import org.apache.atlas.type.AtlasTypeRegistry; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections.MapUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import static java.lang.String.format; +import static org.apache.atlas.AtlasErrorCode.BAD_REQUEST; +import static org.apache.atlas.AtlasErrorCode.OPERATION_NOT_SUPPORTED; +import static org.apache.atlas.repository.Constants.NAME; +import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; +import static org.apache.atlas.repository.Constants.STAKEHOLDER_ENTITY_TYPE; +import static org.apache.atlas.repository.Constants.STAKEHOLDER_TITLE_ENTITY_TYPE; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.indexSearchPaginated; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh.StakeholderTitlePreProcessor.*; +import static org.apache.atlas.repository.util.AccessControlUtils.ATTR_ACCESS_CONTROL_ENABLED; +import static org.apache.atlas.repository.util.AccessControlUtils.ATTR_PERSONA_ROLE_ID; +import static org.apache.atlas.repository.util.AccessControlUtils.REL_ATTR_POLICIES; +import static org.apache.atlas.repository.util.AccessControlUtils.getESAliasName; +import static org.apache.atlas.repository.util.AccessControlUtils.getPersonaRoleId; +import static org.apache.atlas.repository.util.AccessControlUtils.getUUID; +import static org.apache.atlas.repository.util.AccessControlUtils.validateNoPoliciesAttached; +import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; + +public class StakeholderPreProcessor extends PersonaPreProcessor { + private static final Logger LOG = LoggerFactory.getLogger(StakeholderPreProcessor.class); + + public static final String ATTR_DOMAIN_QUALIFIED_NAME = "stakeholderDomainQualifiedName"; + public static final String ATTR_STAKEHOLDER_TITLE_GUID = "stakeholderTitleGuid"; + + public static final String REL_ATTR_STAKEHOLDER_TITLE = "stakeholderTitle"; + public static final String REL_ATTR_STAKEHOLDER_DOMAIN = "stakeholderDataDomain"; + + protected EntityDiscoveryService discovery; + + public StakeholderPreProcessor(AtlasGraph graph, + AtlasTypeRegistry typeRegistry, + 
EntityGraphRetriever entityRetriever, + AtlasEntityStore entityStore) { + super(graph, typeRegistry, entityRetriever, entityStore); + + try { + this.discovery = new EntityDiscoveryService(typeRegistry, graph, null, null, null, null); + } catch (AtlasException e) { + e.printStackTrace(); + } + } + + @Override + public void processAttributes(AtlasStruct entityStruct, EntityMutationContext context, + EntityMutations.EntityOperation operation) throws AtlasBaseException { + if (LOG.isDebugEnabled()) { + LOG.debug("StakeholderPreProcessor.processAttributes: pre processing {}, {}", entityStruct.getAttribute(QUALIFIED_NAME), operation); + } + + AtlasEntity entity = (AtlasEntity) entityStruct; + + switch (operation) { + case CREATE: + processCreateStakeholder(entity); + break; + case UPDATE: + processUpdateStakeholder(context, entity); + break; + } + } + + @Override + public void processDelete(AtlasVertex vertex) throws AtlasBaseException { + AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo = entityRetriever.toAtlasEntityWithExtInfo(vertex); + AtlasEntity stakeholder = entityWithExtInfo.getEntity(); + + if(!stakeholder.getStatus().equals(AtlasEntity.Status.ACTIVE)) { + LOG.info("Stakeholder is already deleted/purged"); + return; + } + + //delete policies + List policies = (List) stakeholder.getRelationshipAttribute(REL_ATTR_POLICIES); + if (CollectionUtils.isNotEmpty(policies)) { + for (AtlasObjectId policyObjectId : policies) { + entityStore.deleteById(policyObjectId.getGuid()); + } + } + + //remove role + keycloakStore.removeRole(getPersonaRoleId(stakeholder)); + + //delete ES alias + aliasStore.deleteAlias(getESAliasName(stakeholder)); + } + + private void processCreateStakeholder(AtlasEntity entity) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processCreateStakeholder"); + + validateNoPoliciesAttached(entity); + + if (!entity.hasRelationshipAttribute(REL_ATTR_STAKEHOLDER_TITLE) || !entity.hasRelationshipAttribute(REL_ATTR_STAKEHOLDER_DOMAIN)) { + throw new AtlasBaseException(BAD_REQUEST, + String.format("Relationships %s and %s are mandatory", REL_ATTR_STAKEHOLDER_TITLE, REL_ATTR_STAKEHOLDER_DOMAIN)); + } + + String domainQualifiedName = getQualifiedNameFromRelationAttribute(entity, REL_ATTR_STAKEHOLDER_DOMAIN); + String stakeholderTitleGuid = getGuidFromRelationAttribute(entity, REL_ATTR_STAKEHOLDER_TITLE); + + ensureTitleAvailableForDomain(domainQualifiedName, stakeholderTitleGuid); + + //validate Stakeholder & StakeholderTitle pair is unique for this domain + verifyDuplicateStakeholderByDomainAndTitle(domainQualifiedName, stakeholderTitleGuid); + + //validate Name uniqueness for Stakeholders across this domain + String name = (String) entity.getAttribute(NAME); + verifyDuplicateStakeholderByName(name, domainQualifiedName, discovery); + + entity.setAttribute(ATTR_DOMAIN_QUALIFIED_NAME, domainQualifiedName); + entity.setAttribute(ATTR_STAKEHOLDER_TITLE_GUID, stakeholderTitleGuid); + + String stakeholderQualifiedName = format("default/%s/%s", + getUUID(), + domainQualifiedName); + + entity.setAttribute(QUALIFIED_NAME, stakeholderQualifiedName); + + + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_CREATE, new AtlasEntityHeader(entity)), + "create Stakeholder: ", entity.getAttribute(NAME)); + + entity.setAttribute(ATTR_ACCESS_CONTROL_ENABLED, entity.getAttributes().getOrDefault(ATTR_ACCESS_CONTROL_ENABLED, true)); + + //create keycloak role + String roleId = 
createKeycloakRole(entity); + + entity.setAttribute(ATTR_PERSONA_ROLE_ID, roleId); + + //create ES alias + aliasStore.createAlias(entity); + + RequestContext.get().endMetricRecord(metricRecorder); + } + + private void processUpdateStakeholder(EntityMutationContext context, AtlasEntity stakeholder) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processUpdateStakeholder"); + + validateNoPoliciesAttached(stakeholder); + + AtlasVertex vertex = context.getVertex(stakeholder.getGuid()); + + AtlasEntity existingStakeholderEntity = entityRetriever.toAtlasEntity(vertex); + + if (!AtlasEntity.Status.ACTIVE.equals(existingStakeholderEntity.getStatus())) { + throw new AtlasBaseException(OPERATION_NOT_SUPPORTED, "Stakeholder is not Active"); + } + + stakeholder.removeAttribute(ATTR_DOMAIN_QUALIFIED_NAME); + stakeholder.removeAttribute(ATTR_STAKEHOLDER_TITLE_GUID); + stakeholder.removeAttribute(ATTR_PERSONA_ROLE_ID); + + if (MapUtils.isNotEmpty(stakeholder.getRelationshipAttributes())) { + stakeholder.getRelationshipAttributes().remove(REL_ATTR_STAKEHOLDER_DOMAIN); + stakeholder.getRelationshipAttributes().remove(REL_ATTR_STAKEHOLDER_TITLE); + } + + String currentName = vertex.getProperty(NAME, String.class); + String newName = (String) stakeholder.getAttribute(NAME); + + if (!currentName.equals(newName)) { + verifyDuplicateStakeholderByName(newName, (String) existingStakeholderEntity.getAttribute(ATTR_DOMAIN_QUALIFIED_NAME), discovery); + } + + String vertexQName = vertex.getProperty(QUALIFIED_NAME, String.class); + stakeholder.setAttribute(QUALIFIED_NAME, vertexQName); + + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_UPDATE, new AtlasEntityHeader(stakeholder)), + "update Stakeholder: ", stakeholder.getAttribute(NAME)); + + updateKeycloakRole(stakeholder, existingStakeholderEntity); + + RequestContext.get().endMetricRecord(metricRecorder); + } + + private String getGuidFromRelationAttribute(AtlasEntity entity, String relationshipAttributeName) throws AtlasBaseException { + AtlasObjectId relationObjectId = (AtlasObjectId) entity.getRelationshipAttribute(relationshipAttributeName); + + String guid = relationObjectId.getGuid(); + if (StringUtils.isEmpty(guid)) { + AtlasVertex vertex = entityRetriever.getEntityVertex(relationObjectId); + guid = vertex.getProperty("__guid", String.class); + } + + return guid; + } + + private String getQualifiedNameFromRelationAttribute(AtlasEntity entity, String relationshipAttributeName) throws AtlasBaseException { + AtlasObjectId relationObjectId = (AtlasObjectId) entity.getRelationshipAttribute(relationshipAttributeName); + String qualifiedName = null; + + if (relationObjectId.getUniqueAttributes() != null) { + qualifiedName = (String) relationObjectId.getUniqueAttributes().get(QUALIFIED_NAME); + } + + if (StringUtils.isEmpty(qualifiedName)) { + AtlasVertex vertex = entityRetriever.getEntityVertex(relationObjectId); + qualifiedName = vertex.getProperty(QUALIFIED_NAME, String.class); + } + + return qualifiedName; + } + + protected void verifyDuplicateStakeholderByDomainAndTitle(String domainQualifiedName, String stakeholderTitleGuid) throws AtlasBaseException { + + List> mustClauseList = new ArrayList(); + mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", STAKEHOLDER_ENTITY_TYPE))); + mustClauseList.add(mapOf("term", mapOf("__state", "ACTIVE"))); + mustClauseList.add(mapOf("term", mapOf(ATTR_DOMAIN_QUALIFIED_NAME, 
domainQualifiedName))); + mustClauseList.add(mapOf("term", mapOf(ATTR_STAKEHOLDER_TITLE_GUID, stakeholderTitleGuid))); + + + Map bool = mapOf("must", mustClauseList); + Map dsl = mapOf("query", mapOf("bool", bool)); + + List assets = indexSearchPaginated(dsl, null, this.discovery); + + if (CollectionUtils.isNotEmpty(assets)) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, + format("Stakeholder for provided title & domain combination already exists with name: %s", assets.get(0).getAttribute(NAME))); + } + } + + protected void ensureTitleAvailableForDomain(String domainQualifiedName, String stakeholderTitleGuid) throws AtlasBaseException { + + List> mustClauseList = new ArrayList(); + mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", STAKEHOLDER_TITLE_ENTITY_TYPE))); + mustClauseList.add(mapOf("term", mapOf("__state", "ACTIVE"))); + mustClauseList.add(mapOf("term", mapOf("__guid", stakeholderTitleGuid))); + + Map bool = mapOf("must", mustClauseList); + Map dsl = mapOf("query", mapOf("bool", bool)); + + List assets = indexSearchPaginated(dsl, Collections.singleton(ATTR_DOMAIN_QUALIFIED_NAMES), this.discovery); + + if (CollectionUtils.isNotEmpty(assets)) { + AtlasEntityHeader stakeholderTitleHeader = assets.get(0); + + List domainQualifiedNames = (List) stakeholderTitleHeader.getAttribute(ATTR_DOMAIN_QUALIFIED_NAMES); + + if (!domainQualifiedNames.contains(STAR) && !domainQualifiedNames.contains(NEW_STAR)) { + Optional parentDomain = domainQualifiedNames.stream().filter(x -> domainQualifiedName.startsWith(x)).findFirst(); + + if (!parentDomain.isPresent()) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Provided StakeholderTitle is not applicable to the domain"); + } + } + } + } + + public static void verifyDuplicateStakeholderByName(String assetName, String domainQualifiedName, EntityDiscoveryService discovery) throws AtlasBaseException { + + List> mustClauseList = new ArrayList(); + mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", STAKEHOLDER_ENTITY_TYPE))); + mustClauseList.add(mapOf("term", mapOf("__state", "ACTIVE"))); + mustClauseList.add(mapOf("term", mapOf("name.keyword", assetName))); + mustClauseList.add(mapOf("term", mapOf(ATTR_DOMAIN_QUALIFIED_NAME, domainQualifiedName))); + + + Map bool = mapOf("must", mustClauseList); + Map dsl = mapOf("query", mapOf("bool", bool)); + + List assets = indexSearchPaginated(dsl, null, discovery); + + if (CollectionUtils.isNotEmpty(assets)) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, + format("Stakeholder with name %s already exists for current domain", assetName)); + } + } +} diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/AbstractContractPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/AbstractContractPreProcessor.java new file mode 100644 index 0000000000..a89f4327de --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/AbstractContractPreProcessor.java @@ -0,0 +1,122 @@ +package org.apache.atlas.repository.store.graph.v2.preprocessor.contract; + +import org.apache.atlas.RequestContext; +import org.apache.atlas.authorize.AtlasAuthorizationUtils; +import org.apache.atlas.authorize.AtlasEntityAccessRequest; +import org.apache.atlas.authorize.AtlasPrivilege; +import org.apache.atlas.discovery.EntityDiscoveryService; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.TypeCategory; 
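// Standalone sketch (illustrative, not from this patch): the pre-processors above assemble their
// index-search DSL as nested java.util maps handed to EntityDiscoveryService.directIndexSearch.
// This shows the {"query":{"bool":{"must":[ term clauses ]}}} shape used in
// verifyDuplicateStakeholderByDomainAndTitle and similar helpers. The local mapOf stands in for
// AtlasEntityUtils.mapOf, and the "Stakeholder" literal is a placeholder for the type-name constant.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public final class BoolMustQuerySketch {

    // Single-entry map helper, matching how the patch composes its DSL fragments.
    static Map<String, Object> mapOf(String key, Object value) {
        Map<String, Object> map = new HashMap<>();
        map.put(key, value);
        return map;
    }

    public static void main(String[] args) {
        List<Map<String, Object>> mustClauseList = new ArrayList<>();
        mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", "Stakeholder"))); // placeholder type name
        mustClauseList.add(mapOf("term", mapOf("__state", "ACTIVE")));

        Map<String, Object> dsl = mapOf("query", mapOf("bool", mapOf("must", mustClauseList)));
        System.out.println(dsl);
        // {query={bool={must=[{term={__typeName.keyword=Stakeholder}}, {term={__state=ACTIVE}}]}}}
    }
}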
+import org.apache.atlas.model.discovery.AtlasSearchResult; +import org.apache.atlas.model.discovery.IndexSearchParams; +import org.apache.atlas.model.instance.AtlasEntity; +import org.apache.atlas.model.instance.AtlasEntityHeader; +import org.apache.atlas.repository.graphdb.AtlasGraph; +import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; +import org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessor; +import org.apache.atlas.type.AtlasEntityType; +import org.apache.atlas.type.AtlasTypeRegistry; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.apache.commons.collections.CollectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +import static org.apache.atlas.AtlasErrorCode.TYPE_NAME_INVALID; +import static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; + +public abstract class AbstractContractPreProcessor implements PreProcessor { + private static final Logger LOG = LoggerFactory.getLogger(AbstractContractPreProcessor.class); + + public final AtlasTypeRegistry typeRegistry; + public final EntityGraphRetriever entityRetriever; + public final AtlasGraph graph; + private final EntityDiscoveryService discovery; + + + AbstractContractPreProcessor(AtlasGraph graph, AtlasTypeRegistry typeRegistry, + EntityGraphRetriever entityRetriever, EntityDiscoveryService discovery) { + this.graph = graph; + this.typeRegistry = typeRegistry; + this.entityRetriever = entityRetriever; + this.discovery = discovery; + } + + void authorizeContractCreateOrUpdate(AtlasEntity contractEntity, AtlasEntity associatedAsset) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("authorizeContractUpdate"); + try { + AtlasEntityHeader entityHeader = new AtlasEntityHeader(associatedAsset); + + //First authorize entity update access + verifyAssetAccess(entityHeader, AtlasPrivilege.ENTITY_UPDATE, contractEntity, AtlasPrivilege.ENTITY_UPDATE); + + } finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + + + private void verifyAssetAccess(AtlasEntityHeader asset, AtlasPrivilege assetPrivilege, + AtlasEntity contract, AtlasPrivilege contractPrivilege) throws AtlasBaseException { + verifyAccess(asset, assetPrivilege); + verifyAccess(contract, contractPrivilege); + } + + private void verifyAccess(AtlasEntity entity, AtlasPrivilege privilege) throws AtlasBaseException { + verifyAccess(new AtlasEntityHeader(entity), privilege); + } + + private void verifyAccess(AtlasEntityHeader entityHeader, AtlasPrivilege privilege) throws AtlasBaseException { + String errorMessage = privilege.name() + " entity: " + entityHeader.getTypeName(); + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, privilege, entityHeader), errorMessage); + } + + public AtlasEntity getAssociatedAsset(String datasetQName, DataContract contract) throws AtlasBaseException { + IndexSearchParams indexSearchParams = new IndexSearchParams(); + Map dsl = new HashMap<>(); + int size = 2; + + List> mustClauseList = new ArrayList<>(); + mustClauseList.add(mapOf("term", mapOf(QUALIFIED_NAME, datasetQName))); + if (contract.getType() != null) { + mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", contract.getType().name()))); + } else { + mustClauseList.add(mapOf("term", mapOf("__superTypeNames.keyword", SQL_ENTITY_TYPE))); + } + + dsl.put("query", mapOf("bool", mapOf("must", mustClauseList))); + 
dsl.put("sort", Collections.singletonList(mapOf(ATTR_CONTRACT_VERSION, mapOf("order", "desc")))); + dsl.put("size", size); + + indexSearchParams.setDsl(dsl); + indexSearchParams.setSuppressLogs(true); + + AtlasSearchResult result = discovery.directIndexSearch(indexSearchParams); + if (result == null || CollectionUtils.isEmpty(result.getEntities())) { + throw new AtlasBaseException("Dataset doesn't exist for given qualified name."); + + } else if (result.getEntities().size() >1 ) { + throw new AtlasBaseException("Multiple dataset exists for given qualified name. " + + "Please specify the `type` attribute in contract."); + } else { + AtlasEntityHeader datasetEntity = result.getEntities().get(0); + contract.setType(datasetEntity.getTypeName()); + return new AtlasEntity(datasetEntity); + } + + } + + AtlasEntityType ensureEntityType(String typeName) throws AtlasBaseException { + AtlasEntityType ret = typeRegistry.getEntityTypeByName(typeName); + + if (ret == null) { + throw new AtlasBaseException(TYPE_NAME_INVALID, TypeCategory.ENTITY.name(), typeName); + } + + return ret; + } + + +} diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/ContractPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/ContractPreProcessor.java new file mode 100644 index 0000000000..1a7aa64cfb --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/ContractPreProcessor.java @@ -0,0 +1,302 @@ +package org.apache.atlas.repository.store.graph.v2.preprocessor.contract; + +import org.apache.atlas.RequestContext; +import org.apache.atlas.discovery.EntityDiscoveryService; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.discovery.AtlasSearchResult; +import org.apache.atlas.model.discovery.IndexSearchParams; +import org.apache.atlas.model.instance.AtlasEntity; +import org.apache.atlas.model.instance.AtlasStruct; +import org.apache.atlas.model.instance.EntityMutations; +import org.apache.atlas.repository.graphdb.AtlasGraph; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.atlas.repository.store.graph.v2.*; +import org.apache.atlas.type.AtlasEntityType; +import org.apache.atlas.type.AtlasTypeRegistry; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +import static org.apache.atlas.AtlasErrorCode.*; +import static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; +import static org.apache.atlas.type.AtlasTypeUtil.getAtlasObjectId; + +public class ContractPreProcessor extends AbstractContractPreProcessor { + private static final Logger LOG = LoggerFactory.getLogger(ContractPreProcessor.class); + public static final String ATTR_ASSET_GUID = "dataContractAssetGuid"; + public static final String REL_ATTR_LATEST_CONTRACT = "dataContractLatest"; + public static final String REL_ATTR_GOVERNED_ASSET_CERTIFIED = "dataContractLatestCertified"; + public static final String REL_ATTR_PREVIOUS_VERSION = "dataContractPreviousVersion"; + public static final String ASSET_ATTR_HAS_CONTRACT = "hasContract"; + public static final String CONTRACT_QUALIFIED_NAME_SUFFIX = "contract"; + public static final String CONTRACT_ATTR_STATUS = "status"; + private static final Set contractAttributes = new HashSet<>(); + static { + 
contractAttributes.add(ATTR_CONTRACT); + contractAttributes.add(ATTR_CONTRACT_JSON); + contractAttributes.add(ATTR_CERTIFICATE_STATUS); + contractAttributes.add(ATTR_CONTRACT_VERSION); + } + private final boolean storeDifferentialAudits; + private final EntityDiscoveryService discovery; + + private final AtlasEntityComparator entityComparator; + + + public ContractPreProcessor(AtlasGraph graph, AtlasTypeRegistry typeRegistry, + EntityGraphRetriever entityRetriever, + boolean storeDifferentialAudits, EntityDiscoveryService discovery) { + + super(graph, typeRegistry, entityRetriever, discovery); + this.storeDifferentialAudits = storeDifferentialAudits; + this.discovery = discovery; + this.entityComparator = new AtlasEntityComparator(typeRegistry, entityRetriever, null, true, true); + + } + + @Override + public void processAttributes(AtlasStruct entityStruct, EntityMutationContext context, EntityMutations.EntityOperation operation) throws AtlasBaseException { + AtlasEntity entity = (AtlasEntity) entityStruct; + switch (operation) { + case CREATE: + processCreateContract(entity, context); + break; + case UPDATE: + // Updating an existing version of the contract + processUpdateContract(entity, context); + } + + } + + private void processUpdateContract(AtlasEntity entity, EntityMutationContext context) throws AtlasBaseException { + String contractString = getContractString(entity); + AtlasVertex vertex = context.getVertex(entity.getGuid()); + AtlasEntity existingContractEntity = entityRetriever.toAtlasEntity(vertex); + // No update to relationships allowed for the existing contract version + resetAllRelationshipAttributes(entity); + if (entity.getAttribute(ATTR_CERTIFICATE_STATUS) == DataContract.Status.VERIFIED.name() || + existingContractEntity.getAttribute(ATTR_CERTIFICATE_STATUS) == DataContract.Status.VERIFIED.name()) { + // Update the same asset(entity) + throw new AtlasBaseException(OPERATION_NOT_SUPPORTED, "Can't update published version of contract."); + } + } + private void processCreateContract(AtlasEntity entity, EntityMutationContext context) throws AtlasBaseException { + /* + Low-level Design + | Authorization + | Deserialization of the JSON + ---| Validation of spec + | Validation of contract + | Create Version + | Create Draft + ---| asset to contract sync + | Create Publish + ---| two-way sync of attribute + */ + + String contractQName = (String) entity.getAttribute(QUALIFIED_NAME); + validateAttribute(!contractQName.endsWith(String.format("/%s", CONTRACT_QUALIFIED_NAME_SUFFIX)), "Invalid qualifiedName for the contract."); + + String contractString = getContractString(entity); + DataContract contract = DataContract.deserialize(contractString); + String datasetQName = contractQName.substring(0, contractQName.lastIndexOf('/')); + AtlasEntity associatedAsset = getAssociatedAsset(datasetQName, contract); + contractQName = String.format("%s/%s/%s", datasetQName, associatedAsset.getTypeName(), CONTRACT_QUALIFIED_NAME_SUFFIX); + + authorizeContractCreateOrUpdate(entity, associatedAsset); + + boolean contractSync = syncContractCertificateStatus(entity, contract); + contractString = DataContract.serialize(contract); + entity.setAttribute(ATTR_CONTRACT, contractString); + String contractStringJSON = DataContract.serializeJSON(contract); + entity.setAttribute(ATTR_CONTRACT_JSON, contractStringJSON); + + AtlasEntity currentVersionEntity = getCurrentVersion(associatedAsset.getGuid()); + Long newVersionNumber = 1L; + if (currentVersionEntity == null && contract.getStatus() == 
DataContract.Status.VERIFIED) { + throw new AtlasBaseException("Can't create a new published version"); + } + if (currentVersionEntity != null) { + // Contract already exist + Long currentVersionNumber = (Long) currentVersionEntity.getAttribute(ATTR_CONTRACT_VERSION); + List attributes = getDiffAttributes(entity, currentVersionEntity); + if (attributes.isEmpty()) { + // No changes in the contract, Not creating new version + removeCreatingVertex(context, entity); + return; + } else if (!currentVersionEntity.getAttribute(ATTR_CERTIFICATE_STATUS).equals(DataContract.Status.VERIFIED.name())) { + resetAllRelationshipAttributes(entity); + // Contract is in draft state. Update the same version + updateExistingVersion(context, entity, currentVersionEntity); + newVersionNumber = currentVersionNumber; + } else { + // Current version is published. Creating a new draft version. + if (contract.getStatus() == DataContract.Status.VERIFIED) { + throw new AtlasBaseException("Can't create a new published version"); + } + newVersionNumber = currentVersionNumber + 1; + + resetAllRelationshipAttributes(entity); + // Attach previous version via rel + entity.setRelationshipAttribute(REL_ATTR_PREVIOUS_VERSION, getAtlasObjectId(currentVersionEntity)); + AtlasVertex vertex = AtlasGraphUtilsV2.findByGuid(currentVersionEntity.getGuid()); + AtlasEntityType entityType = ensureEntityType(currentVersionEntity.getTypeName()); + context.addUpdated(currentVersionEntity.getGuid(), currentVersionEntity, entityType, vertex); + + } + } + entity.setAttribute(QUALIFIED_NAME, String.format("%s/V%s", contractQName, newVersionNumber)); + entity.setAttribute(ATTR_CONTRACT_VERSION, newVersionNumber); + entity.setAttribute(ATTR_ASSET_GUID, associatedAsset.getGuid()); + + datasetAttributeSync(context, associatedAsset, entity); + + } + + private List getDiffAttributes(AtlasEntity entity, AtlasEntity latestExistingVersion) throws AtlasBaseException { + AtlasEntityComparator.AtlasEntityDiffResult diffResult = entityComparator.getDiffResult(entity, latestExistingVersion, false); + List attributesSet = new ArrayList<>(); + + if (diffResult.hasDifference()) { + for (Map.Entry entry : diffResult.getDiffEntity().getAttributes().entrySet()) { + if (!entry.getKey().equals(QUALIFIED_NAME)) { + attributesSet.add(entry.getKey()); + } + } + } + return attributesSet; + } + + private void updateExistingVersion(EntityMutationContext context, AtlasEntity entity, AtlasEntity currentVersionEntity) throws AtlasBaseException { + removeCreatingVertex(context, entity); + entity.setAttribute(QUALIFIED_NAME, currentVersionEntity.getAttribute(QUALIFIED_NAME)); + entity.setGuid(currentVersionEntity.getGuid()); + AtlasVertex vertex = AtlasGraphUtilsV2.findByGuid(entity.getGuid()); + AtlasEntityType entityType = ensureEntityType(entity.getTypeName()); + + context.addUpdated(entity.getGuid(), entity, entityType, vertex); + recordEntityMutatedDetails(context, entity, vertex); + + } + + public AtlasEntity getCurrentVersion(String datasetGuid) throws AtlasBaseException { + IndexSearchParams indexSearchParams = new IndexSearchParams(); + Map dsl = new HashMap<>(); + int size = 1; + + List> mustClauseList = new ArrayList<>(); + mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", CONTRACT_ENTITY_TYPE))); + mustClauseList.add(mapOf("term", mapOf(ATTR_ASSET_GUID, datasetGuid))); + + dsl.put("query", mapOf("bool", mapOf("must", mustClauseList))); + dsl.put("sort", Collections.singletonList(mapOf(ATTR_CONTRACT_VERSION, mapOf("order", "desc")))); + dsl.put("size", 
size); + + indexSearchParams.setDsl(dsl); + indexSearchParams.setAttributes(contractAttributes); + indexSearchParams.setSuppressLogs(true); + + AtlasSearchResult result = discovery.directIndexSearch(indexSearchParams); + if (result == null || CollectionUtils.isEmpty(result.getEntities())) { + return null; + } + return new AtlasEntity(result.getEntities().get(0)); + } + + private void removeCreatingVertex(EntityMutationContext context, AtlasEntity entity) { + context.getCreatedEntities().remove(entity); + graph.removeVertex(context.getVertex(entity.getGuid())); + } + + private void resetAllRelationshipAttributes(AtlasEntity entity) { + if (entity.getRemoveRelationshipAttributes() != null) { + entity.setRemoveRelationshipAttributes(null); + } + if (entity.getAppendRelationshipAttributes() != null) { + entity.setAppendRelationshipAttributes(null); + } + if (entity.getRelationshipAttributes() != null) { + entity.setRelationshipAttributes(null); + } + } + + private boolean syncContractCertificateStatus(AtlasEntity entity, DataContract contract) throws AtlasBaseException { + boolean contractSync = false; + // Sync certificateStatus + if (!Objects.equals(entity.getAttribute(ATTR_CERTIFICATE_STATUS), contract.getStatus().name())) { + /* + CertificateStatus | Status | Result + DRAFT VERIFIED cert -> VERIFIED > + VERIFIED DRAFT stat -> VERIFIED > + - DRAFT cert -> DRAFT + - VERIFIED cert -> VERIFIED > + DRAFT - stat -> DRAFT + VERIFIED - stat -> VERIFIED > + + */ + if (Objects.equals(entity.getAttribute(ATTR_CERTIFICATE_STATUS), DataContract.Status.VERIFIED.name())) { + contract.setStatus(String.valueOf(DataContract.Status.VERIFIED)); + contractSync = true; + } else if (Objects.equals(contract.getStatus(), DataContract.Status.VERIFIED)) { + entity.setAttribute(ATTR_CERTIFICATE_STATUS, DataContract.Status.VERIFIED.name()); + } else { + entity.setAttribute(ATTR_CERTIFICATE_STATUS, DataContract.Status.DRAFT); + contract.setStatus(String.valueOf(DataContract.Status.DRAFT)); + contractSync = true; + } + + } + return contractSync; + + } + + private void datasetAttributeSync(EntityMutationContext context, AtlasEntity associatedAsset, AtlasEntity contractAsset) throws AtlasBaseException { + // Creating new empty AtlasEntity to update with selective attributes only + AtlasEntity entity = new AtlasEntity(associatedAsset.getTypeName()); + entity.setGuid(associatedAsset.getGuid()); + entity.setAttribute(QUALIFIED_NAME, associatedAsset.getAttribute(QUALIFIED_NAME)); + if (associatedAsset.getAttribute(ASSET_ATTR_HAS_CONTRACT) == null || associatedAsset.getAttribute(ASSET_ATTR_HAS_CONTRACT).equals(false)) { + entity.setAttribute(ASSET_ATTR_HAS_CONTRACT, true); + } + + // Update relationship with contract + entity.setRelationshipAttribute(REL_ATTR_LATEST_CONTRACT, getAtlasObjectId(contractAsset)); + if (Objects.equals(contractAsset.getAttribute(ATTR_CERTIFICATE_STATUS), DataContract.Status.VERIFIED.name()) ) { + entity.setRelationshipAttribute(REL_ATTR_GOVERNED_ASSET_CERTIFIED, getAtlasObjectId(contractAsset)); + } + + AtlasVertex vertex = AtlasGraphUtilsV2.findByGuid(entity.getGuid()); + AtlasEntityType entityType = ensureEntityType(entity.getTypeName()); + context.addUpdated(entity.getGuid(), entity, entityType, vertex); + recordEntityMutatedDetails(context, entity, vertex); + } + + private void recordEntityMutatedDetails(EntityMutationContext context, AtlasEntity entity, AtlasVertex vertex) throws AtlasBaseException { + AtlasEntityComparator entityComparator = new AtlasEntityComparator(typeRegistry, 
entityRetriever, context.getGuidAssignments(), true, true); + AtlasEntityComparator.AtlasEntityDiffResult diffResult = entityComparator.getDiffResult(entity, vertex, !storeDifferentialAudits); + RequestContext reqContext = RequestContext.get(); + if (diffResult.hasDifference()) { + if (storeDifferentialAudits) { + diffResult.getDiffEntity().setGuid(entity.getGuid()); + reqContext.cacheDifferentialEntity(diffResult.getDiffEntity()); + } + } + } + + private static void validateAttribute(boolean isInvalid, String errorMessage) throws AtlasBaseException { + if (isInvalid) + throw new AtlasBaseException(BAD_REQUEST, errorMessage); + } + + private static String getContractString(AtlasEntity entity) { + String contractString = (String) entity.getAttribute(ATTR_CONTRACT); + if (StringUtils.isEmpty(contractString)) { + contractString = (String) entity.getAttribute(ATTR_CONTRACT_JSON); + } + return contractString; + } +} diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/DataContract.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/DataContract.java new file mode 100644 index 0000000000..03aee74c08 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/DataContract.java @@ -0,0 +1,321 @@ +package org.apache.atlas.repository.store.graph.v2.preprocessor.contract; + +import java.lang.String; +import java.util.*; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import com.fasterxml.jackson.annotation.*; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; +import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.commons.lang.StringUtils; + +import javax.validation.*; +import javax.validation.constraints.NotNull; +import java.util.Set; + +import static org.apache.atlas.AtlasErrorCode.*; + + +@JsonIgnoreProperties(ignoreUnknown = true) +@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonPropertyOrder({"kind", "status", "template_version", "data_source", "dataset", "type", "description", "owners", + "tags", "certificate", "columns"}) +public class DataContract { + private static final String KIND_VALUE = "DataContract"; + private static final Pattern versionPattern = Pattern.compile("^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$"); + private static final ObjectMapper objectMapperYAML = new ObjectMapper(new YAMLFactory()); + private static final ObjectMapper objectMapperJSON = new ObjectMapper(); + static { + objectMapperYAML.enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS); + objectMapperJSON.enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS); + } + + @Valid @NotNull + public String kind; + public Status status = Status.DRAFT; + @JsonProperty(value = "template_version") + public String templateVersion = "0.0.1"; + public String data_source; + @Valid @NotNull + public String dataset; + public DatasetType type; + public String description; + public List owners; + public List tags; + public String certificate; + @Valid + public List columns; + private final Map unknownFields = new HashMap<>(); + + public enum Status { + @JsonProperty("DRAFT") DRAFT, + 
@JsonProperty("VERIFIED") VERIFIED; + + public static Status from(String s) { + if(StringUtils.isEmpty(s)) { + return DRAFT; + } + switch (s.toLowerCase()) { + case "draft": + return DRAFT; + + case "verified": + return VERIFIED; + + default: + return DRAFT; + } + } + } + public enum DatasetType { + @JsonProperty("Table") Table, + @JsonProperty("View") View, + @JsonProperty("MaterialisedView") MaterialisedView; + + public static DatasetType from(String s) throws AtlasBaseException { + + switch (s.toLowerCase()) { + case "table": + return Table; + case "view": + return View; + case "materialisedview": + return MaterialisedView; + default: + throw new AtlasBaseException(String.format("type: %s value not supported yet.", s)); + } + } + } + + public Status getStatus() { + return status; + } + + public DatasetType getType() { + return type; + } + + @JsonAnyGetter + public Map getUnknownFields() { + return unknownFields; + } + + @JsonSetter("kind") + public void setKind(String kind) throws AtlasBaseException { + if (!KIND_VALUE.equals(kind)) { + throw new AtlasBaseException(AtlasErrorCode.INVALID_VALUE, "kind " + kind + " is inappropriate."); + } + this.kind = kind; + } + + @JsonSetter("status") + public void setStatus(String status) throws AtlasBaseException { + try { + this.status = Status.from(status); + } catch (IllegalArgumentException ex) { + throw new AtlasBaseException(AtlasErrorCode.INVALID_VALUE, "status " + status + " is inappropriate. Accepted values: " + Arrays.toString(Status.values())); + } + } + + public void setTemplateVersion(String templateVersion) throws AtlasBaseException { + if (!isSemVer(templateVersion)) { + throw new AtlasBaseException(AtlasErrorCode.INVALID_VALUE, "Invalid template_version syntax"); + } + this.templateVersion = templateVersion; + } + + @JsonSetter("data_source") + public void setDataSource(String data_source) { + this.data_source = data_source; + } + + public void setDataset(String dataset) { + this.dataset = dataset; + } + + public void setType(String type) throws AtlasBaseException { + try { + this.type = DatasetType.from(type); + } catch (IllegalArgumentException | AtlasBaseException ex) { + throw new AtlasBaseException(AtlasErrorCode.INVALID_VALUE, "type: " + type + " is inappropriate. 
Accepted values: " + Arrays.toString(DatasetType.values())); + } + } + + public void setOwners(List owners) { + this.owners = owners; + } + + public void setTags(List tags) { + this.tags = tags; + } + + public void setColumns(List columns) { + this.columns = columns; + } + + @JsonAnySetter + public void setUnknownFields(String key, Object value) { + unknownFields.put(key, value); + } + + private boolean isSemVer(String version) { + Matcher matcher = versionPattern.matcher(version); + return matcher.matches(); + } + + @JsonIgnoreProperties(ignoreUnknown = true) + @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonPropertyOrder({"name"}) + public static final class BusinessTag { + @NotNull + public String name; + private final Map unknownFields = new HashMap<>(); + + @JsonAnySetter + public void setUnknownFields(String key, Object value) { + unknownFields.put(key, value); + } + @JsonAnyGetter + public Map getUnknownFields() { + return unknownFields; + } + + @Override + public boolean equals(Object o) { + if (this == o) { return true; } + if (o == null || getClass() != o.getClass()) { return false; } + BusinessTag that = (BusinessTag) o; + return Objects.equals(name, that.name) && + Objects.equals(unknownFields, that.unknownFields); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), name, unknownFields); + } + + } + + @JsonIgnoreProperties(ignoreUnknown = true) + @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonPropertyOrder({"name", "description", "data_type"}) + public static final class Column { + @NotNull + public String name; + + public String description; + + public String data_type; + private final Map unknownFields = new HashMap<>(); + + @JsonAnySetter + public void setUnknownFields(String key, Object value) { + unknownFields.put(key, value); + } + @JsonAnyGetter + public Map getUnknownFields() { + return unknownFields; + } + + @Override + public boolean equals(Object o) { + if (this == o) { return true; } + if (o == null || getClass() != o.getClass()) { return false; } + Column that = (Column) o; + return Objects.equals(name, that.name) && + Objects.equals(description, that.description) && + Objects.equals(data_type, that.data_type) && + Objects.equals(unknownFields, that.unknownFields); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), name, description, data_type, unknownFields); + } + } + + public static DataContract deserialize(String contractString) throws AtlasBaseException { + + if (StringUtils.isEmpty(contractString)) { + throw new AtlasBaseException(BAD_REQUEST, "Missing attribute: contract."); + } + + DataContract contract; + try { + contract = objectMapperYAML.readValue(contractString, DataContract.class); + } catch (JsonProcessingException ex) { + try { + contract = objectMapperJSON.readValue(contractString, DataContract.class); + } catch (JsonProcessingException e) { + throw new AtlasBaseException(ex.getOriginalMessage()); + } + } + contract.validate(); + return contract; + + } + + public void validate() throws AtlasBaseException { + Validator validator = Validation.buildDefaultValidatorFactory().getValidator(); + Set> violations = validator.validate(this); + if (!violations.isEmpty()) { + List errorMessageList = new ArrayList<>(); + for (ConstraintViolation violation : violations) { + errorMessageList.add(String.format("Field: %s -> %s", violation.getPropertyPath(), violation.getMessage())); + System.out.println(violation.getMessage()); + } + throw new 
AtlasBaseException(StringUtils.join(errorMessageList, "; ")); + } + + } + + public static String serialize(DataContract contract) throws AtlasBaseException { + + try { + return objectMapperYAML.writeValueAsString(contract); + } catch (JsonProcessingException ex) { + throw new AtlasBaseException(JSON_ERROR, ex.getMessage()); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { return true; } + if (o == null || getClass() != o.getClass()) { return false; } + + DataContract that = (DataContract) o; + return Objects.equals(kind, that.kind) && + Objects.equals(status, that.status) && + Objects.equals(templateVersion, that.templateVersion) && + Objects.equals(data_source, that.data_source) && + Objects.equals(dataset, that.dataset) && + Objects.equals(type, that.type) && + Objects.equals(description, that.description) && + Objects.equals(owners, that.owners) && + Objects.equals(tags, that.tags) && + Objects.equals(certificate, that.certificate) && + Objects.equals(columns, that.columns) && + Objects.equals(unknownFields, that.unknownFields); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), kind, status, templateVersion, data_source, dataset, type, description, owners, + tags, certificate, columns, unknownFields); + } + + public static String serializeJSON(DataContract contract) throws AtlasBaseException { + + try { + return objectMapperJSON.writeValueAsString(contract); + } catch (JsonProcessingException ex) { + throw new AtlasBaseException(JSON_ERROR, ex.getMessage()); + } + } +} + diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/AbstractDomainPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/AbstractDomainPreProcessor.java new file mode 100644 index 0000000000..ff6ad04b5e --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/AbstractDomainPreProcessor.java @@ -0,0 +1,376 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh; + +import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.AtlasException; +import org.apache.atlas.RequestContext; +import org.apache.atlas.authorize.AtlasAuthorizationUtils; +import org.apache.atlas.authorize.AtlasEntityAccessRequest; +import org.apache.atlas.authorize.AtlasPrivilege; +import org.apache.atlas.discovery.EntityDiscoveryService; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.instance.AtlasEntity; +import org.apache.atlas.model.instance.AtlasEntityHeader; +import org.apache.atlas.model.instance.AtlasObjectId; +import org.apache.atlas.model.instance.EntityMutations; +import org.apache.atlas.repository.graph.GraphHelper; +import org.apache.atlas.repository.graphdb.AtlasGraph; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; +import org.apache.atlas.repository.store.graph.v2.EntityMutationContext; +import org.apache.atlas.repository.store.graph.v2.preprocessor.AuthPolicyPreProcessor; +import org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessor; +import org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils; +import org.apache.atlas.type.AtlasEntityType; +import org.apache.atlas.type.AtlasTypeRegistry; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections.MapUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +import static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.*; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.accesscontrol.StakeholderPreProcessor.ATTR_DOMAIN_QUALIFIED_NAME; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh.StakeholderTitlePreProcessor.ATTR_DOMAIN_QUALIFIED_NAMES; +import static org.apache.atlas.repository.util.AccessControlUtils.ATTR_POLICY_RESOURCES; +import static org.apache.atlas.repository.util.AccessControlUtils.REL_ATTR_ACCESS_CONTROL; +import static org.apache.atlas.repository.util.AtlasEntityUtils.getListAttribute; +import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; + +public abstract class AbstractDomainPreProcessor implements PreProcessor { + private static final Logger LOG = LoggerFactory.getLogger(AbstractDomainPreProcessor.class); + + + protected final AtlasGraph graph; + protected final AtlasTypeRegistry typeRegistry; + protected final EntityGraphRetriever entityRetriever; + protected EntityGraphRetriever entityRetrieverNoRelations; + private final PreProcessor preProcessor; + protected EntityDiscoveryService discovery; + + private static final Set POLICY_ATTRIBUTES_FOR_SEARCH = new HashSet<>(Arrays.asList(ATTR_POLICY_RESOURCES)); + private static final Set STAKEHOLDER_ATTRIBUTES_FOR_SEARCH = new HashSet<>(Arrays.asList(ATTR_DOMAIN_QUALIFIED_NAMES, ATTR_DOMAIN_QUALIFIED_NAME)); + + static final Set PARENT_ATTRIBUTES = new HashSet<>(Arrays.asList(SUPER_DOMAIN_QN_ATTR, PreProcessorUtils.PARENT_DOMAIN_QN_ATTR)); + + static final Map customAttributes = new HashMap<>(); + + static { + customAttributes.put(MIGRATION_CUSTOM_ATTRIBUTE, "true"); + } + + AbstractDomainPreProcessor(AtlasTypeRegistry typeRegistry, EntityGraphRetriever entityRetriever, AtlasGraph graph) { + this.graph = 
graph; + this.entityRetriever = entityRetriever; + this.typeRegistry = typeRegistry; + this.preProcessor = new AuthPolicyPreProcessor(graph, typeRegistry, entityRetriever); + + try { + this.entityRetrieverNoRelations = new EntityGraphRetriever(graph, typeRegistry, true); + this.discovery = new EntityDiscoveryService(typeRegistry, graph, null, null, null, null); + } catch (AtlasException e) { + e.printStackTrace(); + } + } + + protected void isAuthorizedToMove(String typeName, AtlasEntityHeader sourceDomain, AtlasEntityHeader targetDomain) throws AtlasBaseException { + + String qualifiedNameToAuthSuffix = DATA_DOMAIN_ENTITY_TYPE.equals(typeName) ? "/*domain/*" : "/*product/*"; + AtlasEntityHeader headerToAuth = new AtlasEntityHeader(typeName); + + if (sourceDomain != null) { + //Update sub-domains/product on source parent + String qualifiedNameToAuth = sourceDomain.getAttribute(QUALIFIED_NAME) + qualifiedNameToAuthSuffix; + headerToAuth.setAttribute(QUALIFIED_NAME, qualifiedNameToAuth); + + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_UPDATE, headerToAuth), + AtlasPrivilege.ENTITY_UPDATE.name(), " " , typeName, " : ", qualifiedNameToAuth); + } + + if (targetDomain != null) { + //Create sub-domains/product on target parent + String qualifiedNameToAuth = targetDomain.getAttribute(QUALIFIED_NAME) + qualifiedNameToAuthSuffix; + headerToAuth.setAttribute(QUALIFIED_NAME, qualifiedNameToAuth); + + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_CREATE, headerToAuth), + AtlasPrivilege.ENTITY_CREATE.name(), " " , typeName, " : ", qualifiedNameToAuth); + } + } + + protected void updatePolicies(Map updatedPolicyResources, EntityMutationContext context) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("updatePolicies"); + try { + AtlasEntityType entityType = typeRegistry.getEntityTypeByName(POLICY_ENTITY_TYPE); + + if (MapUtils.isEmpty(updatedPolicyResources)) { + return; + } + + List policies = getPolicies(updatedPolicyResources.keySet()); + LOG.info("Found {} policies to update", policies.size()); + + if (CollectionUtils.isNotEmpty(policies)) { + for (AtlasEntityHeader policy : policies) { + LOG.info("Updating Policy {}", policy.getGuid()); + AtlasVertex policyVertex = entityRetriever.getEntityVertex(policy.getGuid()); + + AtlasEntity policyEntity = entityRetriever.toAtlasEntity(policyVertex); + + if (policyEntity.hasRelationshipAttribute(REL_ATTR_ACCESS_CONTROL) && policyEntity.getRelationshipAttribute(REL_ATTR_ACCESS_CONTROL) != null) { + AtlasObjectId accessControlObjId = (AtlasObjectId) policyEntity.getRelationshipAttribute(REL_ATTR_ACCESS_CONTROL); + AtlasVertex accessControl = entityRetriever.getEntityVertex(accessControlObjId.getGuid()); + context.getDiscoveryContext().addResolvedGuid(GraphHelper.getGuid(accessControl), accessControl); + } + + List policyResources = (List) policyEntity.getAttribute(ATTR_POLICY_RESOURCES); + + List updatedPolicyResourcesList = new ArrayList<>(); + + for (String resource : policyResources) { + if (updatedPolicyResources.containsKey(resource)) { + updatedPolicyResourcesList.add(updatedPolicyResources.get(resource)); + } else { + updatedPolicyResourcesList.add(resource); + } + } + Map updatedAttributes = new HashMap<>(); + updatedAttributes.put(ATTR_POLICY_RESOURCES, updatedPolicyResourcesList); + + //policyVertex.removeProperty(ATTR_POLICY_RESOURCES); + 
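+ // Rewrite the policy's resource list on the retrieved policy entity, register it
+ // as an UPDATE in the mutation context, and hand it to the AuthPolicyPreProcessor
+ // so the remapped domain/product resources flow through normal policy processing.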
policyEntity.setAttribute(ATTR_POLICY_RESOURCES, updatedPolicyResourcesList); + + context.addUpdated(policyEntity.getGuid(), policyEntity, entityType, policyVertex); + recordUpdatedChildEntities(policyVertex, updatedAttributes); + this.preProcessor.processAttributes(policyEntity, context, EntityMutations.EntityOperation.UPDATE); + } + } + + } finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + + protected void updateStakeholderTitlesAndStakeholders(Map updatedDomainQualifiedNames, EntityMutationContext context) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("updateStakeholderTitlesAndStakeholders"); + try { + + if (MapUtils.isEmpty(updatedDomainQualifiedNames)) { + return; + } + + List assets = getStakeholderTitlesAndStakeholders(updatedDomainQualifiedNames.keySet()); + + if (CollectionUtils.isNotEmpty(assets)) { + for (AtlasEntityHeader asset : assets) { + AtlasVertex vertex = entityRetrieverNoRelations.getEntityVertex(asset.getGuid()); + AtlasEntity entity = entityRetrieverNoRelations.toAtlasEntity(vertex); + Map updatedAttributes = new HashMap<>(); + AtlasEntityType entityType = null; + + if (entity.getTypeName().equals(STAKEHOLDER_ENTITY_TYPE)) { + entityType = typeRegistry.getEntityTypeByName(STAKEHOLDER_ENTITY_TYPE); + + String currentDomainQualifiedName = (String) asset.getAttribute(ATTR_DOMAIN_QUALIFIED_NAME); + + entity.setAttribute(ATTR_DOMAIN_QUALIFIED_NAME, updatedDomainQualifiedNames.get(currentDomainQualifiedName)); + updatedAttributes.put(ATTR_DOMAIN_QUALIFIED_NAME, updatedDomainQualifiedNames.get(currentDomainQualifiedName)); + + } else if (entity.getTypeName().equals(STAKEHOLDER_TITLE_ENTITY_TYPE)) { + entityType = typeRegistry.getEntityTypeByName(STAKEHOLDER_TITLE_ENTITY_TYPE); + + List currentDomainQualifiedNames = getListAttribute(asset, ATTR_DOMAIN_QUALIFIED_NAMES); + + List newDomainQualifiedNames = new ArrayList<>(); + + for (String qualifiedName : currentDomainQualifiedNames) { + if (updatedDomainQualifiedNames.containsKey(qualifiedName)) { + newDomainQualifiedNames.add(updatedDomainQualifiedNames.get(qualifiedName)); + } else { + newDomainQualifiedNames.add(qualifiedName); + } + } + + entity.setAttribute(ATTR_DOMAIN_QUALIFIED_NAMES, newDomainQualifiedNames); + updatedAttributes.put(ATTR_DOMAIN_QUALIFIED_NAMES, newDomainQualifiedNames); + } + + context.addUpdated(entity.getGuid(), entity, entityType, vertex); + recordUpdatedChildEntities(vertex, updatedAttributes); + } + } + + } finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + + protected void exists(String assetType, String assetName, String parentDomainQualifiedName, String guid) throws AtlasBaseException { + boolean exists = false; + + List> mustClauseList = new ArrayList(); + mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", assetType))); + mustClauseList.add(mapOf("term", mapOf("__state", "ACTIVE"))); + mustClauseList.add(mapOf("term", mapOf("name.keyword", assetName))); + List> mustNotClauseList = new ArrayList(); + if(StringUtils.isNotEmpty(guid)){ + mustNotClauseList.add(mapOf("term", mapOf("__guid", guid))); + } + + Map bool = new HashMap<>(); + if (StringUtils.isNotEmpty(parentDomainQualifiedName)) { + mustClauseList.add(mapOf("term", mapOf("parentDomainQualifiedName", parentDomainQualifiedName))); + } else { + mustNotClauseList.add(mapOf("exists", mapOf("field", "parentDomainQualifiedName"))); + } + + bool.put("must", mustClauseList); + if(!mustNotClauseList.isEmpty()) { + 
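+ // must_not carries the exclusions collected above: the entity's own __guid (so a
+ // rename does not match itself) and, for top-level lookups, any asset that already
+ // has a parentDomainQualifiedName.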
bool.put("must_not", mustNotClauseList); + } + Map dsl = mapOf("query", mapOf("bool", bool)); + + List assets = indexSearchPaginated(dsl, null, this.discovery); + + if (CollectionUtils.isNotEmpty(assets)) { + for (AtlasEntityHeader asset : assets) { + String name = (String) asset.getAttribute(NAME); + if (assetName.equals(name)) { + exists = true; + break; + } + } + } + + if (exists) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, + String.format("%s with name %s already exists in the domain", assetType, assetName)); + } + } + + protected List getPolicies(Set resources) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("getPolicies"); + try { + List> mustClauseList = new ArrayList<>(); + mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", POLICY_ENTITY_TYPE))); + mustClauseList.add(mapOf("term", mapOf("__state", "ACTIVE"))); + mustClauseList.add(mapOf("terms", mapOf("policyResources", resources))); + + Map bool = new HashMap<>(); + bool.put("must", mustClauseList); + + Map dsl = mapOf("query", mapOf("bool", bool)); + + return indexSearchPaginated(dsl, POLICY_ATTRIBUTES_FOR_SEARCH, discovery); + } finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + + protected List getStakeholderTitlesAndStakeholders(Set qualifiedNames) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("getStakeholderTitlesAndStakeholders"); + try { + List> mustClauseList = new ArrayList<>(); + mustClauseList.add(mapOf("terms", mapOf("__typeName.keyword", Arrays.asList(STAKEHOLDER_ENTITY_TYPE, STAKEHOLDER_TITLE_ENTITY_TYPE)))); + + List> shouldClauseList = new ArrayList<>(); + shouldClauseList.add(mapOf("terms", mapOf("stakeholderTitleDomainQualifiedNames", qualifiedNames))); + shouldClauseList.add(mapOf("terms", mapOf("stakeholderDomainQualifiedName", qualifiedNames))); + + mustClauseList.add(mapOf("bool", mapOf("should", shouldClauseList))); + + Map bool = new HashMap<>(); + bool.put("must", mustClauseList); + + Map dsl = mapOf("query", mapOf("bool", bool)); + + return indexSearchPaginated(dsl, STAKEHOLDER_ATTRIBUTES_FOR_SEARCH, discovery); + } finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + + /** + * Record the updated child entities, it will be used to send notification and store audit logs + * @param entityVertex Child entity vertex + * @param updatedAttributes Updated attributes while updating required attributes on updating collection + */ + protected void recordUpdatedChildEntities(AtlasVertex entityVertex, Map updatedAttributes) { + RequestContext requestContext = RequestContext.get(); + AtlasPerfMetrics.MetricRecorder metricRecorder = requestContext.startMetricRecord("recordUpdatedChildEntities"); + AtlasEntity entity = new AtlasEntity(); + entity = entityRetriever.mapSystemAttributes(entityVertex, entity); + entity.setAttributes(updatedAttributes); + requestContext.cacheDifferentialEntity(new AtlasEntity(entity)); + + AtlasEntityType entityType = typeRegistry.getEntityTypeByName(entity.getTypeName()); + + //Add the min info attributes to entity header to be sent as part of notification + if(entityType != null) { + AtlasEntity finalEntity = entity; + entityType.getMinInfoAttributes().values().stream().filter(attribute -> !updatedAttributes.containsKey(attribute.getName())).forEach(attribute -> { + Object attrValue = null; + try { + attrValue = entityRetriever.getVertexAttribute(entityVertex, attribute); + } 
catch (AtlasBaseException e) { + LOG.error("Error while getting vertex attribute", e); + } + if(attrValue != null) { + finalEntity.setAttribute(attribute.getName(), attrValue); + } + }); + requestContext.recordEntityUpdate(new AtlasEntityHeader(finalEntity)); + } + + requestContext.endMetricRecord(metricRecorder); + } + + protected AtlasEntityHeader getParent(Object parentObject, Set attributes) throws AtlasBaseException { + if (parentObject == null) { + return null; + } + + AtlasObjectId objectId; + if (parentObject instanceof Map) { + objectId = getAtlasObjectIdFromMapObject(parentObject); + } else { + objectId = (AtlasObjectId) parentObject; + } + + AtlasVertex parentVertex = entityRetriever.getEntityVertex(objectId); + return entityRetriever.toAtlasEntityHeader(parentVertex, attributes); + } + + public static AtlasObjectId getAtlasObjectIdFromMapObject(Object obj) { + Map parentMap = (Map) obj; + AtlasObjectId objectId = new AtlasObjectId(); + objectId.setTypeName((String) parentMap.get("typeName")); + + if (parentMap.containsKey("guid")) { + objectId.setGuid((String) parentMap.get("guid")); + } else { + objectId.setUniqueAttributes((Map) parentMap.get("uniqueAttributes")); + } + + return objectId; + } +} diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/DataDomainPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/DataDomainPreProcessor.java new file mode 100644 index 0000000000..afd3b0500e --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/DataDomainPreProcessor.java @@ -0,0 +1,488 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh; + + +import com.sun.org.apache.bcel.internal.generic.NEW; +import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.RequestContext; +import org.apache.atlas.authorize.AtlasAuthorizationUtils; +import org.apache.atlas.authorize.AtlasEntityAccessRequest; +import org.apache.atlas.authorize.AtlasPrivilege; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.instance.*; +import org.apache.atlas.repository.graph.GraphHelper; +import org.apache.atlas.repository.graphdb.AtlasEdge; +import org.apache.atlas.repository.graphdb.AtlasEdgeDirection; +import org.apache.atlas.repository.graphdb.AtlasGraph; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.atlas.repository.store.graph.AtlasEntityStore; +import org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2; +import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; +import org.apache.atlas.repository.store.graph.v2.EntityMutationContext; +import org.apache.atlas.type.AtlasTypeRegistry; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +import static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.graph.GraphHelper.*; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.*; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh.StakeholderTitlePreProcessor.*; +import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; + +public class DataDomainPreProcessor extends AbstractDomainPreProcessor { + private static final Logger LOG = LoggerFactory.getLogger(DataDomainPreProcessor.class); + + private EntityMutationContext context; + private Map updatedPolicyResources; + private EntityGraphRetriever retrieverNoRelation = null; + private Map updatedDomainQualifiedNames; + private AtlasEntityStore entityStore; + + public DataDomainPreProcessor(AtlasTypeRegistry typeRegistry, EntityGraphRetriever entityRetriever, + AtlasGraph graph, AtlasEntityStore entityStore) { + super(typeRegistry, entityRetriever, graph); + this.updatedPolicyResources = new HashMap<>(); + this.retrieverNoRelation = new EntityGraphRetriever(graph, typeRegistry, true); + this.updatedDomainQualifiedNames = new HashMap<>(); + this.entityStore = entityStore; + } + + @Override + public void processAttributes(AtlasStruct entityStruct, EntityMutationContext context, + EntityMutations.EntityOperation operation) throws AtlasBaseException { + if (LOG.isDebugEnabled()) { + LOG.debug("DataDomainPreProcessor.processAttributes: pre processing {}, {}", + entityStruct.getAttribute(QUALIFIED_NAME), operation); + } + + this.context = context; + + AtlasEntity entity = (AtlasEntity) entityStruct; + + switch (operation) { + case CREATE: + processCreateDomain(entity); + break; + case UPDATE: + AtlasVertex vertex = context.getVertex(entity.getGuid()); + processUpdateDomain(entity, vertex); + break; + } + } + + private void processCreateDomain(AtlasEntity entity) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processCreateDomain"); + + validateStakeholderRelationship(entity); + + String domainName = (String) entity.getAttribute(NAME); + + String parentDomainQualifiedName = ""; + 
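+ // Resolve the optional parent domain from the relationship attribute; its qualifiedName
+ // feeds parentDomainQualifiedName and superDomainQualifiedName, which drive the generated
+ // qualifiedName below (see createQualifiedName):
+ //   top-level domain: default/domain/<uuid>/super
+ //   sub-domain:       <parentDomainQualifiedName>/domain/<uuid>
+ // (<uuid> is illustrative; the actual value comes from getUUID())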
AtlasObjectId parentDomainObject = (AtlasObjectId) entity.getRelationshipAttribute(PARENT_DOMAIN_REL_TYPE); + AtlasVertex parentDomain = null; + + if(parentDomainObject != null ){ + parentDomain = retrieverNoRelation.getEntityVertex(parentDomainObject); + parentDomainQualifiedName = parentDomain.getProperty(QUALIFIED_NAME, String.class); + if(StringUtils.isNotEmpty(parentDomainQualifiedName)) { + entity.setAttribute(PARENT_DOMAIN_QN_ATTR, parentDomainQualifiedName); + String superDomainQualifiedName = parentDomain.getProperty(SUPER_DOMAIN_QN_ATTR, String.class); + if(StringUtils.isEmpty(parentDomain.getProperty(SUPER_DOMAIN_QN_ATTR, String.class))) { + superDomainQualifiedName = parentDomainQualifiedName; + } + entity.setAttribute(SUPER_DOMAIN_QN_ATTR, superDomainQualifiedName); + } + } else { + entity.removeAttribute(PARENT_DOMAIN_QN_ATTR); + entity.removeAttribute(SUPER_DOMAIN_QN_ATTR); + } + + entity.setAttribute(QUALIFIED_NAME, createQualifiedName(parentDomainQualifiedName)); + + // Check if authorized to create entities + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_CREATE, new AtlasEntityHeader(entity)), + "create entity: type=", entity.getTypeName()); + + entity.setCustomAttributes(customAttributes); + + domainExists(domainName, parentDomainQualifiedName, null); + + RequestContext.get().endMetricRecord(metricRecorder); + } + + private void processUpdateDomain(AtlasEntity entity, AtlasVertex vertex) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processUpdateDomain"); + + // Validate Relationship + if(entity.hasRelationshipAttribute(SUB_DOMAIN_REL_TYPE) || entity.hasRelationshipAttribute(DATA_PRODUCT_REL_TYPE)){ + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Cannot update Domain's subDomains or dataProducts relations"); + } + + validateStakeholderRelationship(entity); + + String vertexQnName = vertex.getProperty(QUALIFIED_NAME, String.class); + entity.setAttribute(QUALIFIED_NAME, vertexQnName); + // Check if authorized to update entities + AtlasAuthorizationUtils.verifyUpdateEntityAccess(typeRegistry, new AtlasEntityHeader(entity),"update entity: type=" + entity.getTypeName()); + + + AtlasEntity storedDomain = entityRetriever.toAtlasEntity(vertex); + AtlasRelatedObjectId currentParentDomainObjectId = (AtlasRelatedObjectId) storedDomain.getRelationshipAttribute(PARENT_DOMAIN_REL_TYPE); + + String newSuperDomainQualifiedName = ""; + String newParentDomainQualifiedName = ""; + String currentParentDomainQualifiedName = ""; + + AtlasEntityHeader currentParentDomainHeader = null; + + if(currentParentDomainObjectId != null){ + currentParentDomainHeader = entityRetriever.toAtlasEntityHeader(currentParentDomainObjectId.getGuid()); + currentParentDomainQualifiedName = (String) currentParentDomainHeader.getAttribute(QUALIFIED_NAME); + } + + AtlasEntityHeader newParentDomainHeader = getParent(entity); + if (newParentDomainHeader != null) { + newParentDomainQualifiedName = (String) newParentDomainHeader.getAttribute(QUALIFIED_NAME); + + newSuperDomainQualifiedName = (String) newParentDomainHeader.getAttribute(SUPER_DOMAIN_QN_ATTR); + if(StringUtils.isEmpty(newSuperDomainQualifiedName)) { + newSuperDomainQualifiedName = newParentDomainQualifiedName; + } + } + + if (!newParentDomainQualifiedName.equals(currentParentDomainQualifiedName) && entity.hasRelationshipAttribute(PARENT_DOMAIN_REL_TYPE)) { + 
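+ // A different parent was supplied, so this update is a domain move. A super domain
+ // (one with no current parent) cannot be moved under another domain; otherwise the
+ // move is authorized against both source and target parents and the qualifiedName
+ // change is cascaded to child domains/products, policies and stakeholders.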
if(storedDomain.getRelationshipAttribute(PARENT_DOMAIN_REL_TYPE) == null && + StringUtils.isEmpty( (String) storedDomain.getAttribute(PARENT_DOMAIN_QN_ATTR))){ + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Cannot move Super Domain inside another domain"); + } + + //Auth check + isAuthorizedToMove(DATA_DOMAIN_ENTITY_TYPE, currentParentDomainHeader, newParentDomainHeader); + + processMoveSubDomainToAnotherDomain(entity, vertex, currentParentDomainQualifiedName, newParentDomainQualifiedName, vertexQnName, newSuperDomainQualifiedName); + + } else { + String domainCurrentName = vertex.getProperty(NAME, String.class); + String domainNewName = (String) entity.getAttribute(NAME); + + entity.removeAttribute(PARENT_DOMAIN_QN_ATTR); + entity.removeAttribute(SUPER_DOMAIN_QN_ATTR); + + if (!domainCurrentName.equals(domainNewName)) { + domainExists(domainNewName, currentParentDomainQualifiedName, storedDomain.getGuid()); + } + + } + RequestContext.get().endMetricRecord(metricRecorder); + } + + private void processMoveSubDomainToAnotherDomain(AtlasEntity domain, + AtlasVertex domainVertex, + String sourceDomainQualifiedName, + String targetDomainQualifiedName, + String currentDomainQualifiedName, + String superDomainQualifiedName) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder recorder = RequestContext.get().startMetricRecord("processMoveSubDomainToAnotherDomain"); + + try { + String domainName = (String) domain.getAttribute(NAME); + String updatedQualifiedName = ""; + + LOG.info("Moving subdomain {} to Domain {}", domainName, targetDomainQualifiedName); + + domainExists(domainName, targetDomainQualifiedName, domain.getGuid()); + + if(targetDomainQualifiedName.isEmpty()){ + //Moving subDomain to make it Super Domain + targetDomainQualifiedName = "default"; + updatedQualifiedName = currentDomainQualifiedName.replace(sourceDomainQualifiedName, targetDomainQualifiedName); + updatedQualifiedName = updatedQualifiedName + "/super"; + domain.setAttribute(QUALIFIED_NAME, updatedQualifiedName); + domain.setAttribute(PARENT_DOMAIN_QN_ATTR, null); + domain.setAttribute(SUPER_DOMAIN_QN_ATTR, null); + superDomainQualifiedName = updatedQualifiedName ; + } + else{ + if(StringUtils.isEmpty(sourceDomainQualifiedName)){ + updatedQualifiedName = createQualifiedName(targetDomainQualifiedName); + }else { + updatedQualifiedName = currentDomainQualifiedName.replace(sourceDomainQualifiedName, targetDomainQualifiedName); + } + + domain.setAttribute(QUALIFIED_NAME, updatedQualifiedName); + domain.setAttribute(PARENT_DOMAIN_QN_ATTR, targetDomainQualifiedName); + domain.setAttribute(SUPER_DOMAIN_QN_ATTR, superDomainQualifiedName); + } + + Iterator existingParentEdges = domainVertex.getEdges(AtlasEdgeDirection.IN, DOMAIN_PARENT_EDGE_LABEL).iterator(); + if (existingParentEdges.hasNext()) { + graph.removeEdge(existingParentEdges.next()); + } + + String currentQualifiedName = domainVertex.getProperty(QUALIFIED_NAME, String.class); + this.updatedPolicyResources.put("entity:" + currentQualifiedName, "entity:" + updatedQualifiedName); + this.updatedDomainQualifiedNames.put(currentQualifiedName, updatedQualifiedName); + + moveChildren(domainVertex, superDomainQualifiedName, updatedQualifiedName, sourceDomainQualifiedName, targetDomainQualifiedName); + updatePolicies(this.updatedPolicyResources, this.context); + updateStakeholderTitlesAndStakeholders(this.updatedDomainQualifiedNames, this.context); + + LOG.info("Moved subDomain {} to Domain {}", domainName, targetDomainQualifiedName); + + } finally { + 
RequestContext.get().endMetricRecord(recorder); + } + } + + private void moveChildren(AtlasVertex domainVertex, + String superDomainQualifiedName, + String parentDomainQualifiedName, + String sourceDomainQualifiedName, + String targetDomainQualifiedName) throws AtlasBaseException { + // move products to target Domain + Iterator products = getActiveChildrenVertices(domainVertex, DATA_PRODUCT_EDGE_LABEL); + while (products.hasNext()) { + AtlasVertex productVertex = products.next(); + moveChildDataProductToAnotherDomain(productVertex, superDomainQualifiedName, parentDomainQualifiedName, sourceDomainQualifiedName, targetDomainQualifiedName); + } + // Get all children domains of current domain + Iterator childDomains = getActiveChildrenVertices(domainVertex, DOMAIN_PARENT_EDGE_LABEL); + while (childDomains.hasNext()) { + AtlasVertex childVertex = childDomains.next(); + moveChildrenToAnotherDomain(childVertex, superDomainQualifiedName, parentDomainQualifiedName, sourceDomainQualifiedName, targetDomainQualifiedName); + } + } + + private void moveChildrenToAnotherDomain(AtlasVertex childDomainVertex, + String superDomainQualifiedName, + String parentDomainQualifiedName, + String sourceDomainQualifiedName, + String targetDomainQualifiedName) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder recorder = RequestContext.get().startMetricRecord("moveChildrenToAnotherDomain"); + + + try { + LOG.info("Moving child domain {} to Domain {}", childDomainVertex.getProperty(NAME, String.class), targetDomainQualifiedName); + Map updatedAttributes = new HashMap<>(); + + String currentDomainQualifiedName = childDomainVertex.getProperty(QUALIFIED_NAME, String.class); + String updatedDomainQualifiedName = parentDomainQualifiedName + getOwnQualifiedNameForChild(currentDomainQualifiedName); + + // Change domain qualifiedName + childDomainVertex.setProperty(QUALIFIED_NAME, updatedDomainQualifiedName); + updatedAttributes.put(QUALIFIED_NAME, updatedDomainQualifiedName); + + //change superDomainQN, parentDomainQN + childDomainVertex.setProperty(SUPER_DOMAIN_QN_ATTR, superDomainQualifiedName); + childDomainVertex.setProperty(PARENT_DOMAIN_QN_ATTR, parentDomainQualifiedName); + + //Store domainPolicies and resources to be updated + String currentResource = "entity:"+ currentDomainQualifiedName; + String updatedResource = "entity:"+ updatedDomainQualifiedName; + this.updatedPolicyResources.put(currentResource, updatedResource); + this.updatedDomainQualifiedNames.put(currentDomainQualifiedName, updatedDomainQualifiedName); + + //update system properties + GraphHelper.setModifiedByAsString(childDomainVertex, RequestContext.get().getUser()); + GraphHelper.setModifiedTime(childDomainVertex, System.currentTimeMillis()); + + // move products to target Domain + Iterator products = getActiveChildrenVertices(childDomainVertex, DATA_PRODUCT_EDGE_LABEL); + + while (products.hasNext()) { + AtlasVertex productVertex = products.next(); + moveChildDataProductToAnotherDomain(productVertex, superDomainQualifiedName, updatedDomainQualifiedName, sourceDomainQualifiedName, targetDomainQualifiedName); + } + + // Get all children domains of current domain + Iterator childDomains = getActiveChildrenVertices(childDomainVertex, DOMAIN_PARENT_EDGE_LABEL); + + while (childDomains.hasNext()) { + AtlasVertex childVertex = childDomains.next(); + moveChildrenToAnotherDomain(childVertex, superDomainQualifiedName, updatedDomainQualifiedName, sourceDomainQualifiedName, targetDomainQualifiedName); + } + + 
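+ // Once nested sub-domains and products have been relocated recursively, record this
+ // child domain's changed attributes so notifications and audit logs carry the new
+ // qualifiedName.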
recordUpdatedChildEntities(childDomainVertex, updatedAttributes); + + LOG.info("Moved child domain {} to Domain {}", childDomainVertex.getProperty(NAME, String.class), targetDomainQualifiedName); + } finally { + RequestContext.get().endMetricRecord(recorder); + } + } + + private void moveChildDataProductToAnotherDomain(AtlasVertex productVertex, + String superDomainQualifiedName, + String parentDomainQualifiedName, + String sourceDomainQualifiedName, + String targetDomainQualifiedName) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder recorder = RequestContext.get().startMetricRecord("moveChildDataProductToAnotherDomain"); + + try { + String productName = productVertex.getProperty(NAME, String.class); + LOG.info("Moving dataProduct {} to Domain {}", productName, targetDomainQualifiedName); + Map updatedAttributes = new HashMap<>(); + + String currentQualifiedName = productVertex.getProperty(QUALIFIED_NAME, String.class); + String updatedQualifiedName = parentDomainQualifiedName + getOwnQualifiedNameForChild(currentQualifiedName); + + productVertex.setProperty(QUALIFIED_NAME, updatedQualifiedName); + updatedAttributes.put(QUALIFIED_NAME, updatedQualifiedName); + + productVertex.setProperty(PARENT_DOMAIN_QN_ATTR, parentDomainQualifiedName); + productVertex.setProperty(SUPER_DOMAIN_QN_ATTR, superDomainQualifiedName); + + //Store domainPolicies and resources to be updated + String currentResource = "entity:"+ currentQualifiedName; + String updatedResource = "entity:"+ updatedQualifiedName; + this.updatedPolicyResources.put(currentResource, updatedResource); + + //update system properties + GraphHelper.setModifiedByAsString(productVertex, RequestContext.get().getUser()); + GraphHelper.setModifiedTime(productVertex, System.currentTimeMillis()); + + recordUpdatedChildEntities(productVertex, updatedAttributes); + + LOG.info("Moved dataProduct {} to Domain {}", productName, targetDomainQualifiedName); + } finally { + RequestContext.get().endMetricRecord(recorder); + } + } + + private AtlasEntityHeader getParent(AtlasEntity domainEntity) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("DataDomainPreProcessor.getParent"); + + AtlasObjectId objectId = (AtlasObjectId) domainEntity.getRelationshipAttribute(PARENT_DOMAIN_REL_TYPE); + + RequestContext.get().endMetricRecord(metricRecorder); + return getParent(objectId, PARENT_ATTRIBUTES); + } + + private void domainExists(String domainName, String parentDomainQualifiedName,String guid) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("domainExists"); + try { + exists(DATA_DOMAIN_ENTITY_TYPE, domainName, parentDomainQualifiedName, guid); + + } finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + + private static String createQualifiedName(String parentDomainQualifiedName) { + if (StringUtils.isNotEmpty(parentDomainQualifiedName)) { + return parentDomainQualifiedName + "/domain/" + getUUID(); + } else{ + return "default/domain/" + getUUID() + "/super"; + } + } + + private String getOwnQualifiedNameForChild(String childQualifiedName) { + String[] splitted = childQualifiedName.split("/"); + return String.format("/%s/%s", splitted[splitted.length -2], splitted[splitted.length -1]); + } + + private void validateStakeholderRelationship(AtlasEntity entity) throws AtlasBaseException { + if(entity.hasRelationshipAttribute(STAKEHOLDER_REL_TYPE)){ + throw new 
AtlasBaseException(AtlasErrorCode.OPERATION_NOT_SUPPORTED, "Managing Stakeholders while creating/updating a domain"); + } + } + + public List getStakeholderTitle(String domainQualifiedName) throws AtlasBaseException { + List> mustClauseList = new ArrayList<>(); + mustClauseList.add(mapOf("term", mapOf("__state", "ACTIVE"))); + mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", STAKEHOLDER_TITLE_ENTITY_TYPE))); + + List termsList = Arrays.asList( + NEW_STAR, + STAR, + domainQualifiedName + ); + + Map termsMap = mapOf(ATTR_DOMAIN_QUALIFIED_NAMES, termsList); + Map termsFilter = mapOf("terms", termsMap); + + mustClauseList.add(termsFilter); + + Map boolQuery = mapOf("must", mustClauseList); + Map query = mapOf("bool", boolQuery); + Map dsl = mapOf("query", query); + + List assets = indexSearchPaginated(dsl, null, super.discovery); + + return assets; + } + + + @Override + public void processDelete(AtlasVertex vertex) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processDomainDelete"); + + try{ + List stakeHolderGuids = new ArrayList<>(); + + // active childrens exists? + Iterator childrens = getActiveChildrenVertices(vertex, + DOMAIN_PARENT_EDGE_LABEL, DATA_PRODUCT_EDGE_LABEL); + if (childrens.hasNext()){ + throw new AtlasBaseException(AtlasErrorCode.OPERATION_NOT_SUPPORTED, "Domain cannot be archived because some subdomains or products are active in this domain"); + } + + // active stakeholder exists? + childrens = getActiveChildrenVertices(vertex, STAKEHOLDER_EDGE_LABEL); + while (childrens.hasNext()){ + AtlasVertex child = childrens.next(); + AtlasObjectId childId = entityRetriever.toAtlasObjectId(child); + stakeHolderGuids.add(childId.getGuid()); + } + + if (CollectionUtils.isNotEmpty(stakeHolderGuids)) { + entityStore.deleteByIds(stakeHolderGuids); + LOG.info("Deleted Stakeholders: {}", stakeHolderGuids); + } + + // active stakeholder titles exists? 
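+ // For every StakeholderTitle associated with this domain (including wildcard-scoped
+ // titles), drop this domain's qualifiedName from domainQualifiedNames; if the list
+ // becomes empty and no active Stakeholders remain on the title, delete the title too.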
+ List stakeholderTitles = getStakeholderTitle(vertex.getProperty(QUALIFIED_NAME, String.class)); + if (CollectionUtils.isNotEmpty(stakeholderTitles)) { + for (AtlasEntityHeader stakeholderTitle : stakeholderTitles) { + AtlasVertex stakeholderTitleVertex = entityRetriever.getEntityVertex(stakeholderTitle.getGuid()); + AtlasGraphUtilsV2.removeItemFromListPropertyValue(stakeholderTitleVertex, ATTR_DOMAIN_QUALIFIED_NAMES, vertex.getProperty(QUALIFIED_NAME, String.class)); + List domainQualifiedNames = stakeholderTitleVertex.getMultiValuedProperty(ATTR_DOMAIN_QUALIFIED_NAMES, String.class); + + if (CollectionUtils.isEmpty(domainQualifiedNames)) { + Iterator stakeholders = getActiveChildrenVertices(stakeholderTitleVertex, STAKEHOLDER_TITLE_EDGE_LABEL); + if (!stakeholders.hasNext()) { + entityStore.deleteById(stakeholderTitle.getGuid()); + LOG.info("Deleted Stakeholder Title: {}", stakeholderTitle.getGuid()); + } + } + } + } + + } + finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } +} + + diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/DataProductPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/DataProductPreProcessor.java new file mode 100644 index 0000000000..bdf2df1cf2 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/DataProductPreProcessor.java @@ -0,0 +1,465 @@ +package org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh; + +import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.DeleteType; +import org.apache.atlas.RequestContext; +import org.apache.atlas.authorize.AtlasAuthorizationUtils; +import org.apache.atlas.authorize.AtlasEntityAccessRequest; +import org.apache.atlas.authorize.AtlasPrivilege; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.instance.*; +import org.apache.atlas.repository.graphdb.AtlasEdge; +import org.apache.atlas.repository.graphdb.AtlasEdgeDirection; +import org.apache.atlas.repository.graphdb.AtlasGraph; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.atlas.repository.store.graph.AtlasEntityStore; +import org.apache.atlas.repository.store.graph.v2.AtlasEntityStream; +import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; +import org.apache.atlas.repository.store.graph.v2.EntityMutationContext; +import org.apache.atlas.repository.store.graph.v2.EntityStream; +import org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils; +import org.apache.atlas.repository.util.AtlasEntityUtils; +import org.apache.atlas.type.AtlasEntityType; +import org.apache.atlas.type.AtlasTypeRegistry; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +import static org.apache.atlas.AtlasErrorCode.OPERATION_NOT_SUPPORTED; +import static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.*; +import static org.apache.atlas.repository.util.AccessControlUtils.*; + +public class DataProductPreProcessor extends AbstractDomainPreProcessor { + private static final Logger LOG = LoggerFactory.getLogger(DataProductPreProcessor.class); + private static final String PRIVATE = "Private"; + private static final String PROTECTED = 
"Protected"; + private static final String PUBLIC = "Public"; + private static final String DATA_PRODUCT = "dataProduct"; + + private EntityMutationContext context; + private AtlasEntityStore entityStore; + private Map updatedPolicyResources; + private EntityGraphRetriever retrieverNoRelation = null; + + public DataProductPreProcessor(AtlasTypeRegistry typeRegistry, EntityGraphRetriever entityRetriever, + AtlasGraph graph, AtlasEntityStore entityStore) { + super(typeRegistry, entityRetriever, graph); + this.updatedPolicyResources = new HashMap<>(); + this.entityStore = entityStore; + this.retrieverNoRelation = new EntityGraphRetriever(graph, typeRegistry, true); + } + + @Override + public void processAttributes(AtlasStruct entityStruct, EntityMutationContext context, + EntityMutations.EntityOperation operation) throws AtlasBaseException { + if (LOG.isDebugEnabled()) { + LOG.debug("DataProductPreProcessor.processAttributes: pre processing {}, {}", + entityStruct.getAttribute(QUALIFIED_NAME), operation); + } + this.context = context; + + AtlasEntity entity = (AtlasEntity) entityStruct; + + AtlasVertex vertex = context.getVertex(entity.getGuid()); + + switch (operation) { + case CREATE: + processCreateProduct(entity, vertex); + break; + case UPDATE: + processUpdateProduct(entity, vertex); + break; + } + } + + private void processCreateProduct(AtlasEntity entity,AtlasVertex vertex) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processCreateProduct"); + AtlasObjectId parentDomainObject = (AtlasObjectId) entity.getRelationshipAttribute(DATA_DOMAIN_REL_TYPE); + String productName = (String) entity.getAttribute(NAME); + String parentDomainQualifiedName = ""; + + entity.removeAttribute(OUTPUT_PORT_GUIDS_ATTR); + entity.removeAttribute(INPUT_PORT_GUIDS_ATTR); + + if (parentDomainObject == null) { + throw new AtlasBaseException(OPERATION_NOT_SUPPORTED, "Cannot create a Product without a Domain Relationship"); + } else { + AtlasVertex parentDomain = retrieverNoRelation.getEntityVertex(parentDomainObject); + parentDomainQualifiedName = parentDomain.getProperty(QUALIFIED_NAME, String.class); + + + entity.setAttribute(PARENT_DOMAIN_QN_ATTR, parentDomainQualifiedName); + + String superDomainQualifiedName = parentDomain.getProperty(SUPER_DOMAIN_QN_ATTR, String.class); + if(StringUtils.isEmpty(superDomainQualifiedName)) { + superDomainQualifiedName = parentDomainQualifiedName; + } + entity.setAttribute(SUPER_DOMAIN_QN_ATTR, superDomainQualifiedName); + } + + entity.setAttribute(QUALIFIED_NAME, createQualifiedName(parentDomainQualifiedName)); + + // Check if authorized to create entities + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_CREATE, new AtlasEntityHeader(entity)), + "create entity: type=", entity.getTypeName()); + + entity.setCustomAttributes(customAttributes); + + productExists(productName, parentDomainQualifiedName, null); + + createDaapVisibilityPolicy(entity, vertex); + + RequestContext.get().endMetricRecord(metricRecorder); + } + + private void processUpdateProduct(AtlasEntity entity, AtlasVertex vertex) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processUpdateProduct"); + + entity.removeAttribute(OUTPUT_PORT_GUIDS_ATTR); + entity.removeAttribute(INPUT_PORT_GUIDS_ATTR); + + if(entity.hasRelationshipAttribute(DATA_DOMAIN_REL_TYPE) && entity.getRelationshipAttribute(DATA_DOMAIN_REL_TYPE) == 
null){ + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "DataProduct can only be moved to another Domain."); + } + + String vertexQnName = vertex.getProperty(QUALIFIED_NAME, String.class); + entity.setAttribute(QUALIFIED_NAME, vertexQnName); + // Check if authorized to update entities + AtlasAuthorizationUtils.verifyUpdateEntityAccess(typeRegistry, new AtlasEntityHeader(entity),"update entity: type=" + entity.getTypeName()); + + AtlasEntity storedProduct = entityRetriever.toAtlasEntity(vertex); + AtlasRelatedObjectId currentParentDomainObjectId = (AtlasRelatedObjectId) storedProduct.getRelationshipAttribute(DATA_DOMAIN_REL_TYPE); + + String newParentDomainQualifiedName = null; + String currentParentDomainQualifiedName = null; + AtlasEntityHeader currentParentDomainHeader = null; + + if(currentParentDomainObjectId != null) { + currentParentDomainHeader = entityRetriever.toAtlasEntityHeader(currentParentDomainObjectId.getGuid()); + currentParentDomainQualifiedName = (String) currentParentDomainHeader.getAttribute(QUALIFIED_NAME); + } + + AtlasEntityHeader newParentDomainHeader = getParent(entity); + if (newParentDomainHeader != null) { + newParentDomainQualifiedName = (String) newParentDomainHeader.getAttribute(QUALIFIED_NAME); + } + + boolean isDaapVisibilityChanged = isDaapVisibilityChanged(storedProduct, entity); + + if (newParentDomainQualifiedName != null && !newParentDomainQualifiedName.equals(currentParentDomainQualifiedName)) { + + if(isDaapVisibilityChanged){ + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Moving the product to another domain along with the change in Daap visibility is not allowed"); + } + + //Auth check + isAuthorizedToMove(DATA_PRODUCT_ENTITY_TYPE, currentParentDomainHeader, newParentDomainHeader); + + String newSuperDomainQualifiedName = (String) newParentDomainHeader.getAttribute(SUPER_DOMAIN_QN_ATTR); + if(StringUtils.isEmpty(newSuperDomainQualifiedName)){ + newSuperDomainQualifiedName = newParentDomainQualifiedName; + } + + processMoveDataProductToAnotherDomain(entity, vertex, currentParentDomainQualifiedName, newParentDomainQualifiedName, vertexQnName, newSuperDomainQualifiedName); + + updatePolicies(this.updatedPolicyResources, this.context); + + } else { + entity.removeAttribute(PARENT_DOMAIN_QN_ATTR); + entity.removeAttribute(SUPER_DOMAIN_QN_ATTR); + String productCurrentName = vertex.getProperty(NAME, String.class); + String productNewName = (String) entity.getAttribute(NAME); + + if (!productCurrentName.equals(productNewName)) { + productExists(productNewName, currentParentDomainQualifiedName, storedProduct.getGuid()); + } + } + + if (isDaapVisibilityChanged) { + updateDaapVisibilityPolicy(entity, storedProduct); + } + else{ + // if isDaapVisibilityChanged is false, then do not update any daap visibility attributes in product entity as well + entity.removeAttribute(DAAP_VISIBILITY_USERS_ATTR); + entity.removeAttribute(DAAP_VISIBILITY_GROUPS_ATTR); + } + RequestContext.get().endMetricRecord(metricRecorder); + } + + private void processMoveDataProductToAnotherDomain(AtlasEntity product, + AtlasVertex productVertex, + String sourceDomainQualifiedName, + String targetDomainQualifiedName, + String currentDataProductQualifiedName, + String superDomainQualifiedName) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder recorder = RequestContext.get().startMetricRecord("processMoveDataProductToAnotherDomain"); + + try { + String productName = (String) product.getAttribute(NAME); + + LOG.info("Moving dataProduct {} to Domain {}", 
productName, targetDomainQualifiedName); + + productExists(productName, targetDomainQualifiedName, product.getGuid()); + + String updatedQualifiedName; + if(StringUtils.isEmpty(sourceDomainQualifiedName)){ + updatedQualifiedName = createQualifiedName(targetDomainQualifiedName); + } else { + updatedQualifiedName = currentDataProductQualifiedName.replace(sourceDomainQualifiedName, targetDomainQualifiedName); + } + + product.setAttribute(QUALIFIED_NAME, updatedQualifiedName); + product.setAttribute(PreProcessorUtils.PARENT_DOMAIN_QN_ATTR, targetDomainQualifiedName); + product.setAttribute(SUPER_DOMAIN_QN_ATTR, superDomainQualifiedName); + + Iterator existingParentEdges = productVertex.getEdges(AtlasEdgeDirection.IN, DATA_PRODUCT_EDGE_LABEL).iterator(); + if (existingParentEdges.hasNext()) { + graph.removeEdge(existingParentEdges.next()); + } + + //Store domainPolicies and resources to be updated + String currentResource = "entity:"+ currentDataProductQualifiedName; + String updatedResource = "entity:"+ updatedQualifiedName; + this.updatedPolicyResources.put(currentResource, updatedResource); + + LOG.info("Moved dataProduct {} to Domain {}", productName, targetDomainQualifiedName); + + } finally { + RequestContext.get().endMetricRecord(recorder); + } + } + + private AtlasEntityHeader getParent(AtlasEntity productEntity) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("DataProductPreProcessor.getParent"); + + Object relationshipAttribute = productEntity.getRelationshipAttribute(DATA_DOMAIN_REL_TYPE); + + RequestContext.get().endMetricRecord(metricRecorder); + return getParent(relationshipAttribute, PARENT_ATTRIBUTES); + } + + private void productExists(String productName, String parentDomainQualifiedName, String guid) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("productExists"); + + try { + exists(DATA_PRODUCT_ENTITY_TYPE, productName, parentDomainQualifiedName, guid); + + } finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + + private static String createQualifiedName(String parentDomainQualifiedName) throws AtlasBaseException { + if (StringUtils.isEmpty(parentDomainQualifiedName)) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Parent Domain Qualified Name cannot be empty or null"); + } + return parentDomainQualifiedName + "/product/" + PreProcessorUtils.getUUID(); + + } + + private AtlasEntity getPolicyEntity(AtlasEntity entity, String productGuid ) { + AtlasEntity policy = new AtlasEntity(); + policy.setTypeName(POLICY_ENTITY_TYPE); + policy.setAttribute(NAME, entity.getAttribute(NAME)); + policy.setAttribute(QUALIFIED_NAME, productGuid + "/read-policy"); + policy.setAttribute(ATTR_POLICY_ACTIONS, Arrays.asList("entity-read")); + policy.setAttribute(ATTR_POLICY_CATEGORY, MESH_POLICY_CATEGORY); + policy.setAttribute(ATTR_POLICY_TYPE, POLICY_TYPE_ALLOW); + policy.setAttribute(ATTR_POLICY_RESOURCES, Arrays.asList("entity:" + entity.getAttribute(QUALIFIED_NAME))); + policy.setAttribute(ATTR_POLICY_RESOURCES_CATEGORY, POLICY_RESOURCE_CATEGORY_PERSONA_ENTITY); + policy.setAttribute(ATTR_POLICY_SERVICE_NAME, "atlas"); + policy.setAttribute(ATTR_POLICY_SUB_CATEGORY, DATA_PRODUCT); // create new constant attr + + return policy; + } + + private void createDaapVisibilityPolicy(AtlasEntity entity,AtlasVertex vertex) throws AtlasBaseException { + String productGuid = vertex.getProperty("__guid", String.class); + String vis = 
AtlasEntityUtils.getStringAttribute(entity,DAAP_VISIBILITY_ATTR); + + if (vis != null && !vis.equals(PRIVATE)){ + AtlasEntity policy = getPolicyEntity(entity, productGuid); + + switch (vis) { + case PROTECTED: + setProtectedPolicyAttributes(policy, entity); + break; + case PUBLIC: + setPublicPolicyAttributes(policy); + break; + } + createPolicy(policy); + } + } + + private void updateDaapVisibilityPolicy(AtlasEntity newEntity, AtlasEntity currentEntity) throws AtlasBaseException{ + String newProductDaapVisibility = AtlasEntityUtils.getStringAttribute(newEntity,DAAP_VISIBILITY_ATTR);// check case if attribute is not sent from FE + AtlasObjectId atlasObjectId = new AtlasObjectId(); + atlasObjectId.setTypeName(POLICY_ENTITY_TYPE); + atlasObjectId.setUniqueAttributes(AtlasEntityUtils.mapOf(QUALIFIED_NAME,currentEntity.getGuid()+"/read-policy")); + AtlasVertex policyVertex = null; + try { + policyVertex = entityRetriever.getEntityVertex(atlasObjectId); + } + catch(AtlasBaseException exp){ + if(!exp.getAtlasErrorCode().equals(AtlasErrorCode.INSTANCE_BY_UNIQUE_ATTRIBUTE_NOT_FOUND)){ + throw exp; + } + } + + AtlasEntity policy; + if (policyVertex == null) { + policy = getPolicyEntity(newEntity, newEntity.getGuid()); + } else { + policy = entityRetriever.toAtlasEntity(policyVertex); + } + + Map updatedAttributes = new HashMap<>(); + + if (newProductDaapVisibility.equals(PRIVATE)) { + updatedAttributes = setPrivatePolicyAttributes(policy); + } + else if (newProductDaapVisibility.equals(PROTECTED)) { + updatedAttributes = setProtectedPolicyAttributes(policy, + newEntity + ); + } + else if (newProductDaapVisibility.equals(PUBLIC)) { + updatedAttributes = setPublicPolicyAttributes(policy); + } + + if (policyVertex == null) { + createPolicy(policy); + } else { + updatePolicy(policy, policyVertex, updatedAttributes); + } + } + + private void createPolicy(AtlasEntity policy) throws AtlasBaseException{ + try { + RequestContext.get().setSkipAuthorizationCheck(true); + AtlasEntity.AtlasEntitiesWithExtInfo policiesExtInfo = new AtlasEntity.AtlasEntitiesWithExtInfo(); + policiesExtInfo.addEntity(policy); + EntityStream entityStream = new AtlasEntityStream(policiesExtInfo); + entityStore.createOrUpdate(entityStream, false); // adding new policy + } finally { + RequestContext.get().setSkipAuthorizationCheck(false); + } + } + + private void updatePolicy(AtlasEntity policy, AtlasVertex policyVertex,Map updatedAttributes) { + AtlasEntityType entityType = typeRegistry.getEntityTypeByName(POLICY_ENTITY_TYPE); + context.addUpdated(policy.getGuid(), policy, entityType, policyVertex); + recordUpdatedChildEntities(policyVertex, updatedAttributes); + } + + private Map setPrivatePolicyAttributes(AtlasEntity policy) { + Map updatedAttributes = new HashMap<>(); + policy.setAttribute(ATTR_POLICY_USERS, Arrays.asList()); + policy.setAttribute(ATTR_POLICY_GROUPS, Arrays.asList()); + policy.setAttribute(ATTR_POLICY_IS_ENABLED, false); + + updatedAttributes.put(ATTR_POLICY_USERS, Arrays.asList()); + updatedAttributes.put(ATTR_POLICY_GROUPS, Arrays.asList()); + updatedAttributes.put(ATTR_POLICY_IS_ENABLED, false); + + return updatedAttributes; + } + + private Map setProtectedPolicyAttributes(AtlasEntity policy, AtlasEntity entity) { + List users = AtlasEntityUtils.getListAttribute(entity, DAAP_VISIBILITY_USERS_ATTR); + List groups = AtlasEntityUtils.getListAttribute(entity, DAAP_VISIBILITY_GROUPS_ATTR); + + policy.setAttribute(ATTR_POLICY_USERS, users); + policy.setAttribute(ATTR_POLICY_GROUPS, groups); + 
policy.setAttribute(ATTR_POLICY_IS_ENABLED, true); + + Map updatedAttributes = new HashMap<>(); + updatedAttributes.put(ATTR_POLICY_USERS, users); + updatedAttributes.put(ATTR_POLICY_GROUPS, groups); + updatedAttributes.put(ATTR_POLICY_IS_ENABLED, true); + return updatedAttributes; + } + + private Map setPublicPolicyAttributes(AtlasEntity policy) { + Map updatedAttributes = new HashMap<>(); + policy.setAttribute(ATTR_POLICY_USERS, Arrays.asList()); + policy.setAttribute(ATTR_POLICY_GROUPS, Arrays.asList("public")); + policy.setAttribute(ATTR_POLICY_IS_ENABLED, true); + + updatedAttributes.put(ATTR_POLICY_USERS, Arrays.asList()); + updatedAttributes.put(ATTR_POLICY_GROUPS, Arrays.asList("public")); + updatedAttributes.put(ATTR_POLICY_IS_ENABLED, true); + return updatedAttributes; + } + + private Boolean isDaapVisibilityChanged(AtlasEntity storedProduct, AtlasEntity newProduct){ + + boolean isDaapVisibilityChanged; + // check for daapVisibility change + String currentProductDaapVisibility = AtlasEntityUtils.getStringAttribute(storedProduct, DAAP_VISIBILITY_ATTR); + String newProductDaapVisibility = AtlasEntityUtils.getStringAttribute(newProduct, DAAP_VISIBILITY_ATTR); // check case if attribute is not sent from FE + + if(newProductDaapVisibility == null){ + return false; + } + + isDaapVisibilityChanged = (!newProductDaapVisibility.equals(currentProductDaapVisibility)); + if(isDaapVisibilityChanged){ + return true; + } + + // check if new daap visibility and old daap visibility is protected then check if any user, groups added changed + if (newProductDaapVisibility.equals(PROTECTED) && currentProductDaapVisibility.equals(PROTECTED)){ + + List storedUsers = AtlasEntityUtils.getListAttribute(storedProduct, DAAP_VISIBILITY_USERS_ATTR); + List storedGroups = AtlasEntityUtils.getListAttribute(storedProduct, DAAP_VISIBILITY_GROUPS_ATTR); + List newUsers = AtlasEntityUtils.getListAttribute(newProduct, DAAP_VISIBILITY_USERS_ATTR); + List newGroups = AtlasEntityUtils.getListAttribute(newProduct, DAAP_VISIBILITY_GROUPS_ATTR); + + isDaapVisibilityChanged = compareLists(storedUsers, newUsers) || compareLists(storedGroups, newGroups); + } + + return isDaapVisibilityChanged; + } + + public static boolean compareLists(List list1, List list2) { + return !CollectionUtils.disjunction(list1, list2).isEmpty(); + } + + @Override + public void processDelete(AtlasVertex vertex) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processProductDelete"); + + try{ + if(RequestContext.get().getDeleteType() != DeleteType.SOFT){ + String productGuid = vertex.getProperty("__guid", String.class); + AtlasObjectId atlasObjectId = new AtlasObjectId(); + atlasObjectId.setTypeName(POLICY_ENTITY_TYPE); + atlasObjectId.setUniqueAttributes(AtlasEntityUtils.mapOf(QUALIFIED_NAME, productGuid+"/read-policy")); + AtlasVertex policyVertex; + try { + policyVertex = entityRetriever.getEntityVertex(atlasObjectId); + entityStore.deleteById(policyVertex.getProperty("__guid", String.class)); + } + catch(AtlasBaseException exp){ + if(!exp.getAtlasErrorCode().equals(AtlasErrorCode.INSTANCE_BY_UNIQUE_ATTRIBUTE_NOT_FOUND)){ + throw exp; + } + } + } + if(RequestContext.get().getDeleteType() == DeleteType.SOFT || RequestContext.get().getDeleteType() == DeleteType.DEFAULT){ + vertex.setProperty(DAAP_STATUS_ATTR, DAAP_ARCHIVED_STATUS); + } + } + finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + + } +} diff --git 
a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/StakeholderTitlePreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/StakeholderTitlePreProcessor.java new file mode 100644 index 0000000000..a500d88902 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/StakeholderTitlePreProcessor.java @@ -0,0 +1,282 @@ +package org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh; + +import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.AtlasException; +import org.apache.atlas.RequestContext; +import org.apache.atlas.authorize.AtlasAuthorizationUtils; +import org.apache.atlas.authorize.AtlasEntityAccessRequest; +import org.apache.atlas.authorize.AtlasPrivilege; +import org.apache.atlas.discovery.EntityDiscoveryService; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.instance.AtlasEntity; +import org.apache.atlas.model.instance.AtlasEntityHeader; +import org.apache.atlas.model.instance.AtlasRelatedObjectId; +import org.apache.atlas.model.instance.AtlasRelationship; +import org.apache.atlas.model.instance.AtlasStruct; +import org.apache.atlas.model.instance.EntityMutations; +import org.apache.atlas.repository.graphdb.AtlasGraph; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; +import org.apache.atlas.repository.store.graph.v2.EntityMutationContext; +import org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessor; +import org.apache.atlas.type.AtlasTypeRegistry; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.apache.commons.collections.CollectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; + +import static java.lang.String.format; +import static org.apache.atlas.AtlasErrorCode.BAD_REQUEST; +import static org.apache.atlas.AtlasErrorCode.OPERATION_NOT_SUPPORTED; +import static org.apache.atlas.repository.Constants.DATA_DOMAIN_ENTITY_TYPE; +import static org.apache.atlas.repository.Constants.NAME; +import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; +import static org.apache.atlas.repository.Constants.STAKEHOLDER_TITLE_ENTITY_TYPE; +import static org.apache.atlas.repository.graph.GraphHelper.getActiveChildrenVertices; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.*; +import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; + +public class StakeholderTitlePreProcessor implements PreProcessor { + + private static final Logger LOG = LoggerFactory.getLogger(StakeholderTitlePreProcessor.class); + + public static final String PATTERN_QUALIFIED_NAME_ALL_DOMAINS = "stakeholderTitle/domain/default/%s"; + public static final String PATTERN_QUALIFIED_NAME_DOMAIN = "stakeholderTitle/domain/%s"; + + + public static final String STAR = "*/super"; + public static final String NEW_STAR = "default/domain/*/super"; + public static final String ATTR_DOMAIN_QUALIFIED_NAMES = "stakeholderTitleDomainQualifiedNames"; + public static final String ATTR_STAKEHOLDER_DOMAIN_QUALIFIED_NAME = "stakeholderDomainQualifiedName"; + + public static final String REL_ATTR_STAKEHOLDERS = "stakeholders"; + + private final AtlasTypeRegistry typeRegistry; + private final EntityGraphRetriever 
entityRetriever; + protected EntityDiscoveryService discovery; + + public StakeholderTitlePreProcessor(AtlasGraph graph, + AtlasTypeRegistry typeRegistry, + EntityGraphRetriever entityRetriever) { + this.typeRegistry = typeRegistry; + this.entityRetriever = entityRetriever; + + try { + this.discovery = new EntityDiscoveryService(typeRegistry, graph, null, null, null, null); + } catch (AtlasException e) { + e.printStackTrace(); + } + } + + @Override + public void processAttributes(AtlasStruct entityStruct, EntityMutationContext context, + EntityMutations.EntityOperation operation) throws AtlasBaseException { + if (LOG.isDebugEnabled()) { + LOG.debug("StakeholderTitlePreProcessor.processAttributes: pre processing {}, {}", entityStruct.getAttribute(QUALIFIED_NAME), operation); + } + + AtlasEntity entity = (AtlasEntity) entityStruct; + + switch (operation) { + case CREATE: + processCreateStakeholderTitle(entity); + break; + case UPDATE: + processUpdateStakeholderTitle(context, entity); + break; + } + } + + private void processCreateStakeholderTitle(AtlasEntity entity) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processCreateStakeholderTitle"); + + try { + validateRelations(entity); + + if (RequestContext.get().isSkipAuthorizationCheck()) { + // To create bootstrap titles with provided qualifiedName + return; + } + + String name = (String) entity.getAttribute(NAME); + verifyDuplicateAssetByName(STAKEHOLDER_TITLE_ENTITY_TYPE, name, discovery, + format("Stakeholder title with name %s already exists", name)); + + List domainQualifiedNames = null; + if (entity.hasAttribute(ATTR_DOMAIN_QUALIFIED_NAMES)) { + Object qNamesAsObject = entity.getAttribute(ATTR_DOMAIN_QUALIFIED_NAMES); + if (qNamesAsObject != null) { + domainQualifiedNames = (List) qNamesAsObject; + } + } + + if (CollectionUtils.isEmpty(domainQualifiedNames)) { + throw new AtlasBaseException(BAD_REQUEST, "Please provide attribute " + ATTR_DOMAIN_QUALIFIED_NAMES); + } + if (domainQualifiedNames.contains(NEW_STAR) || domainQualifiedNames.contains(STAR)) { + if (domainQualifiedNames.size() > 1) { + domainQualifiedNames.clear(); + domainQualifiedNames.add(NEW_STAR); + entity.setAttribute(ATTR_DOMAIN_QUALIFIED_NAMES, domainQualifiedNames); + }else { + domainQualifiedNames.replaceAll(s -> s.equals(STAR) ? 
NEW_STAR : s);
+                }
+
+                String qualifiedName = format(PATTERN_QUALIFIED_NAME_ALL_DOMAINS, getUUID());
+                entity.setAttribute(QUALIFIED_NAME, qualifiedName);
+
+            } else {
+                entity.setAttribute(QUALIFIED_NAME, format(PATTERN_QUALIFIED_NAME_DOMAIN, getUUID()));
+            }
+
+            authorizeDomainAccess(domainQualifiedNames);
+
+        } finally {
+            RequestContext.get().endMetricRecord(metricRecorder);
+        }
+    }
+
+    private void processUpdateStakeholderTitle(EntityMutationContext context, AtlasEntity entity) throws AtlasBaseException {
+        AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processUpdateStakeholderTitle");
+
+        try {
+            if (RequestContext.get().isSkipAuthorizationCheck()) {
+                // To update bootstrap titles with provided qualifiedName
+                return;
+            }
+
+            validateRelations(entity);
+
+            AtlasVertex vertex = context.getVertex(entity.getGuid());
+
+            String currentName = vertex.getProperty(NAME, String.class);
+            String newName = (String) entity.getAttribute(NAME);
+            if (!currentName.equals(newName)) {
+                verifyDuplicateAssetByName(STAKEHOLDER_TITLE_ENTITY_TYPE, newName, discovery,
+                        format("StakeholderTitle with name %s already exists", newName));
+            }
+
+            List<String> domainQualifiedNames = null;
+            List<String> currentDomainQualifiedNames = vertex.getMultiValuedProperty(ATTR_DOMAIN_QUALIFIED_NAMES, String.class);
+            if (entity.hasAttribute(ATTR_DOMAIN_QUALIFIED_NAMES)) {
+                Object qNamesAsObject = entity.getAttribute(ATTR_DOMAIN_QUALIFIED_NAMES);
+                if (qNamesAsObject != null) {
+                    domainQualifiedNames = (List<String>) qNamesAsObject;
+                    if (CollectionUtils.isEqualCollection(domainQualifiedNames, currentDomainQualifiedNames)) {
+                        domainQualifiedNames = currentDomainQualifiedNames;
+                    } else {
+                        handleDomainQualifiedNamesUpdate(entity, vertex, domainQualifiedNames, currentDomainQualifiedNames);
+                    }
+                }
+            }
+
+            if (CollectionUtils.isEmpty(domainQualifiedNames)) {
+                domainQualifiedNames = currentDomainQualifiedNames;
+            }
+
+            authorizeDomainAccess(domainQualifiedNames);
+
+            String vertexQName = vertex.getProperty(QUALIFIED_NAME, String.class);
+            entity.setAttribute(QUALIFIED_NAME, vertexQName);
+
+        } finally {
+            RequestContext.get().endMetricRecord(metricRecorder);
+        }
+    }
+
+    @Override
+    public void processDelete(AtlasVertex vertex) throws AtlasBaseException {
+        AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processDeleteStakeholderTitle");
+
+        try {
+            AtlasEntity titleEntity = entityRetriever.toAtlasEntity(vertex);
+
+            List<AtlasRelatedObjectId> stakeholders = null;
+            Object stakeholdersAsObject = titleEntity.getRelationshipAttribute(REL_ATTR_STAKEHOLDERS);
+            if (stakeholdersAsObject != null) {
+                stakeholders = (List<AtlasRelatedObjectId>) stakeholdersAsObject;
+            }
+
+            if (CollectionUtils.isNotEmpty(stakeholders)) {
+                Optional<AtlasRelatedObjectId> activeStakeholder = stakeholders.stream().filter(x -> x.getRelationshipStatus() == AtlasRelationship.Status.ACTIVE).findFirst();
+                if (activeStakeholder.isPresent()) {
+                    throw new AtlasBaseException(OPERATION_NOT_SUPPORTED, "Cannot delete StakeholderTitle as it has a reference to an active Stakeholder");
+                }
+
+                List<String> domainQualifiedNames = vertex.getMultiValuedProperty(ATTR_DOMAIN_QUALIFIED_NAMES, String.class);
+
+                authorizeDomainAccess(domainQualifiedNames);
+            }
+        } finally {
+            RequestContext.get().endMetricRecord(metricRecorder);
+        }
+    }
+
+    private List<String> getRemovedItems(List<String> oldList, List<String> newList) {
+        return oldList.stream()
+                .filter(qName -> !newList.contains(qName))
+                .collect(Collectors.toList());
+    }
+
+    private boolean isStakeholderAssociatedWithRemovedItems(AtlasVertex vertex,
List<String> removedItems) throws AtlasBaseException {
+        Iterator<AtlasVertex> children = getActiveChildrenVertices(vertex, STAKEHOLDER_TITLE_EDGE_LABEL);
+        while (children.hasNext()) {
+            if (removedItems.contains(STAR) || removedItems.contains(NEW_STAR)) {
+                return true;
+            }
+            AtlasVertex child = children.next();
+            String domainQualifiedName = child.getProperty(ATTR_STAKEHOLDER_DOMAIN_QUALIFIED_NAME, String.class);
+            for (String removedItem : removedItems) {
+                if (domainQualifiedName.equals(removedItem)) {
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
+    private void handleDomainQualifiedNamesUpdate(AtlasEntity entity, AtlasVertex vertex, List<String> domainQualifiedNames, List<String> currentDomainQualifiedNames) throws AtlasBaseException {
+        if (domainQualifiedNames.contains(STAR) || domainQualifiedNames.contains(NEW_STAR)) {
+            domainQualifiedNames.clear();
+            domainQualifiedNames.add(NEW_STAR);
+            entity.setAttribute(ATTR_DOMAIN_QUALIFIED_NAMES, domainQualifiedNames);
+        } else {
+            List<String> removedItems = getRemovedItems(currentDomainQualifiedNames, domainQualifiedNames);
+            if (!removedItems.isEmpty() && isStakeholderAssociatedWithRemovedItems(vertex, removedItems)) {
+                throw new AtlasBaseException(OPERATION_NOT_SUPPORTED, "Cannot remove a Domain as StakeholderTitle has a reference to a Stakeholder in that Domain");
+            }
+        }
+    }
+
+    private void authorizeDomainAccess(List<String> domainQualifiedNames) throws AtlasBaseException {
+        for (String domainQualifiedName : domainQualifiedNames) {
+            String domainQualifiedNameToAuth;
+
+            if (domainQualifiedNames.contains(STAR) || domainQualifiedNames.contains(NEW_STAR)) {
+                domainQualifiedNameToAuth = NEW_STAR;
+            } else {
+                domainQualifiedNameToAuth = domainQualifiedName;
+            }
+
+            AtlasEntityHeader domainHeaderToAuth = new AtlasEntityHeader(DATA_DOMAIN_ENTITY_TYPE, mapOf(QUALIFIED_NAME, domainQualifiedNameToAuth));
+
+            AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_UPDATE, new AtlasEntityHeader(domainHeaderToAuth)),
+                    "mutate StakeholderTitle for domain ", domainQualifiedName);
+        }
+    }
+
+    private void validateRelations(AtlasEntity entity) throws AtlasBaseException {
+        if (entity.hasRelationshipAttribute(REL_ATTR_STAKEHOLDERS)) {
+            throw new AtlasBaseException(OPERATION_NOT_SUPPORTED, "Managing Stakeholders while creating/updating a StakeholderTitle is not supported");
+        }
+    }
+}
+
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/AbstractGlossaryPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/AbstractGlossaryPreProcessor.java
index 91950f783c..383273d73a 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/AbstractGlossaryPreProcessor.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/AbstractGlossaryPreProcessor.java
@@ -26,7 +26,6 @@
 import org.apache.atlas.authorize.AtlasPrivilege;
 import org.apache.atlas.discovery.EntityDiscoveryService;
 import org.apache.atlas.exception.AtlasBaseException;
-import org.apache.atlas.model.discovery.IndexSearchParams;
 import org.apache.atlas.model.instance.AtlasEntity;
 import org.apache.atlas.model.instance.AtlasEntityHeader;
 import org.apache.atlas.model.instance.AtlasObjectId;
@@ -41,28 +40,24 @@
 import org.apache.atlas.tasks.TaskManagement;
 import org.apache.atlas.type.AtlasEntityType;
 import org.apache.atlas.type.AtlasTypeRegistry;
+import org.apache.atlas.util.lexoRank.LexoRank;
 import 
org.apache.atlas.utils.AtlasPerfMetrics; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.util.*; import java.util.stream.Collectors; import static org.apache.atlas.repository.Constants.ATLAS_GLOSSARY_TERM_ENTITY_TYPE; import static org.apache.atlas.repository.Constants.ELASTICSEARCH_PAGINATION_SIZE; import static org.apache.atlas.repository.Constants.NAME; import static org.apache.atlas.repository.Constants.STATE_PROPERTY_KEY; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.indexSearchPaginated; import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; -import static org.apache.atlas.type.Constants.MEANINGS_PROPERTY_KEY; -import static org.apache.atlas.type.Constants.MEANINGS_TEXT_PROPERTY_KEY; -import static org.apache.atlas.type.Constants.MEANING_NAMES_PROPERTY_KEY; -import static org.apache.atlas.type.Constants.PENDING_TASKS_PROPERTY_KEY; +import static org.apache.atlas.type.Constants.*; +import static org.apache.atlas.type.Constants.LEXICOGRAPHICAL_SORT_ORDER; public abstract class AbstractGlossaryPreProcessor implements PreProcessor { private static final Logger LOG = LoggerFactory.getLogger(AbstractGlossaryPreProcessor.class); @@ -72,6 +67,7 @@ public abstract class AbstractGlossaryPreProcessor implements PreProcessor { protected static final String ATTR_MEANINGS = "meanings"; protected static final String ATTR_CATEGORIES = "categories"; + protected final AtlasTypeRegistry typeRegistry; protected final EntityGraphRetriever entityRetriever; protected final TaskManagement taskManagement; @@ -103,7 +99,7 @@ public void termExists(String termName, String glossaryQName) throws AtlasBaseEx Map dsl = mapOf("query", mapOf("bool", mapOf("must", mustClauseList))); - List terms = indexSearchPaginated(dsl); + List terms = indexSearchPaginated(dsl, null, this.discovery); if (CollectionUtils.isNotEmpty(terms)) { ret = terms.stream().map(term -> (String) term.getAttribute(NAME)).anyMatch(name -> termName.equals(name)); @@ -137,38 +133,6 @@ public boolean checkEntityTermAssociation(String termQName) throws AtlasBaseExce return entityHeader != null; } - public List indexSearchPaginated(Map dsl) throws AtlasBaseException { - IndexSearchParams searchParams = new IndexSearchParams(); - List ret = new ArrayList<>(); - - List sortList = new ArrayList<>(0); - sortList.add(mapOf("__timestamp", mapOf("order", "asc"))); - sortList.add(mapOf("__guid", mapOf("order", "asc"))); - dsl.put("sort", sortList); - - int from = 0; - int size = 100; - boolean hasMore = true; - do { - dsl.put("from", from); - dsl.put("size", size); - searchParams.setDsl(dsl); - - List headers = discovery.directIndexSearch(searchParams).getEntities(); - - if (CollectionUtils.isNotEmpty(headers)) { - ret.addAll(headers); - } else { - hasMore = false; - } - - from += size; - - } while (hasMore); - - return ret; - } - public void updateMeaningsAttributesInEntitiesOnTermUpdate(String currentTermName, String updatedTermName, String termQName, String updatedTermQName, String termGuid) throws AtlasBaseException { diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/CategoryPreProcessor.java 
b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/CategoryPreProcessor.java index eb39ff3b1d..0d3e26b83d 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/CategoryPreProcessor.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/CategoryPreProcessor.java @@ -47,6 +47,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.*; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; @@ -54,6 +55,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.Objects; import java.util.stream.Collectors; import static org.apache.atlas.AtlasErrorCode.BAD_REQUEST; @@ -70,9 +72,7 @@ import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.*; import static org.apache.atlas.repository.store.graph.v2.tasks.MeaningsTaskFactory.UPDATE_ENTITY_MEANINGS_ON_TERM_UPDATE; import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; -import static org.apache.atlas.type.Constants.CATEGORIES_PARENT_PROPERTY_KEY; -import static org.apache.atlas.type.Constants.CATEGORIES_PROPERTY_KEY; -import static org.apache.atlas.type.Constants.GLOSSARY_PROPERTY_KEY; +import static org.apache.atlas.type.Constants.*; public class CategoryPreProcessor extends AbstractGlossaryPreProcessor { private static final Logger LOG = LoggerFactory.getLogger(CategoryPreProcessor.class); @@ -117,6 +117,7 @@ public void processAttributes(AtlasStruct entityStruct, EntityMutationContext co private void processCreateCategory(AtlasEntity entity, AtlasVertex vertex) throws AtlasBaseException { AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processCreateCategory"); String catName = (String) entity.getAttribute(NAME); + String parentQname = null; if (StringUtils.isEmpty(catName) || isNameInvalid(catName)) { throw new AtlasBaseException(AtlasErrorCode.INVALID_DISPLAY_NAME); @@ -126,6 +127,16 @@ private void processCreateCategory(AtlasEntity entity, AtlasVertex vertex) throw categoryExists(catName, glossaryQualifiedName); validateParent(glossaryQualifiedName); + if (parentCategory != null) { + parentQname = (String) parentCategory.getAttribute(QUALIFIED_NAME); + } + String lexicographicalSortOrder = (String) entity.getAttribute(LEXICOGRAPHICAL_SORT_ORDER); + if(StringUtils.isEmpty(lexicographicalSortOrder)){ + assignNewLexicographicalSortOrder(entity,glossaryQualifiedName, parentQname, this.discovery); + } else { + isValidLexoRank(lexicographicalSortOrder, glossaryQualifiedName, parentQname, this.discovery); + } + entity.setAttribute(QUALIFIED_NAME, createQualifiedName(vertex)); AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_CREATE, new AtlasEntityHeader(entity)), "create entity: type=", entity.getTypeName()); @@ -151,6 +162,17 @@ private void processUpdateCategory(AtlasEntity entity, AtlasVertex vertex) throw String newGlossaryQualifiedName = (String) anchor.getAttribute(QUALIFIED_NAME); + String lexicographicalSortOrder = (String) entity.getAttribute(LEXICOGRAPHICAL_SORT_ORDER); + String parentQname = ""; + if(Objects.nonNull(parentCategory)) { + parentQname = (String) parentCategory.getAttribute(QUALIFIED_NAME); + } + if(StringUtils.isNotEmpty(lexicographicalSortOrder)) { + isValidLexoRank(lexicographicalSortOrder, newGlossaryQualifiedName, parentQname, this.discovery); + } else { + 
entity.removeAttribute(LEXICOGRAPHICAL_SORT_ORDER); + } + if (!currentGlossaryQualifiedName.equals(newGlossaryQualifiedName)){ //Auth check isAuthorized(currentGlossaryHeader, anchor); @@ -273,6 +295,7 @@ public void moveChildTermToAnotherGlossary(AtlasVertex termVertex, //check duplicate term name termExists(termName, targetGlossaryQualifiedName); + ensureOnlyOneCategoryIsAssociated(termVertex); String currentTermQualifiedName = termVertex.getProperty(QUALIFIED_NAME, String.class); String updatedTermQualifiedName = currentTermQualifiedName.replace(sourceGlossaryQualifiedName, targetGlossaryQualifiedName); @@ -309,6 +332,15 @@ public void moveChildTermToAnotherGlossary(AtlasVertex termVertex, } } + private void ensureOnlyOneCategoryIsAssociated(AtlasVertex vertex) throws AtlasBaseException { + final Integer numOfCategoryEdges = GraphHelper.getCountOfCategoryEdges(vertex); + + if(numOfCategoryEdges>1) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Cannot move term with multiple " + + "categories associated to another glossary"); + } + } + private void validateParentForGlossaryChange(AtlasEntity category, AtlasVertex categoryVertex, String targetGlossaryQualifiedName) throws AtlasBaseException { @@ -356,7 +388,7 @@ private void categoryExists(String categoryName, String glossaryQualifiedName) t Map dsl = mapOf("query", mapOf("bool", bool)); - List categories = indexSearchPaginated(dsl); + List categories = indexSearchPaginated(dsl, null, this.discovery); if (CollectionUtils.isNotEmpty(categories)) { for (AtlasEntityHeader category : categories) { @@ -489,4 +521,5 @@ private String createQualifiedName(AtlasVertex vertex) { return getUUID() + "@" + anchor.getAttribute(QUALIFIED_NAME); } + } diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/GlossaryPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/GlossaryPreProcessor.java index 6e3c962426..fc0bec0654 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/GlossaryPreProcessor.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/GlossaryPreProcessor.java @@ -20,10 +20,12 @@ import org.apache.atlas.AtlasErrorCode; import org.apache.atlas.RequestContext; +import org.apache.atlas.discovery.EntityDiscoveryService; import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.model.instance.AtlasEntity; import org.apache.atlas.model.instance.AtlasStruct; import org.apache.atlas.model.instance.EntityMutations; +import org.apache.atlas.repository.graphdb.AtlasGraph; import org.apache.atlas.repository.graphdb.AtlasVertex; import org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2; import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; @@ -37,18 +39,24 @@ import static org.apache.atlas.repository.Constants.NAME; import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; -import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.getUUID; -import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.isNameInvalid; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.*; +import static org.apache.atlas.type.Constants.LEXICOGRAPHICAL_SORT_ORDER; public class GlossaryPreProcessor implements PreProcessor { private static final Logger LOG = LoggerFactory.getLogger(GlossaryPreProcessor.class); private 
final AtlasTypeRegistry typeRegistry; private final EntityGraphRetriever entityRetriever; + protected EntityDiscoveryService discovery; - public GlossaryPreProcessor(AtlasTypeRegistry typeRegistry, EntityGraphRetriever entityRetriever) { + public GlossaryPreProcessor(AtlasTypeRegistry typeRegistry, EntityGraphRetriever entityRetriever, AtlasGraph graph) { this.entityRetriever = entityRetriever; this.typeRegistry = typeRegistry; + try{ + this.discovery = new EntityDiscoveryService(typeRegistry, graph, null, null, null, null); + } catch (Exception e) { + e.printStackTrace(); + } } @Override @@ -77,11 +85,19 @@ private void processCreateGlossary(AtlasStruct entity) throws AtlasBaseException if (StringUtils.isEmpty(glossaryName) || isNameInvalid(glossaryName)) { throw new AtlasBaseException(AtlasErrorCode.INVALID_DISPLAY_NAME); } + String lexicographicalSortOrder = (String) entity.getAttribute(LEXICOGRAPHICAL_SORT_ORDER); + if (glossaryExists(glossaryName)) { throw new AtlasBaseException(AtlasErrorCode.GLOSSARY_ALREADY_EXISTS,glossaryName); } + if(StringUtils.isEmpty(lexicographicalSortOrder)) { + assignNewLexicographicalSortOrder((AtlasEntity) entity, null, null, this.discovery); + } else { + isValidLexoRank(lexicographicalSortOrder, "", "", this.discovery); + } + entity.setAttribute(QUALIFIED_NAME, createQualifiedName()); RequestContext.get().endMetricRecord(metricRecorder); } @@ -90,7 +106,6 @@ private void processUpdateGlossary(AtlasStruct entity, AtlasVertex vertex) throw AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processUpdateGlossary"); String glossaryName = (String) entity.getAttribute(NAME); String vertexName = vertex.getProperty(NAME, String.class); - if (!vertexName.equals(glossaryName) && glossaryExists(glossaryName)) { throw new AtlasBaseException(AtlasErrorCode.GLOSSARY_ALREADY_EXISTS,glossaryName); } @@ -98,6 +113,12 @@ private void processUpdateGlossary(AtlasStruct entity, AtlasVertex vertex) throw if (StringUtils.isEmpty(glossaryName) || isNameInvalid(glossaryName)) { throw new AtlasBaseException(AtlasErrorCode.INVALID_DISPLAY_NAME); } + String lexicographicalSortOrder = (String) entity.getAttribute(LEXICOGRAPHICAL_SORT_ORDER); + if(StringUtils.isNotEmpty(lexicographicalSortOrder)) { + isValidLexoRank(lexicographicalSortOrder, "", "", this.discovery); + } else { + entity.removeAttribute(LEXICOGRAPHICAL_SORT_ORDER); + } String vertexQnName = vertex.getProperty(QUALIFIED_NAME, String.class); diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/TermPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/TermPreProcessor.java index 53e12ea93e..b8b4ff2e44 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/TermPreProcessor.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/TermPreProcessor.java @@ -45,11 +45,13 @@ import org.springframework.stereotype.Component; import java.util.Iterator; import java.util.List; +import java.util.Objects; import static org.apache.atlas.repository.Constants.*; import static org.apache.atlas.repository.graph.GraphHelper.getActiveParentVertices; import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.*; import static org.apache.atlas.repository.store.graph.v2.tasks.MeaningsTaskFactory.UPDATE_ENTITY_MEANINGS_ON_TERM_UPDATE; +import static 
org.apache.atlas.type.Constants.LEXICOGRAPHICAL_SORT_ORDER; @Component public class TermPreProcessor extends AbstractGlossaryPreProcessor { @@ -95,7 +97,14 @@ private void processCreateTerm(AtlasEntity entity, AtlasVertex vertex) throws At termExists(termName, glossaryQName); - validateCategory(entity); + String parentQname = validateAndGetCategory(entity); + + String lexicographicalSortOrder = (String) entity.getAttribute(LEXICOGRAPHICAL_SORT_ORDER); + if(StringUtils.isEmpty(lexicographicalSortOrder)){ + assignNewLexicographicalSortOrder(entity, glossaryQName, parentQname, this.discovery); + } else { + isValidLexoRank(lexicographicalSortOrder, glossaryQName, parentQname, this.discovery); + } entity.setAttribute(QUALIFIED_NAME, createQualifiedName()); AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_CREATE, new AtlasEntityHeader(entity)), @@ -114,7 +123,7 @@ private void processUpdateTerm(AtlasEntity entity, AtlasVertex vertex) throws At throw new AtlasBaseException(AtlasErrorCode.INVALID_DISPLAY_NAME); } - validateCategory(entity); + String parentQname = validateAndGetCategory(entity); AtlasEntity storedTerm = entityRetriever.toAtlasEntity(vertex); AtlasRelatedObjectId currentGlossary = (AtlasRelatedObjectId) storedTerm.getRelationshipAttribute(ANCHOR); @@ -125,6 +134,13 @@ private void processUpdateTerm(AtlasEntity entity, AtlasVertex vertex) throws At String newGlossaryQualifiedName = (String) anchor.getAttribute(QUALIFIED_NAME); + String lexicographicalSortOrder = (String) entity.getAttribute(LEXICOGRAPHICAL_SORT_ORDER); + if(StringUtils.isNotEmpty(lexicographicalSortOrder)) { + isValidLexoRank(lexicographicalSortOrder, newGlossaryQualifiedName, parentQname, this.discovery); + } else { + entity.removeAttribute(LEXICOGRAPHICAL_SORT_ORDER); + } + if (!currentGlossaryQualifiedName.equals(newGlossaryQualifiedName)){ //Auth check isAuthorized(currentGlossaryHeader, anchor); @@ -159,15 +175,28 @@ private void processUpdateTerm(AtlasEntity entity, AtlasVertex vertex) throws At RequestContext.get().endMetricRecord(metricRecorder); } - private void validateCategory(AtlasEntity entity) throws AtlasBaseException { + private static void ensureOnlyOneCategoryIsAssociated(AtlasEntity entity) throws AtlasBaseException { + if(entity.hasRelationshipAttribute(ATTR_CATEGORIES) && Objects.nonNull(entity.getRelationshipAttribute(ATTR_CATEGORIES))) { + List categories = (List) entity.getRelationshipAttribute(ATTR_CATEGORIES); + + if(categories.size() > 1) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Cannot move term with multiple " + + "categories associated to another glossary"); + } + + } + + } + + private String validateAndGetCategory(AtlasEntity entity) throws AtlasBaseException { String glossaryQualifiedName = (String) anchor.getAttribute(QUALIFIED_NAME); + String categoryQualifiedName = null; if (entity.hasRelationshipAttribute(ATTR_CATEGORIES) && entity.getRelationshipAttribute(ATTR_CATEGORIES) != null) { List categories = (List) entity.getRelationshipAttribute(ATTR_CATEGORIES); if (CollectionUtils.isNotEmpty(categories)) { AtlasObjectId category = categories.get(0); - String categoryQualifiedName; if (category.getUniqueAttributes() != null && category.getUniqueAttributes().containsKey(QUALIFIED_NAME)) { categoryQualifiedName = (String) category.getUniqueAttributes().get(QUALIFIED_NAME); @@ -181,6 +210,7 @@ private void validateCategory(AtlasEntity entity) throws AtlasBaseException { } } } + return categoryQualifiedName; } public 
String moveTermToAnotherGlossary(AtlasEntity entity, AtlasVertex vertex, diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationPropagateTaskFactory.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationPropagateTaskFactory.java index ca32e23408..65ac992b17 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationPropagateTaskFactory.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationPropagateTaskFactory.java @@ -54,6 +54,8 @@ public class ClassificationPropagateTaskFactory implements TaskFactory { public static final String CLASSIFICATION_PROPAGATION_RELATIONSHIP_UPDATE = "CLASSIFICATION_PROPAGATION_RELATIONSHIP_UPDATE"; + public static final String CLEANUP_CLASSIFICATION_PROPAGATION = "CLEANUP_CLASSIFICATION_PROPAGATION"; + public static final List supportedTypes = new ArrayList() {{ @@ -63,6 +65,7 @@ public class ClassificationPropagateTaskFactory implements TaskFactory { add(CLASSIFICATION_ONLY_PROPAGATION_DELETE_ON_HARD_DELETE); add(CLASSIFICATION_REFRESH_PROPAGATION); add(CLASSIFICATION_PROPAGATION_RELATIONSHIP_UPDATE); + add(CLEANUP_CLASSIFICATION_PROPAGATION); }}; @@ -102,6 +105,10 @@ public org.apache.atlas.tasks.AbstractTask create(AtlasTask task) { case CLASSIFICATION_PROPAGATION_RELATIONSHIP_UPDATE: return new ClassificationPropagationTasks.UpdateRelationship(task, graph, entityGraphMapper, deleteDelegate, relationshipStore); + case CLEANUP_CLASSIFICATION_PROPAGATION: + return new ClassificationPropagationTasks.CleanUpClassificationPropagation(task, graph, entityGraphMapper, deleteDelegate, relationshipStore); + + default: LOG.warn("Type: {} - {} not found!. The task will be ignored.", taskType, taskGuid); return null; diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationPropagationTasks.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationPropagationTasks.java index 69abc3aafc..d1191d3aac 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationPropagationTasks.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationPropagationTasks.java @@ -121,4 +121,17 @@ protected void run(Map parameters) throws AtlasBaseException { entityGraphMapper.updateTagPropagations(relationshipEdgeId, relationship); } } + + public static class CleanUpClassificationPropagation extends ClassificationTask { + public CleanUpClassificationPropagation(AtlasTask task, AtlasGraph graph, EntityGraphMapper entityGraphMapper, DeleteHandlerDelegate deleteDelegate, AtlasRelationshipStore relationshipStore) { + super(task, graph, entityGraphMapper, deleteDelegate, relationshipStore); + } + + @Override + protected void run(Map parameters) throws AtlasBaseException { + String classificationName = (String) parameters.get(PARAM_CLASSIFICATION_NAME); + + entityGraphMapper.cleanUpClassificationPropagation(classificationName); + } + } } diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationTask.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationTask.java index 76112dd685..f1796ad5bd 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationTask.java +++ 
b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationTask.java @@ -50,6 +50,8 @@ public abstract class ClassificationTask extends AbstractTask { public static final String PARAM_RELATIONSHIP_GUID = "relationshipGuid"; public static final String PARAM_RELATIONSHIP_OBJECT = "relationshipObject"; public static final String PARAM_RELATIONSHIP_EDGE_ID = "relationshipEdgeId"; + + public static final String PARAM_CLASSIFICATION_NAME = "classificationName"; public static final String PARAM_REFERENCED_VERTEX_ID = "referencedVertexId"; public static final String PARAM_IS_TERM_ENTITY_EDGE = "isTermEntityEdge"; public static final String PARAM_PREVIOUS_CLASSIFICATION_RESTRICT_PROPAGATE_THROUGH_LINEAGE = "previousRestrictPropagationThroughLineage"; diff --git a/repository/src/main/java/org/apache/atlas/repository/util/AccessControlUtils.java b/repository/src/main/java/org/apache/atlas/repository/util/AccessControlUtils.java index d4686a9562..a7052a1ee3 100644 --- a/repository/src/main/java/org/apache/atlas/repository/util/AccessControlUtils.java +++ b/repository/src/main/java/org/apache/atlas/repository/util/AccessControlUtils.java @@ -18,7 +18,6 @@ package org.apache.atlas.repository.util; import org.apache.atlas.exception.AtlasBaseException; -import org.apache.atlas.featureflag.FeatureFlagStore; import org.apache.atlas.model.discovery.IndexSearchParams; import org.apache.atlas.model.instance.AtlasEntity; import org.apache.atlas.model.instance.AtlasEntityHeader; @@ -28,7 +27,6 @@ import org.apache.atlas.repository.graphdb.AtlasIndexQuery; import org.apache.atlas.repository.graphdb.AtlasVertex; import org.apache.atlas.repository.graphdb.DirectIndexQueryResult; -import org.apache.atlas.repository.store.graph.AtlasEntityStore; import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; import org.apache.atlas.util.NanoIdUtils; import org.apache.commons.collections.CollectionUtils; @@ -43,17 +41,8 @@ import java.util.stream.Collectors; import static org.apache.atlas.AtlasErrorCode.ACCESS_CONTROL_ALREADY_EXISTS; -import static org.apache.atlas.AtlasErrorCode.DISABLED_OPERATION; import static org.apache.atlas.AtlasErrorCode.OPERATION_NOT_SUPPORTED; -import static org.apache.atlas.repository.Constants.ATTR_ADMIN_GROUPS; -import static org.apache.atlas.repository.Constants.ATTR_ADMIN_ROLES; -import static org.apache.atlas.repository.Constants.ATTR_ADMIN_USERS; -import static org.apache.atlas.repository.Constants.ATTR_TENANT_ID; -import static org.apache.atlas.repository.Constants.CONNECTION_ENTITY_TYPE; -import static org.apache.atlas.repository.Constants.DEFAULT_TENANT_ID; -import static org.apache.atlas.repository.Constants.NAME; -import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; -import static org.apache.atlas.repository.Constants.VERTEX_INDEX_NAME; +import static org.apache.atlas.repository.Constants.*; import static org.apache.atlas.repository.util.AtlasEntityUtils.getListAttribute; import static org.apache.atlas.repository.util.AtlasEntityUtils.getQualifiedName; import static org.apache.atlas.repository.util.AtlasEntityUtils.getStringAttribute; @@ -79,6 +68,7 @@ public final class AccessControlUtils { public static final String ATTR_POLICY_ACTIONS = "policyActions"; public static final String ATTR_POLICY_CATEGORY = "policyCategory"; public static final String ATTR_POLICY_SUB_CATEGORY = "policySubCategory"; + public static final String ATTR_POLICY_FILTER_CRITERIA = "policyFilterCriteria"; public static final String ATTR_POLICY_RESOURCES = 
"policyResources"; public static final String ATTR_POLICY_IS_ENABLED = "isPolicyEnabled"; public static final String ATTR_POLICY_CONNECTION_QN = "connectionQualifiedName"; @@ -98,7 +88,8 @@ public final class AccessControlUtils { public static final String ACCESS_READ_PERSONA_DOMAIN = "persona-domain-read"; public static final String ACCESS_READ_PERSONA_SUB_DOMAIN = "persona-domain-sub-domain-read"; public static final String ACCESS_READ_PERSONA_PRODUCT = "persona-domain-product-read"; - + public static final String ACCESS_READ_DOMAIN = "domain-entity-read"; + public static final String POLICY_CATEGORY_PERSONA = "persona"; public static final String POLICY_CATEGORY_PURPOSE = "purpose"; public static final String POLICY_CATEGORY_DATAMESH = "datamesh"; @@ -128,6 +119,8 @@ public final class AccessControlUtils { public static final String INSTANCE_DOMAIN_KEY = "instance"; + public static final String POLICY_SERVICE_NAME_ABAC = "atlas_abac"; + private AccessControlUtils() {} public static String getEntityName(AtlasEntity entity) { @@ -179,6 +172,14 @@ public static String getPolicyCategory(AtlasEntity policyEntity) { return getStringAttribute(policyEntity, ATTR_POLICY_CATEGORY); } + public static String getPolicyFilterCriteria(AtlasEntity policyEntity) { + return getStringAttribute(policyEntity, ATTR_POLICY_FILTER_CRITERIA); + } + + public static String getPolicyFilterCriteria(AtlasEntityHeader policyEntity) { + return getStringAttribute(policyEntity, ATTR_POLICY_FILTER_CRITERIA); + } + public static String getPolicyResourceCategory(AtlasEntity policyEntity) { return getStringAttribute(policyEntity, ATTR_POLICY_RESOURCES_CATEGORY); } @@ -203,6 +204,10 @@ public static String getPolicyServiceName(AtlasEntity policyEntity) { return getStringAttribute(policyEntity, ATTR_POLICY_SERVICE_NAME); } + public static String getPolicyServiceName(AtlasEntityHeader policyEntity) { + return getStringAttribute(policyEntity, ATTR_POLICY_SERVICE_NAME); + } + public static String getPolicyType(AtlasEntity policyEntity) { return getStringAttribute(policyEntity, ATTR_POLICY_TYPE); } @@ -260,19 +265,18 @@ public static AtlasEntity extractConnectionFromResource(EntityGraphRetriever ent } public static String getPersonaRoleName(AtlasEntity persona) { - String qualifiedName = getStringAttribute(persona, QUALIFIED_NAME); - - String[] parts = qualifiedName.split("/"); - - return "persona_" + parts[parts.length - 1]; + return "persona_" + getESAliasName(persona); } public static String getESAliasName(AtlasEntity entity) { String qualifiedName = getStringAttribute(entity, QUALIFIED_NAME); + return getESAliasName(qualifiedName); + } + public static String getESAliasName(String qualifiedName) { String[] parts = qualifiedName.split("/"); - return parts[parts.length - 1]; + return parts[1]; } public static List getPolicies(AtlasEntity.AtlasEntityWithExtInfo accessControl) { @@ -345,7 +349,7 @@ public static String getTenantId(AtlasStruct entity) { public static void validateNoPoliciesAttached(AtlasEntity entity) throws AtlasBaseException { List policies = (List) entity.getRelationshipAttribute(REL_ATTR_POLICIES); if (CollectionUtils.isNotEmpty(policies)) { - throw new AtlasBaseException(OPERATION_NOT_SUPPORTED, "Can not attach a policy while creating/updating Persona/Purpose"); + throw new AtlasBaseException(OPERATION_NOT_SUPPORTED, "Can not attach a policy while creating/updating Persona/Purpose/Stakeholder"); } } @@ -379,7 +383,8 @@ public static void validateUniquenessByTags(AtlasGraph graph, List tags, private static 
boolean hasMatchingVertex(AtlasGraph graph, List newTags, IndexSearchParams indexSearchParams) throws AtlasBaseException { - AtlasIndexQuery indexQuery = graph.elasticsearchQuery(VERTEX_INDEX_NAME); + String vertexIndexName = getESIndex(); + AtlasIndexQuery indexQuery = graph.elasticsearchQuery(vertexIndexName); DirectIndexQueryResult indexQueryResult = indexQuery.vertices(indexSearchParams); Iterator iterator = indexQueryResult.getIterator(); diff --git a/repository/src/main/java/org/apache/atlas/repository/util/FilterUtil.java b/repository/src/main/java/org/apache/atlas/repository/util/FilterUtil.java index 01c6ee0990..4c03d2082f 100644 --- a/repository/src/main/java/org/apache/atlas/repository/util/FilterUtil.java +++ b/repository/src/main/java/org/apache/atlas/repository/util/FilterUtil.java @@ -29,6 +29,10 @@ import org.apache.commons.collections.functors.NotPredicate; import org.apache.commons.lang.StringUtils; +import java.io.UnsupportedEncodingException; +import java.net.URLDecoder; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; import java.util.Objects; @@ -174,4 +178,26 @@ public static void addParamsToHideInternalType(SearchFilter searchFilter) { searchFilter.setParam(SearchFilter.PARAM_NOT_NAME, Constants.TYPE_NAME_INTERNAL); searchFilter.setParam(SearchFilter.PARAM_NOT_SUPERTYPE, Constants.TYPE_NAME_INTERNAL); } + + public static boolean validateFilePath(String fileToImport) { + + try { + String decodedPath = URLDecoder.decode(fileToImport, "UTF-8"); + + Path normalizedPath = Paths.get(decodedPath).normalize(); + if (decodedPath.contains("..") || decodedPath.contains("./") || decodedPath.contains(".\\")) { + return false; + } + + if (!normalizedPath.isAbsolute()) { + return false; + } + + return true; + } catch (UnsupportedEncodingException e) { + return false; + } catch (Exception e) { + return false; + } + } } diff --git a/repository/src/main/java/org/apache/atlas/services/MetricsService.java b/repository/src/main/java/org/apache/atlas/services/MetricsService.java index 5a8f445348..ef30675df1 100644 --- a/repository/src/main/java/org/apache/atlas/services/MetricsService.java +++ b/repository/src/main/java/org/apache/atlas/services/MetricsService.java @@ -98,7 +98,7 @@ public MetricsService(final AtlasGraph graph, final AtlasTypeRegistry typeRegist @SuppressWarnings("unchecked") @GraphTransaction public AtlasMetrics getMetrics() { - this.atlasGraph.setEnableCache(false); + final AtlasTypesDef typesDef = getTypesDef(); Collection entityDefs = typesDef.getEntityDefs(); diff --git a/repository/src/main/java/org/apache/atlas/tasks/AtlasTaskService.java b/repository/src/main/java/org/apache/atlas/tasks/AtlasTaskService.java index ba0fe1a4da..d8269633d4 100644 --- a/repository/src/main/java/org/apache/atlas/tasks/AtlasTaskService.java +++ b/repository/src/main/java/org/apache/atlas/tasks/AtlasTaskService.java @@ -148,7 +148,7 @@ public List createAtlasTasks(List tasks) throws AtlasBaseE if (!supportedTypes.contains(taskType)) { throw new AtlasBaseException(AtlasErrorCode.TASK_TYPE_NOT_SUPPORTED, task.getType()); } - if (isClassificationTaskType(taskType)) { + if (isClassificationTaskType(taskType) && !taskType.equals(ClassificationPropagateTaskFactory.CLEANUP_CLASSIFICATION_PROPAGATION)) { String classificationName = task.getClassificationName(); String entityGuid = task.getEntityGuid(); String classificationId = StringUtils.isEmpty(task.getClassificationId()) ? 
resolveAndReturnClassificationId(classificationName, entityGuid) : task.getClassificationId(); diff --git a/repository/src/main/java/org/apache/atlas/util/AtlasMetricsUtil.java b/repository/src/main/java/org/apache/atlas/util/AtlasMetricsUtil.java index 971c4d6404..beb90e67b1 100644 --- a/repository/src/main/java/org/apache/atlas/util/AtlasMetricsUtil.java +++ b/repository/src/main/java/org/apache/atlas/util/AtlasMetricsUtil.java @@ -194,11 +194,9 @@ public Map getStats() { private boolean getBackendStoreStatus(){ try { - boolean isCacheEnabled = this.graph.isCacheEnabled(); runWithTimeout(new Runnable() { @Override public void run() { - graph.setEnableCache(isCacheEnabled); graph.query().has(TYPE_NAME_PROPERTY_KEY, TYPE_NAME_INTERNAL).vertices(1); graphCommit(); @@ -219,11 +217,9 @@ private boolean getIndexStoreStatus(){ final String query = AtlasGraphUtilsV2.getIndexSearchPrefix() + "\"" + Constants.TYPE_NAME_PROPERTY_KEY + "\":(" + TYPE_NAME_INTERNAL + ")"; try { - boolean isCacheEnabled = this.graph.isCacheEnabled(); runWithTimeout(new Runnable() { @Override public void run() { - graph.setEnableCache(isCacheEnabled); graph.indexQuery(Constants.VERTEX_INDEX, query).vertices(0, 1); graphCommit(); diff --git a/repository/src/main/java/org/apache/atlas/util/lexoRank/LexoDecimal.java b/repository/src/main/java/org/apache/atlas/util/lexoRank/LexoDecimal.java new file mode 100644 index 0000000000..195970660a --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/util/lexoRank/LexoDecimal.java @@ -0,0 +1,178 @@ +package org.apache.atlas.util.lexoRank; + +import org.apache.atlas.util.lexoRank.system.LexoNumeralSystem; + +import java.util.Objects; + +public class LexoDecimal implements Comparable { + + private final LexoInteger mag; + private final int sig; + + private LexoDecimal(LexoInteger mag, int sig) { + this.mag = mag; + this.sig = sig; + } + + public static LexoDecimal half(LexoNumeralSystem sys) { + int mid = sys.getBase() / 2; + return make(LexoInteger.make(sys, 1, new int[] {mid}), 1); + } + + public static LexoDecimal parse(String str, LexoNumeralSystem system) { + int partialIndex = str.indexOf(system.getRadixPointChar()); + if (str.lastIndexOf(system.getRadixPointChar()) != partialIndex) + throw new IllegalArgumentException("More than one " + system.getRadixPointChar()); + + if (partialIndex < 0) return make(LexoInteger.parse(str, system), 0); + + String intStr = str.substring(0, partialIndex) + str.substring(partialIndex + 1); + return make(LexoInteger.parse(intStr, system), str.length() - 1 - partialIndex); + } + + public static LexoDecimal from(LexoInteger integer) { + return make(integer, 0); + } + + public static LexoDecimal make(LexoInteger integer, int sig) { + if (integer.isZero()) return new LexoDecimal(integer, 0); + + int zeroCount = 0; + + for (int i = 0; i < sig && integer.getMag(i) == 0; ++i) ++zeroCount; + + LexoInteger newInteger = integer.shiftRight(zeroCount); + int newSig = sig - zeroCount; + return new LexoDecimal(newInteger, newSig); + } + + public LexoNumeralSystem getSystem() { + return mag.getSystem(); + } + + public LexoDecimal add(LexoDecimal other) { + LexoInteger tMag = mag; + int tSig = sig; + LexoInteger oMag = other.mag; + + int oSig; + for (oSig = other.sig; tSig < oSig; ++tSig) tMag = tMag.shiftLeft(); + + while (tSig > oSig) { + oMag = oMag.shiftLeft(); + ++oSig; + } + + return make(tMag.add(oMag), tSig); + } + + public LexoDecimal subtract(LexoDecimal other) { + LexoInteger thisMag = mag; + int thisSig = sig; + LexoInteger otherMag = 
other.mag; + + int otherSig; + for (otherSig = other.sig; thisSig < otherSig; ++thisSig) thisMag = thisMag.shiftLeft(); + + while (thisSig > otherSig) { + otherMag = otherMag.shiftLeft(); + ++otherSig; + } + + return make(thisMag.subtract(otherMag), thisSig); + } + + public LexoDecimal multiply(LexoDecimal other) { + return make(mag.multiply(other.mag), sig + other.sig); + } + + public LexoInteger floor() { + return mag.shiftRight(sig); + } + + public LexoInteger ceil() { + if (isExact()) return mag; + + LexoInteger floor = floor(); + return floor.add(LexoInteger.one(floor.getSystem())); + } + + public boolean isExact() { + if (sig == 0) return true; + + for (int i = 0; i < sig; ++i) if (mag.getMag(i) != 0) return false; + + return true; + } + + public int getScale() { + return sig; + } + + public LexoDecimal setScale(int nSig) { + return setScale(nSig, false); + } + + public LexoDecimal setScale(int nSig, boolean ceiling) { + if (nSig >= sig) return this; + + if (nSig < 0) nSig = 0; + + int diff = sig - nSig; + LexoInteger nmag = mag.shiftRight(diff); + if (ceiling) nmag = nmag.add(LexoInteger.one(nmag.getSystem())); + + return make(nmag, nSig); + } + + public String format() { + String intStr = mag.format(); + if (sig == 0) return intStr; + + StringBuilder sb = new StringBuilder(intStr); + char head = sb.charAt(0); + boolean specialHead = + head == mag.getSystem().getPositiveChar() || head == mag.getSystem().getNegativeChar(); + if (specialHead) sb.delete(0, 1); + + while (sb.length() < sig + 1) sb.insert(0, mag.getSystem().toChar(0)); + + sb.insert(sb.length() - sig, mag.getSystem().getRadixPointChar()); + if (sb.length() - sig == 0) sb.insert(0, mag.getSystem().toChar(0)); + + if (specialHead) sb.insert(0, head); + + return sb.toString(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LexoDecimal that = (LexoDecimal) o; + return sig == that.sig && Objects.equals(mag, that.mag); + } + + @Override + public int hashCode() { + return Objects.hash(mag, sig); + } + + @Override + public String toString() { + return format(); + } + + @Override + public int compareTo(LexoDecimal lexoDecimal) { + if (Objects.equals(this, lexoDecimal)) return 0; + if (Objects.equals(null, lexoDecimal)) return 1; + + LexoInteger tMag = mag; + LexoInteger oMag = lexoDecimal.mag; + if (sig > lexoDecimal.sig) oMag = oMag.shiftLeft(sig - lexoDecimal.sig); + else if (sig < lexoDecimal.sig) tMag = tMag.shiftLeft(lexoDecimal.sig - sig); + + return tMag.compareTo(oMag); + } +} diff --git a/repository/src/main/java/org/apache/atlas/util/lexoRank/LexoInteger.java b/repository/src/main/java/org/apache/atlas/util/lexoRank/LexoInteger.java new file mode 100644 index 0000000000..187797a20b --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/util/lexoRank/LexoInteger.java @@ -0,0 +1,325 @@ +package org.apache.atlas.util.lexoRank; + + +import org.apache.atlas.util.lexoRank.system.LexoNumeralSystem; + +import java.util.Arrays; +import java.util.Objects; + +public class LexoInteger implements Comparable { + private static final int[] ZERO_MAG = {0}; + private static final int[] ONE_MAG = {1}; + private final int negativeSign = -1; + private final int zeroSign = 0; + private final int positiveSign = 1; + private final int[] mag; + private final int sign; + private final LexoNumeralSystem sys; + + private LexoInteger(LexoNumeralSystem system, int sign, int[] mag) { + sys = system; + this.sign = sign; + this.mag = mag; + } + + 
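+    // Illustrative usage sketch (assumes the Base36 numeral system defined elsewhere in this patch):
+    // the digit arrays below hold base-36 magnitudes little-endian, so a parse/add/format
+    // round-trip behaves like ordinary addition with carries, e.g.
+    //   LexoNumeralSystem base36 = new LexoNumeralSystem36();
+    //   LexoInteger z   = LexoInteger.parse("z", base36);   // value 35
+    //   LexoInteger one = LexoInteger.parse("1", base36);   // value 1
+    //   z.add(one).format();                                // "10", i.e. 36 in base 36
+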
private static int[] add(LexoNumeralSystem sys, int[] l, int[] r) { + int estimatedSize = Math.max(l.length, r.length); + int[] result = new int[estimatedSize]; + int carry = 0; + + for (int i = 0; i < estimatedSize; ++i) { + int lNum = i < l.length ? l[i] : 0; + int rNum = i < r.length ? r[i] : 0; + int sum = lNum + rNum + carry; + + for (carry = 0; sum >= sys.getBase(); sum -= sys.getBase()) ++carry; + + result[i] = sum; + } + + return extendWithCarry(result, carry); + } + + private static int[] extendWithCarry(int[] mag, int carry) { + int[] result = mag; + if (carry > 0) { + int[] extendedMag = new int[mag.length + 1]; + System.arraycopy(mag, 0, extendedMag, 0, mag.length); + extendedMag[extendedMag.length - 1] = carry; + result = extendedMag; + } + + return result; + } + + private static int[] subtract(LexoNumeralSystem sys, int[] l, int[] r) { + int[] rComplement = complement(sys, r, l.length); + int[] rSum = add(sys, l, rComplement); + rSum[rSum.length - 1] = 0; + return add(sys, rSum, ONE_MAG); + } + + private static int[] multiply(LexoNumeralSystem sys, int[] l, int[] r) { + int[] result = new int[l.length + r.length]; + + for (int li = 0; li < l.length; ++li) + for (int ri = 0; ri < r.length; ++ri) { + int resultIndex = li + ri; + + for (result[resultIndex] += l[li] * r[ri]; + result[resultIndex] >= sys.getBase(); + result[resultIndex] -= sys.getBase()) ++result[resultIndex + 1]; + } + + return result; + } + + private static int[] complement(LexoNumeralSystem sys, int[] mag, int digits) { + if (digits <= 0) throw new IllegalArgumentException("Expected at least 1 digit"); + + int[] nmag = new int[digits]; + + Arrays.fill(nmag, sys.getBase() - 1); + + for (int i = 0; i < mag.length; ++i) nmag[i] = sys.getBase() - 1 - mag[i]; + + return nmag; + } + + private static int compare(int[] l, int[] r) { + if (l.length < r.length) return -1; + + if (l.length > r.length) return 1; + + for (int i = l.length - 1; i >= 0; --i) { + if (l[i] < r[i]) return -1; + + if (l[i] > r[i]) return 1; + } + + return 0; + } + + public static LexoInteger parse(String strFull, LexoNumeralSystem system) { + String str = strFull; + int sign = 1; + if (strFull.indexOf(system.getPositiveChar()) == 0) { + str = strFull.substring(1); + } else if (strFull.indexOf(system.getNegativeChar()) == 0) { + str = strFull.substring(1); + sign = -1; + } + + int[] mag = new int[str.length()]; + int strIndex = mag.length - 1; + + for (int magIndex = 0; strIndex >= 0; ++magIndex) { + mag[magIndex] = system.toDigit(str.charAt(strIndex)); + --strIndex; + } + + return make(system, sign, mag); + } + + protected static LexoInteger zero(LexoNumeralSystem sys) { + return new LexoInteger(sys, 0, ZERO_MAG); + } + + protected static LexoInteger one(LexoNumeralSystem sys) { + return make(sys, 1, ONE_MAG); + } + + public static LexoInteger make(LexoNumeralSystem sys, int sign, int[] mag) { + int actualLength; + actualLength = mag.length; + while (actualLength > 0 && mag[actualLength - 1] == 0) { + --actualLength; + } + + if (actualLength == 0) return zero(sys); + + if (actualLength == mag.length) return new LexoInteger(sys, sign, mag); + + int[] nmag = new int[actualLength]; + System.arraycopy(mag, 0, nmag, 0, actualLength); + return new LexoInteger(sys, sign, nmag); + } + + public LexoInteger add(LexoInteger other) { + checkSystem(other); + if (isZero()) return other; + + if (other.isZero()) return this; + + if (sign != other.sign) { + LexoInteger pos; + if (sign == -1) { + pos = negate(); + LexoInteger val = pos.subtract(other); + return 
val.negate(); + } + + pos = other.negate(); + return subtract(pos); + } + + int[] result = add(sys, mag, other.mag); + return make(sys, sign, result); + } + + public LexoInteger subtract(LexoInteger other) { + checkSystem(other); + if (isZero()) return other.negate(); + + if (other.isZero()) return this; + + if (sign != other.sign) { + LexoInteger negate; + if (sign == -1) { + negate = negate(); + LexoInteger sum = negate.add(other); + return sum.negate(); + } + + negate = other.negate(); + return add(negate); + } + + int cmp = compare(mag, other.mag); + if (cmp == 0) return zero(sys); + + return cmp < 0 + ? make(sys, sign == -1 ? 1 : -1, subtract(sys, other.mag, mag)) + : make(sys, sign == -1 ? -1 : 1, subtract(sys, mag, other.mag)); + } + + public LexoInteger multiply(LexoInteger other) { + checkSystem(other); + if (isZero()) return this; + + if (other.isZero()) return other; + + if (isOneish()) return sign == other.sign ? make(sys, 1, other.mag) : make(sys, -1, other.mag); + + if (other.isOneish()) return sign == other.sign ? make(sys, 1, mag) : make(sys, -1, mag); + + int[] newMag = multiply(sys, mag, other.mag); + return sign == other.sign ? make(sys, 1, newMag) : make(sys, -1, newMag); + } + + public LexoInteger negate() { + return isZero() ? this : make(sys, sign == 1 ? -1 : 1, mag); + } + + public LexoInteger shiftLeft() { + return shiftLeft(1); + } + + public LexoInteger shiftLeft(int times) { + if (times == 0) return this; + + if (times < 0) return shiftRight(Math.abs(times)); + + int[] nmag = new int[mag.length + times]; + System.arraycopy(mag, 0, nmag, times, mag.length); + return make(sys, sign, nmag); + } + + public LexoInteger shiftRight() { + return shiftRight(1); + } + + public LexoInteger shiftRight(int times) { + if (mag.length - times <= 0) return zero(sys); + + int[] nmag = new int[mag.length - times]; + System.arraycopy(mag, times, nmag, 0, nmag.length); + return make(sys, sign, nmag); + } + + public LexoInteger complement() { + return complement(mag.length); + } + + private LexoInteger complement(int digits) { + return make(sys, sign, complement(sys, mag, digits)); + } + + public boolean isZero() { + return sign == 0 && mag.length == 1 && mag[0] == 0; + } + + private boolean isOneish() { + return mag.length == 1 && mag[0] == 1; + } + + public boolean isOne() { + return sign == 1 && mag.length == 1 && mag[0] == 1; + } + + public int getMag(int index) { + return mag[index]; + } + + public LexoNumeralSystem getSystem() { + return sys; + } + + private void checkSystem(LexoInteger other) { + if (!sys.getName().equals(other.sys.getName())) + throw new IllegalArgumentException("Expected numbers of same numeral sys"); + } + + public String format() { + if (isZero()) return String.valueOf(sys.toChar(0)); + StringBuilder sb = new StringBuilder(); + for (int digit : mag) { + sb.insert(0, sys.toChar(digit)); + } + if (sign == -1) sb.setCharAt(0, sys.getNegativeChar()); + + return sb.toString(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LexoInteger that = (LexoInteger) o; + return sign == that.sign && Arrays.equals(mag, that.mag) && Objects.equals(sys, that.sys); + } + + @Override + public int hashCode() { + int result = Objects.hash(negativeSign, zeroSign, positiveSign, sign, sys); + result = 31 * result + Arrays.hashCode(mag); + return result; + } + + @Override + public String toString() { + return format(); + } + + @Override + public int compareTo(LexoInteger lexoInteger) { 
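+        // Sign-aware ordering: a null argument sorts first (this compares greater), equal values
+        // return 0, any negative sorts below zero and positives, and when both operands are
+        // negative the magnitude comparison is inverted (larger magnitude means a smaller value).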
+ if (this.equals(lexoInteger)) return 0; + if (null == lexoInteger) return 1; + + if (sign == -1) { + if (lexoInteger.sign == -1) { + int cmp = compare(mag, lexoInteger.mag); + if (cmp == -1) return 1; + return cmp == 1 ? -1 : 0; + } + + return -1; + } + + if (sign == 1) return lexoInteger.sign == 1 ? compare(mag, lexoInteger.mag) : 1; + + if (lexoInteger.sign == -1) return 1; + + return lexoInteger.sign == 1 ? -1 : 0; + } +} diff --git a/repository/src/main/java/org/apache/atlas/util/lexoRank/LexoRank.java b/repository/src/main/java/org/apache/atlas/util/lexoRank/LexoRank.java new file mode 100644 index 0000000000..29a8bf2d41 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/util/lexoRank/LexoRank.java @@ -0,0 +1,287 @@ +package org.apache.atlas.util.lexoRank; + + + + +import org.apache.atlas.util.lexoRank.system.LexoNumeralSystem; +import org.apache.atlas.util.lexoRank.system.LexoNumeralSystem36; + +import java.util.Objects; + +public class LexoRank implements Comparable { + + public static final LexoNumeralSystem NUMERAL_SYSTEM = new LexoNumeralSystem36(); + private static final LexoDecimal ZERO_DECIMAL = LexoDecimal.parse("0", NUMERAL_SYSTEM); + private static final LexoDecimal ONE_DECIMAL = LexoDecimal.parse("1", NUMERAL_SYSTEM); + private static final LexoDecimal EIGHT_DECIMAL = LexoDecimal.parse("8", NUMERAL_SYSTEM); + private static final LexoDecimal MIN_DECIMAL = ZERO_DECIMAL; + + private static final LexoDecimal MAX_DECIMAL = + LexoDecimal.parse("1000000", NUMERAL_SYSTEM).subtract(ONE_DECIMAL); + + private static final LexoDecimal MID_DECIMAL = between(MIN_DECIMAL, MAX_DECIMAL); + private static final LexoDecimal INITIAL_MIN_DECIMAL = + LexoDecimal.parse("100000", NUMERAL_SYSTEM); + + private static final LexoDecimal INITIAL_MAX_DECIMAL = + LexoDecimal.parse( + NUMERAL_SYSTEM.toChar(NUMERAL_SYSTEM.getBase() - 2) + "00000", NUMERAL_SYSTEM); + + private final String value; + private final LexoRankBucket bucket; + private final LexoDecimal decimal; + + private LexoRank(String value) { + this.value = value; + String[] parts = this.value.split("\\|"); + bucket = LexoRankBucket.from(parts[0]); + decimal = LexoDecimal.parse(parts[1], NUMERAL_SYSTEM); + } + + private LexoRank(LexoRankBucket bucket, LexoDecimal dec) { + value = bucket.format() + "|" + formatDecimal(dec); + this.bucket = bucket; + decimal = dec; + } + + public static LexoRank min() { + return from(LexoRankBucket.BUCKET_0, MIN_DECIMAL); + } + + public static LexoRank max() { + return max(LexoRankBucket.BUCKET_0); + } + + public static LexoRank middle() { + LexoRank minLexoRank = min(); + return minLexoRank.between(max(minLexoRank.bucket)); + } + + public static LexoRank max(LexoRankBucket bucket) { + return from(bucket, MAX_DECIMAL); + } + + public static LexoRank initial(LexoRankBucket bucket) { + return bucket == LexoRankBucket.BUCKET_0 + ? 
from(bucket, INITIAL_MIN_DECIMAL) + : from(bucket, INITIAL_MAX_DECIMAL); + } + + private static LexoDecimal between(LexoDecimal oLeft, LexoDecimal oRight) { + if (oLeft.getSystem() != oRight.getSystem()) + throw new IllegalArgumentException("Expected same system"); + + LexoDecimal left = oLeft; + LexoDecimal right = oRight; + LexoDecimal nLeft; + if (oLeft.getScale() < oRight.getScale()) { + nLeft = oRight.setScale(oLeft.getScale(), false); + if (oLeft.compareTo(nLeft) >= 0) return middle(oLeft, oRight); + + right = nLeft; + } + + if (oLeft.getScale() > right.getScale()) { + nLeft = oLeft.setScale(right.getScale(), true); + if (nLeft.compareTo(right) >= 0) return middle(oLeft, oRight); + + left = nLeft; + } + + LexoDecimal nRight; + for (int scale = left.getScale(); scale > 0; right = nRight) { + int nScale1 = scale - 1; + LexoDecimal nLeft1 = left.setScale(nScale1, true); + nRight = right.setScale(nScale1, false); + int cmp = nLeft1.compareTo(nRight); + if (cmp == 0) return checkMid(oLeft, oRight, nLeft1); + + if (nLeft1.compareTo(nRight) > 0) break; + + scale = nScale1; + left = nLeft1; + } + + LexoDecimal mid = middle(oLeft, oRight, left, right); + + int nScale; + for (int mScale = mid.getScale(); mScale > 0; mScale = nScale) { + nScale = mScale - 1; + LexoDecimal nMid = mid.setScale(nScale); + if (oLeft.compareTo(nMid) >= 0 || nMid.compareTo(oRight) >= 0) break; + + mid = nMid; + } + + return mid; + } + + private static LexoDecimal middle( + LexoDecimal lBound, LexoDecimal rBound, LexoDecimal left, LexoDecimal right) { + LexoDecimal mid = middle(left, right); + return checkMid(lBound, rBound, mid); + } + + private static LexoDecimal checkMid(LexoDecimal lBound, LexoDecimal rBound, LexoDecimal mid) { + if (lBound.compareTo(mid) >= 0) return middle(lBound, rBound); + + return mid.compareTo(rBound) >= 0 ? 
middle(lBound, rBound) : mid; + } + + private static LexoDecimal middle(LexoDecimal left, LexoDecimal right) { + LexoDecimal sum = left.add(right); + LexoDecimal mid = sum.multiply(LexoDecimal.half(left.getSystem())); + int scale = Math.max(left.getScale(), right.getScale()); + if (mid.getScale() > scale) { + LexoDecimal roundDown = mid.setScale(scale, false); + if (roundDown.compareTo(left) > 0) return roundDown; + + LexoDecimal roundUp = mid.setScale(scale, true); + if (roundUp.compareTo(right) < 0) return roundUp; + } + + return mid; + } + + private static String formatDecimal(LexoDecimal dec) { + String formatVal = dec.format(); + StringBuilder val = new StringBuilder(formatVal); + int partialIndex = formatVal.indexOf(NUMERAL_SYSTEM.getRadixPointChar()); + char zero = NUMERAL_SYSTEM.toChar(0); + if (partialIndex < 0) { + partialIndex = formatVal.length(); + val.append(NUMERAL_SYSTEM.getRadixPointChar()); + } + + while (partialIndex < 6) { + val.insert(0, zero); + ++partialIndex; + } + + // TODO CHECK LOGIC + int valLength = val.length() - 1; + while (val.charAt(valLength) == zero) { + valLength = val.length() - 1; + } + + return val.toString(); + } + + public static LexoRank parse(String str) { + if (isNullOrWhiteSpace(str)) throw new IllegalArgumentException(str); + return new LexoRank(str); + } + + public static LexoRank from(LexoRankBucket bucket, LexoDecimal dec) { + if (!dec.getSystem().getName().equals(NUMERAL_SYSTEM.getName())) + throw new IllegalArgumentException("Expected different system"); + + return new LexoRank(bucket, dec); + } + + private static boolean isNullOrWhiteSpace(String string) { + return string == null || string.equals(" "); + } + + public LexoRankBucket getBucket() { + return bucket; + } + + public LexoDecimal getDecimal() { + return decimal; + } + + public int CompareTo(LexoRank other) { + if (Objects.equals(this, other)) return 0; + if (Objects.equals(null, other)) return 1; + return value.compareTo(other.value); + } + + public LexoRank genPrev() { + if (isMax()) return new LexoRank(bucket, INITIAL_MAX_DECIMAL); + + LexoInteger floorInteger = decimal.floor(); + LexoDecimal floorDecimal = LexoDecimal.from(floorInteger); + LexoDecimal nextDecimal = floorDecimal.subtract(EIGHT_DECIMAL); + if (nextDecimal.compareTo(MIN_DECIMAL) <= 0) nextDecimal = between(MIN_DECIMAL, decimal); + + return new LexoRank(bucket, nextDecimal); + } + + public LexoRank inNextBucket() { + return from(bucket.next(), decimal); + } + + public LexoRank inPrevBucket() { + return from(bucket.prev(), decimal); + } + + public boolean isMin() { + return decimal.equals(MIN_DECIMAL); + } + + public boolean isMax() { + return decimal.equals(MAX_DECIMAL); + } + + public String format() { + return value; + } + + public LexoRank genNext() { + if (isMin()) return new LexoRank(bucket, INITIAL_MIN_DECIMAL); + + LexoInteger ceilInteger = decimal.ceil(); + LexoDecimal ceilDecimal = LexoDecimal.from(ceilInteger); + LexoDecimal nextDecimal = ceilDecimal.add(EIGHT_DECIMAL); + if (nextDecimal.compareTo(MAX_DECIMAL) >= 0) nextDecimal = between(decimal, MAX_DECIMAL); + + return new LexoRank(bucket, nextDecimal); + } + + public LexoRank between(LexoRank other) { + if (!bucket.equals(other.bucket)) + throw new IllegalArgumentException("Between works only within the same bucket"); + + int cmp = decimal.compareTo(other.decimal); + if (cmp > 0) return new LexoRank(bucket, between(other.decimal, decimal)); + if (cmp == 0) + throw new IllegalArgumentException( + "Try to rank between issues with same rank this=" + + 
this + + " other=" + + other + + " this.decimal=" + + decimal + + " other.decimal=" + + other.decimal); + return new LexoRank(bucket, between(decimal, other.decimal)); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LexoRank lexoRank = (LexoRank) o; + return Objects.equals(value, lexoRank.value) + && Objects.equals(bucket, lexoRank.bucket) + && Objects.equals(decimal, lexoRank.decimal); + } + + @Override + public int hashCode() { + return Objects.hash(value, bucket, decimal); + } + + @Override + public String toString() { + return format(); + } + + @Override + public int compareTo(LexoRank lexoRank) { + if (Objects.equals(this, lexoRank)) return 0; + if (Objects.equals(null, lexoRank)) return 1; + return value.compareTo(lexoRank.value); + } +} diff --git a/repository/src/main/java/org/apache/atlas/util/lexoRank/LexoRankBucket.java b/repository/src/main/java/org/apache/atlas/util/lexoRank/LexoRankBucket.java new file mode 100644 index 0000000000..c3b60b4982 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/util/lexoRank/LexoRankBucket.java @@ -0,0 +1,83 @@ +package org.apache.atlas.util.lexoRank; + + +import java.util.Objects; + +public class LexoRankBucket { + + protected static final LexoRankBucket BUCKET_0 = new LexoRankBucket("0"); + protected static final LexoRankBucket BUCKET_1 = new LexoRankBucket("1"); + protected static final LexoRankBucket BUCKET_2 = new LexoRankBucket("2"); + + private static final LexoRankBucket[] VALUES = {BUCKET_0, BUCKET_1, BUCKET_2}; + + private final LexoInteger value; + + private LexoRankBucket(String val) { + value = LexoInteger.parse(val, LexoRank.NUMERAL_SYSTEM); + } + + public static LexoRankBucket resolve(int bucketId) { + for (LexoRankBucket bucket : VALUES) { + if (bucket.equals(from(String.valueOf(bucketId)))) return bucket; + } + + throw new IllegalArgumentException("No bucket found with id " + bucketId); + } + + public static LexoRankBucket from(String str) { + LexoInteger val = LexoInteger.parse(str, LexoRank.NUMERAL_SYSTEM); + + for (LexoRankBucket bucket : VALUES) { + if (bucket.value.equals(val)) return bucket; + } + + throw new IllegalArgumentException("Unknown bucket: " + str); + } + + public static LexoRankBucket min() { + return VALUES[0]; + } + + public static LexoRankBucket max() { + return VALUES[VALUES.length - 1]; + } + + public String format() { + return value.format(); + } + + public LexoRankBucket next() { + if (this == BUCKET_0) return BUCKET_1; + + if (this == BUCKET_1) return BUCKET_2; + + return this == BUCKET_2 ? BUCKET_0 : BUCKET_2; + } + + public LexoRankBucket prev() { + if (this == BUCKET_0) return BUCKET_2; + + if (this == BUCKET_1) return BUCKET_0; + + return this == BUCKET_2 ? 
BUCKET_1 : BUCKET_0; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LexoRankBucket that = (LexoRankBucket) o; + return Objects.equals(value, that.value); + } + + @Override + public String toString() { + return format(); + } + + @Override + public int hashCode() { + return Objects.hash(value); + } +} diff --git a/repository/src/main/java/org/apache/atlas/util/lexoRank/system/LexoNumeralSystem.java b/repository/src/main/java/org/apache/atlas/util/lexoRank/system/LexoNumeralSystem.java new file mode 100644 index 0000000000..a996079e3d --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/util/lexoRank/system/LexoNumeralSystem.java @@ -0,0 +1,18 @@ +package org.apache.atlas.util.lexoRank.system; + +public interface LexoNumeralSystem { + + String getName(); + + int getBase(); + + char getPositiveChar(); + + char getNegativeChar(); + + char getRadixPointChar(); + + int toDigit(char var1); + + char toChar(int var1); +} diff --git a/repository/src/main/java/org/apache/atlas/util/lexoRank/system/LexoNumeralSystem10.java b/repository/src/main/java/org/apache/atlas/util/lexoRank/system/LexoNumeralSystem10.java new file mode 100644 index 0000000000..f2c84e1a50 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/util/lexoRank/system/LexoNumeralSystem10.java @@ -0,0 +1,32 @@ +package org.apache.atlas.util.lexoRank.system; + +public class LexoNumeralSystem10 implements LexoNumeralSystem { + public String getName() { + return "Base10"; + } + + public int getBase() { + return 10; + } + + public char getPositiveChar() { + return '+'; + } + + public char getNegativeChar() { + return '-'; + } + + public char getRadixPointChar() { + return '.'; + } + + public int toDigit(char ch) { + if (ch >= '0' && ch <= '9') return ch - 48; + throw new IllegalArgumentException("Not valid digit: " + ch); + } + + public char toChar(int digit) { + return (char) (digit + 48); + } +} diff --git a/repository/src/main/java/org/apache/atlas/util/lexoRank/system/LexoNumeralSystem36.java b/repository/src/main/java/org/apache/atlas/util/lexoRank/system/LexoNumeralSystem36.java new file mode 100644 index 0000000000..731871842e --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/util/lexoRank/system/LexoNumeralSystem36.java @@ -0,0 +1,36 @@ +package org.apache.atlas.util.lexoRank.system; + + +public class LexoNumeralSystem36 implements LexoNumeralSystem { + private final char[] digits = "0123456789abcdefghijklmnopqrstuvwxyz".toCharArray(); + + public String getName() { + return "Base36"; + } + + public int getBase() { + return 36; + } + + public char getPositiveChar() { + return '+'; + } + + public char getNegativeChar() { + return '-'; + } + + public char getRadixPointChar() { + return ':'; + } + + public int toDigit(char ch) { + if (ch >= '0' && ch <= '9') return ch - 48; + if (ch >= 'a' && ch <= 'z') return ch - 97 + 10; + throw new IllegalArgumentException("Not valid digit: " + ch); + } + + public char toChar(int digit) { + return digits[digit]; + } +} diff --git a/repository/src/main/java/org/apache/atlas/util/lexoRank/system/LexoNumeralSystem64.java b/repository/src/main/java/org/apache/atlas/util/lexoRank/system/LexoNumeralSystem64.java new file mode 100644 index 0000000000..89b0af2390 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/util/lexoRank/system/LexoNumeralSystem64.java @@ -0,0 +1,41 @@ +package org.apache.atlas.util.lexoRank.system; + + +public class LexoNumeralSystem64 implements 
LexoNumeralSystem { + + private final char[] digits = + "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ^_abcdefghijklmnopqrstuvwxyz".toCharArray(); + + public String getName() { + return "Base64"; + } + + public int getBase() { + return 64; + } + + public char getPositiveChar() { + return '+'; + } + + public char getNegativeChar() { + return '-'; + } + + public char getRadixPointChar() { + return ':'; + } + + public int toDigit(char ch) { + if (ch >= '0' && ch <= '9') return ch - 48; + if (ch >= 'A' && ch <= 'Z') return ch - 65 + 10; + if (ch == '^') return 36; + if (ch == '_') return 37; + if (ch >= 'a' && ch <= 'z') return ch - 97 + 38; + throw new IllegalArgumentException("Not valid digit: " + ch); + } + + public char toChar(int digit) { + return digits[digit]; + } +} diff --git a/repository/src/main/resources/atlas-servicedef-atlas.json b/repository/src/main/resources/atlas-servicedef-atlas.json new file mode 100644 index 0000000000..0539a562b9 --- /dev/null +++ b/repository/src/main/resources/atlas-servicedef-atlas.json @@ -0,0 +1,502 @@ +{ + "id": 15, + "name": "atlas", + "displayName": "atlas", + "implClass": "org.apache.atlas.services.atlas.RangerServiceAtlas", + "label": "Atlas Metadata Server", + "description": "Atlas Metadata Server", + "guid": "311a79b7-16f5-46f4-9829-a0224b9999c5", + "resources": [ + { + "itemId": 1, + "name": "type-category", + "type": "string", + "level": 10, + "mandatory": true, + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": true, + "matcher": "org.apache.atlas.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "true" + }, + "label": "Type Catagory", + "description": "Type Catagory" + }, + { + "itemId": 2, + "name": "type", + "type": "string", + "level": 20, + "mandatory": true, + "parent": "type-category", + "isValidLeaf": true, + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": true, + "matcher": "org.apache.atlas.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "false" + }, + "label": "Type Name", + "description": "Type Name", + "accessTypeRestrictions": ["type-read" ,"type-create", "type-update", "type-delete" ] + }, + { + "itemId": 3, + "name": "entity-type", + "type": "string", + "level": 10, + "mandatory": true, + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": true, + "matcher": "org.apache.atlas.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "false" + }, + "label": "Entity Type", + "description": "Entity Type" + }, + { + "itemId": 4, + "name": "entity-classification", + "type": "string", + "level": 20, + "mandatory": true, + "parent": "entity-type", + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": true, + "matcher": "org.apache.atlas.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "false" + }, + "label": "Entity Classification", + "description": "Entity Classification" + }, + { + "itemId": 5, + "name": "entity", + "type": "string", + "level": 30, + "mandatory": true, + "parent": "entity-classification", + "isValidLeaf": true, + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": true, + "matcher": "org.apache.atlas.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "true" + }, + "label": 
"Entity ID", + "description": "Entity ID", + "accessTypeRestrictions": ["entity-read", "entity-create", "entity-update", "entity-delete"] + }, + { + "itemId": 6, + "name": "atlas-service", + "type": "string", + "level": 10, + "mandatory": true, + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": true, + "matcher": "org.apache.atlas.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "true" + }, + "label": "Atlas Service", + "description": "Atlas Service", + "accessTypeRestrictions": ["admin-import", "admin-export", "admin-purge", "admin-audits", "admin-entity-audits", "admin-repair-index", "admin-task-cud"] + }, + { + "itemId": 7, + "name": "relationship-type", + "type": "string", + "level": 10, + "mandatory": true, + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": true, + "matcher": "org.apache.atlas.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "false" + }, + "label": "Relationship Type", + "description": "Relationship Type" + }, + { + "itemId": 8, + "name": "end-one-entity-type", + "type": "string", + "level": 20, + "mandatory": true, + "parent": "relationship-type", + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": true, + "matcher": "org.apache.atlas.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "false" + }, + "label": "End1 Entity Type", + "description": "End1 Entity Type" + }, + { + "itemId": 9, + "name": "end-one-entity-classification", + "type": "string", + "level": 30, + "mandatory": true, + "parent": "end-one-entity-type", + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": true, + "matcher": "org.apache.atlas.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "false" + }, + "label": "End1 Entity Classification", + "description": "End1 Entity Classification" + }, + { + "itemId": 10, + "name": "end-one-entity", + "type": "string", + "level": 40, + "mandatory": true, + "parent": "end-one-entity-classification", + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": true, + "matcher": "org.apache.atlas.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "true" + }, + "label": "End1 Entity ID", + "description": "End1 Entity ID" + }, + { + "itemId": 11, + "name": "end-two-entity-type", + "type": "string", + "level": 50, + "mandatory": true, + "parent": "end-one-entity", + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": true, + "matcher": "org.apache.atlas.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "false" + }, + "label": "End2 Entity Type", + "description": "End2 Entity Type" + }, + { + "itemId": 12, + "name": "end-two-entity-classification", + "type": "string", + "level": 60, + "mandatory": true, + "parent": "end-two-entity-type", + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": true, + "matcher": "org.apache.atlas.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "false" + }, + "label": "End2 Entity Classification", + "description": "End2 Entity Classification" + }, + { + "itemId": 13, + "name": "end-two-entity", + "type": 
"string", + "level": 70, + "mandatory": true, + "parent": "end-two-entity-classification", + "isValidLeaf": true, + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": true, + "matcher": "org.apache.atlas.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "true" + }, + "label": "End2 Entity ID", + "description": "End2 Entity ID", + "accessTypeRestrictions": [ + "add-relationship", + "update-relationship", + "remove-relationship" + ] + }, + { + "itemId": 14, + "name": "entity-label", + "type": "string", + "level": 40, + "mandatory": true, + "parent": "entity", + "isValidLeaf": true, + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": true, + "matcher": "org.apache.atlas.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "true" + }, + "label": "Label", + "description": "Label", + "accessTypeRestrictions": [ + "entity-add-label", + "entity-remove-label" + ] + }, + { + "itemId": 15, + "name": "entity-business-metadata", + "type": "string", + "level": 40, + "mandatory": true, + "parent": "entity", + "isValidLeaf": true, + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": true, + "matcher": "org.apache.atlas.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "true" + }, + "label": "Business Metadata", + "description": "Business Metadata", + "accessTypeRestrictions": [ + "entity-update-business-metadata" + ] + }, + { + "itemId": 16, + "name": "classification", + "type": "string", + "level": 40, + "mandatory": true, + "parent": "entity", + "isValidLeaf": true, + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": true, + "matcher": "org.apache.atlas.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "false" + }, + "label": "Targetted classifications", + "description": "Targetted classifications", + "accessTypeRestrictions": [ + "entity-add-classification", + "entity-update-classification", + "entity-remove-classification" + ] + } + ], + "accessTypes": [ + { + "itemId": 1, + "name": "type-create", + "label": "Create Type", + "impliedGrants": + [ + "type-read" + ] + }, + { + "itemId": 2, + "name": "type-update", + "label": "Update Type", + "impliedGrants": + [ + "type-read" + ] + }, + { + "itemId": 3, + "name": "type-delete", + "label": "Delete Type", + "impliedGrants": + [ + "type-read" + ] + }, + { + "itemId": 4, + "name": "entity-read", + "label": "Read Entity" + }, + { + "itemId": 5, + "name": "entity-create", + "label": "Create Entity" + }, + { + "itemId": 6, + "name": "entity-update", + "label": "Update Entity" + }, + { + "itemId": 7, + "name": "entity-delete", + "label": "Delete Entity" + }, + { + "itemId": 8, + "name": "entity-add-classification", + "label": "Add Classification" + }, + { + "itemId": 9, + "name": "entity-update-classification", + "label": "Update Classification" + }, + { + "itemId": 10, + "name": "entity-remove-classification", + "label": "Remove Classification" + }, + { + "itemId": 11, + "name": "admin-export", + "label": "Admin Export" + }, + { + "itemId": 12, + "name": "admin-import", + "label": "Admin Import" + }, + { + "itemId": 13, + "name": "add-relationship", + "label": "Add Relationship" + }, + { + "itemId": 14, + "name": "update-relationship", + "label": "Update Relationship" + }, + { + "itemId": 15, + 
"name": "remove-relationship", + "label": "Remove Relationship" + }, + { + "itemId": 16, + "name": "admin-purge", + "label": "Admin Purge" + }, + { + "itemId": 17, + "name": "entity-add-label", + "label": "Add Label" + }, + { + "itemId": 18, + "name": "entity-remove-label", + "label": "Remove Label" + }, + { + "itemId": 19, + "name": "entity-update-business-metadata", + "label": "Update Business Metadata" + }, + { + "itemId": 20, + "name": "type-read", + "label": "Read Type" + }, + { + "itemId": 21, + "name": "admin-audits", + "label": "Admin Audits" + }, + { + "itemId": 22, + "name": "admin-entity-audits", + "label": "Admin Entity Audits" + }, + { + "itemId": 23, + "name": "admin-repair-index", + "label": "Admin Repair Index" + }, + { + "itemId": 24, + "name": "admin-task-cud", + "label": "Admin task CUD API" + } + + ], + "configs": [ + { + "itemId": 1, + "name": "username", + "type": "string", + "mandatory": true, + "label": "Username" + }, + { + "itemId": 2, + "name": "password", + "type": "password", + "mandatory": true, + "label": "Password" + }, + { + "itemId": 3, + "name": "atlas.rest.address", + "type": "string", + "mandatory": true, + "defaultValue": "http://localhost:21000" + }, + { + "itemId": 4, + "name": "commonNameForCertificate", + "type": "string", + "mandatory": false, + "label": "Common Name for Certificate" + }, + + { + "itemId": 5, + "name": "ranger.plugin.audit.filters", + "type": "string", + "subType": "", + "mandatory": false, + "validationRegEx":"", + "validationMessage": "", + "uiHint":"", + "label": "Ranger Default Audit Filters", + "defaultValue": "[ {'accessResult': 'DENIED', 'isAudited': true}, {'users':['atlas'] ,'isAudited':false} ]" + } + ], + "options": { + "enableDenyAndExceptionsInPolicies": "true" + } +} diff --git a/repository/src/test/java/org/apache/atlas/repository/migration/MigrationProgressServiceTest.java b/repository/src/test/java/org/apache/atlas/repository/migration/MigrationProgressServiceTest.java index 33125c86ca..b427988470 100644 --- a/repository/src/test/java/org/apache/atlas/repository/migration/MigrationProgressServiceTest.java +++ b/repository/src/test/java/org/apache/atlas/repository/migration/MigrationProgressServiceTest.java @@ -17,6 +17,7 @@ */ package org.apache.atlas.repository.migration; +import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.model.impexp.MigrationStatus; import org.apache.atlas.repository.graphdb.*; import org.apache.atlas.repository.graphdb.janus.migration.ReaderStatusManager; @@ -84,7 +85,11 @@ public void cachedStatusReturnedIfQueriedBeforeCacheExpiration() { } private MigrationProgressService getMigrationStatusForTest(Configuration cfg, TinkerGraph tg) { - return new MigrationProgressService(cfg, createMigrator(tg)); + try { + return new MigrationProgressService(cfg, createMigrator(tg)); + } catch (AtlasBaseException e) { + throw new RuntimeException(e); + } } @Test diff --git a/repository/src/test/java/org/apache/atlas/repository/util/FilterUtilTest.java b/repository/src/test/java/org/apache/atlas/repository/util/FilterUtilTest.java new file mode 100644 index 0000000000..8676093a79 --- /dev/null +++ b/repository/src/test/java/org/apache/atlas/repository/util/FilterUtilTest.java @@ -0,0 +1,36 @@ +package org.apache.atlas.repository.util; + +import org.junit.Test; + +import static org.apache.atlas.repository.util.FilterUtil.validateFilePath; +import static org.junit.Assert.*; + +public class FilterUtilTest { + @Test + public void testValidateFilePath() { + // Array of test cases, each 
containing the file path and the expected boolean result + Object[][] testCases = { + {"/var/app/allowed/file.txt", true, "Should return true for a valid path within the allowed directory."}, + {"/tmp/../notallowed/file.txt", false, "Should return false for a path attempting directory traversal."}, + {"/var/app/allowed/./file.txt", false, "Should return false for a path with relative current directory notation."}, + {"/Users/username/repos/repo0/.\\file.txt", false, "Should return false for a path with mixed slash types potentially bypassing checks."}, + {"tmp/file.txt", false, "Should return false for non-absolute paths."}, + {"", false, "Should return false for empty paths"}, + {"/var/app/allowed/..\\file.txt", false, "Should return false for paths with unusual characters aiming to navigate directories."}, + {"/Users/username/repos/repo0/%2e%2e/notallowed/file.txt", false, "Should return false for paths with URL-encoded traversal sequences."}, + {"/var/app/allowed/\0file.txt", false, "Should return false for paths that cause exceptions, like those containing null bytes."} + }; + + for (Object[] testCase : testCases) { + String path = (String) testCase[0]; + boolean expected = (Boolean) testCase[1]; + String message = (String) testCase[2]; + + if (expected) { + assertTrue(message, validateFilePath(path)); + } else { + assertFalse(message, validateFilePath(path)); + } + } + } +} diff --git a/server-api/src/main/java/org/apache/atlas/RequestContext.java b/server-api/src/main/java/org/apache/atlas/RequestContext.java index 1c7ccababb..30411235b8 100644 --- a/server-api/src/main/java/org/apache/atlas/RequestContext.java +++ b/server-api/src/main/java/org/apache/atlas/RequestContext.java @@ -47,6 +47,9 @@ public class RequestContext { private final Map updatedEntities = new HashMap<>(); private final Map deletedEntities = new HashMap<>(); private final Map restoreEntities = new HashMap<>(); + + + private Map lexoRankCache = null; private final Map entityCache = new HashMap<>(); private final Map entityHeaderCache = new HashMap<>(); private final Map entityExtInfoCache = new HashMap<>(); @@ -88,6 +91,11 @@ public class RequestContext { private boolean allowDeletedRelationsIndexsearch = false; private boolean includeMeanings = true; private boolean includeClassifications = true; + + private boolean includeClassificationNames = false; + + + private String lineageType = "DatasetProcessLineage"; private String currentTypePatchAction = ""; private AtlasTask currentTask; private String traceId; @@ -97,7 +105,6 @@ public class RequestContext { private boolean skipAuthorizationCheck = false; private Set deletedEdgesIdsForResetHasLineage = new HashSet<>(0); private String requestUri; - private boolean cacheEnabled; private boolean delayTagNotifications = false; private Map> deletedClassificationAndVertices = new HashMap<>(); @@ -160,6 +167,7 @@ public void clearCache() { this.requestContextHeaders.clear(); this.relationshipEndToVertexIdMap.clear(); this.relationshipMutationMap.clear(); + this.lexoRankCache = null; this.currentTask = null; this.skipAuthorizationCheck = false; this.delayTagNotifications = false; @@ -175,7 +183,7 @@ public void clearCache() { } if (CollectionUtils.isNotEmpty(applicationMetrics)) { if (Objects.nonNull(this.metricsRegistry)){ - this.metricsRegistry.collectIndexsearch(traceId, this.requestUri, applicationMetrics); + this.metricsRegistry.collectApplicationMetrics(traceId, this.requestUri, applicationMetrics); } applicationMetrics.clear(); } @@ -190,6 +198,13 @@ public void 
addApplicationMetrics(AtlasPerfMetrics.Metric metric) { this.applicationMetrics.add(metric); } + public String getLineageType() { + return lineageType; + } + + public void setLineageType(String lineageType) { + this.lineageType = lineageType; + } public void clearEntityCache() { this.entityCache.clear(); } @@ -711,12 +726,12 @@ public String getRequestUri() { return this.requestUri; } - public void setEnableCache(boolean cacheEnabled) { - this.cacheEnabled = cacheEnabled; + public boolean isIncludeClassificationNames() { + return includeClassificationNames; } - public boolean isCacheEnabled() { - return this.cacheEnabled; + public void setIncludeClassificationNames(boolean includeClassificationNames) { + this.includeClassificationNames = includeClassificationNames; } public class EntityGuidPair { @@ -778,4 +793,12 @@ public void clearMutationContext(String event) { public Map> getRelationshipMutationMap() { return relationshipMutationMap; } + + public Map getLexoRankCache() { + return lexoRankCache; + } + + public void setLexoRankCache(Map lexoRankCache) { + this.lexoRankCache = lexoRankCache; + } } \ No newline at end of file diff --git a/webapp/pom.xml b/webapp/pom.xml index 8783276f6d..35a0e8a010 100755 --- a/webapp/pom.xml +++ b/webapp/pom.xml @@ -552,7 +552,6 @@ org.apache.atlas atlas-testtools ${project.version} - test diff --git a/webapp/src/main/java/org/apache/atlas/Atlas.java b/webapp/src/main/java/org/apache/atlas/Atlas.java index 548ef6e4e7..21662f241c 100755 --- a/webapp/src/main/java/org/apache/atlas/Atlas.java +++ b/webapp/src/main/java/org/apache/atlas/Atlas.java @@ -48,6 +48,8 @@ import java.net.SocketException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; +import java.time.Duration; +import java.time.Instant; import java.util.*; import static org.apache.atlas.repository.Constants.INDEX_PREFIX; @@ -139,13 +141,17 @@ public static void main(String[] args) throws Exception { final boolean enableTLS = isTLSEnabled(enableTLSFlag, appPort); configuration.setProperty(SecurityProperties.TLS_ENABLED, String.valueOf(enableTLS)); + Instant start = Instant.now(); + showStartupInfo(buildConfiguration, enableTLS, appPort); if (configuration.getProperty("atlas.graph.index.search.backend").equals("elasticsearch")) { initElasticsearch(); + LOG.info("Starting service {} in {}", "elasticsearch", Duration.between(start, Instant.now()).toMillis()); } if (configuration.getString("atlas.authorizer.impl").equalsIgnoreCase("atlas")) { initAccessAuditElasticSearch(configuration); + LOG.info("Starting service {} in {}", "auditElasticsearch", Duration.between(start, Instant.now()).toMillis()); } server = EmbeddedServer.newServer(appHost, appPort, appPath, enableTLS); diff --git a/webapp/src/main/java/org/apache/atlas/web/filters/ActiveServerFilter.java b/webapp/src/main/java/org/apache/atlas/web/filters/ActiveServerFilter.java index cedf9c201a..7a30338425 100644 --- a/webapp/src/main/java/org/apache/atlas/web/filters/ActiveServerFilter.java +++ b/webapp/src/main/java/org/apache/atlas/web/filters/ActiveServerFilter.java @@ -21,6 +21,7 @@ import org.apache.atlas.AtlasConfiguration; import org.apache.atlas.AtlasErrorCode; import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.service.FeatureFlagStore; import org.apache.atlas.type.AtlasType; import org.apache.atlas.web.service.ActiveInstanceState; import org.apache.atlas.web.service.ServiceState; @@ -41,6 +42,8 @@ import javax.ws.rs.HttpMethod; import javax.ws.rs.core.HttpHeaders; import 
java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; import java.util.HashMap; /** @@ -56,9 +59,9 @@ public class ActiveServerFilter implements Filter { private static final Logger LOG = LoggerFactory.getLogger(ActiveServerFilter.class); private static final String MIGRATION_STATUS_STATIC_PAGE = "migration-status.html"; - private static final String[] WHITELISTED_APIS_SIGNATURE = {"search", "lineage", "auditSearch", "accessors" - , "evaluator"}; + , "evaluator", "featureFlag"}; + private static final String DISABLE_WRITE_FLAG = "disable_writes"; private final ActiveInstanceState activeInstanceState; private ServiceState serviceState; @@ -88,13 +91,15 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo FilterChain filterChain) throws IOException, ServletException { // If maintenance mode is enabled, return a 503 if (AtlasConfiguration.ATLAS_MAINTENANCE_MODE.getBoolean()) { - // Block all the POST, PUT, DELETE operations - HttpServletRequest request = (HttpServletRequest) servletRequest; - HttpServletResponse response = (HttpServletResponse) servletResponse; - if (isBlockedMethod(request.getMethod()) && !isWhitelistedAPI(request.getRequestURI())) { - LOG.error("Maintenance mode enabled. Blocking request: {}", request.getRequestURI()); - sendMaintenanceModeResponse(response); - return; // Stop further processing + if (FeatureFlagStore.evaluate(DISABLE_WRITE_FLAG, "true")) { + // Block all the POST, PUT, DELETE operations + HttpServletRequest request = (HttpServletRequest) servletRequest; + HttpServletResponse response = (HttpServletResponse) servletResponse; + if (isBlockedMethod(request.getMethod()) && !isWhitelistedAPI(request.getRequestURI())) { + LOG.error("Maintenance mode enabled. Blocking request: {}", request.getRequestURI()); + sendMaintenanceModeResponse(response); + return; // Stop further processing + } } } @@ -228,14 +233,31 @@ private void handleRedirect(HttpServletRequest servletRequest, HttpServletRespon requestURI = "/"; } String redirectLocation = activeServerAddress + requestURI; - LOG.info("Not active. Redirecting to {}", redirectLocation); + String sanitizedLocation = sanitizeRedirectLocation(redirectLocation); + LOG.info("Not active. Redirecting to {}", sanitizedLocation); // A POST/PUT/DELETE require special handling by sending HTTP 307 instead of the regular 301/302. 
// Reference: http://stackoverflow.com/questions/2068418/whats-the-difference-between-a-302-and-a-307-redirect if (isUnsafeHttpMethod(servletRequest)) { - httpServletResponse.setHeader(HttpHeaders.LOCATION, redirectLocation); + httpServletResponse.setHeader(HttpHeaders.LOCATION, sanitizedLocation); httpServletResponse.setStatus(HttpServletResponse.SC_TEMPORARY_REDIRECT); } else { - httpServletResponse.sendRedirect(redirectLocation); + httpServletResponse.sendRedirect(sanitizedLocation); + } + } + public static String sanitizeRedirectLocation(String redirectLocation) { + if (redirectLocation == null) return null; + try { + String preProcessedUrl = redirectLocation.replace("\r", "").replace("\n", ""); + + preProcessedUrl = preProcessedUrl.replaceAll("%(?![0-9a-fA-F]{2})", "%25"); + + String encodedUrl = URLEncoder.encode(preProcessedUrl, "UTF-8"); + + encodedUrl = encodedUrl.replaceAll("%25([0-9a-fA-F]{2})", "%$1"); + + return encodedUrl; + } catch (UnsupportedEncodingException e) { + throw new RuntimeException("UTF-8 encoding not supported", e); } } diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java index 3afd2b451d..03af22db0f 100755 --- a/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java +++ b/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java @@ -47,6 +47,7 @@ import org.apache.atlas.model.metrics.AtlasMetrics; import org.apache.atlas.model.patches.AtlasPatch.AtlasPatches; import org.apache.atlas.model.tasks.AtlasTask; +import org.apache.atlas.repository.Constants; import org.apache.atlas.repository.audit.AtlasAuditService; import org.apache.atlas.repository.impexp.AtlasServerService; import org.apache.atlas.repository.impexp.ExportImportAuditService; @@ -56,6 +57,7 @@ import org.apache.atlas.repository.impexp.ZipSink; import org.apache.atlas.repository.patches.AtlasPatchManager; import org.apache.atlas.repository.store.graph.AtlasEntityStore; +import org.apache.atlas.service.FeatureFlagStore; import org.apache.atlas.service.metrics.MetricsRegistry; import org.apache.atlas.services.MetricsService; import org.apache.atlas.tasks.TaskManagement; @@ -122,6 +124,8 @@ import static org.apache.atlas.AtlasErrorCode.DEPRECATED_API; import static org.apache.atlas.AtlasErrorCode.DISABLED_API; +import static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2.VERTEX_TYPE; import static org.apache.atlas.web.filters.AtlasCSRFPreventionFilter.CSRF_TOKEN; @@ -430,10 +434,23 @@ public Response healthCheck() { for (final HealthStatus healthStatus : healthStatuses) { result.put(healthStatus.name, healthStatus); } - - GraphTraversal t = graph.V().limit(1); - t.hasNext(); - result.put("cassandra", new HealthStatus("cassandra", "ok", true, new Date().toString(), "")); + Iterator vertices = graph.query() + .has(ENTITY_TYPE_PROPERTY_KEY, "AuthService") + .has(QUALIFIED_NAME, "auth_service_atlas") + .vertices().iterator(); + + if (vertices.hasNext()) { + // If vertices are found, assume Cassandra is OK. + result.put("cassandra", new HealthStatus("cassandra", "ok", true, new Date().toString(), "")); + } else { + // Fallback to alternate method to check Cassandra's status. 
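+                // A single-vertex traversal is enough to confirm the graph backend responds; if even
+                // that returns nothing, the exception thrown below is caught and surfaced as a
+                // "cassandra" error status in the health-check result.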
diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java
index 3afd2b451d..03af22db0f 100755
--- a/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java
+++ b/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java
@@ -47,6 +47,7 @@
 import org.apache.atlas.model.metrics.AtlasMetrics;
 import org.apache.atlas.model.patches.AtlasPatch.AtlasPatches;
 import org.apache.atlas.model.tasks.AtlasTask;
+import org.apache.atlas.repository.Constants;
 import org.apache.atlas.repository.audit.AtlasAuditService;
 import org.apache.atlas.repository.impexp.AtlasServerService;
 import org.apache.atlas.repository.impexp.ExportImportAuditService;
@@ -56,6 +57,7 @@
 import org.apache.atlas.repository.impexp.ZipSink;
 import org.apache.atlas.repository.patches.AtlasPatchManager;
 import org.apache.atlas.repository.store.graph.AtlasEntityStore;
+import org.apache.atlas.service.FeatureFlagStore;
 import org.apache.atlas.service.metrics.MetricsRegistry;
 import org.apache.atlas.services.MetricsService;
 import org.apache.atlas.tasks.TaskManagement;
@@ -122,6 +124,8 @@
 import static org.apache.atlas.AtlasErrorCode.DEPRECATED_API;
 import static org.apache.atlas.AtlasErrorCode.DISABLED_API;
+import static org.apache.atlas.repository.Constants.*;
+import static org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2.VERTEX_TYPE;
 import static org.apache.atlas.web.filters.AtlasCSRFPreventionFilter.CSRF_TOKEN;
@@ -430,10 +434,23 @@ public Response healthCheck() {
             for (final HealthStatus healthStatus : healthStatuses) {
                 result.put(healthStatus.name, healthStatus);
             }
-
-            GraphTraversal t = graph.V().limit(1);
-            t.hasNext();
-            result.put("cassandra", new HealthStatus("cassandra", "ok", true, new Date().toString(), ""));
+            Iterator vertices = graph.query()
+                    .has(ENTITY_TYPE_PROPERTY_KEY, "AuthService")
+                    .has(QUALIFIED_NAME, "auth_service_atlas")
+                    .vertices().iterator();
+
+            if (vertices.hasNext()) {
+                // If vertices are found, assume Cassandra is OK.
+                result.put("cassandra", new HealthStatus("cassandra", "ok", true, new Date().toString(), ""));
+            } else {
+                // Fallback to alternate method to check Cassandra's status.
+                GraphTraversal t = graph.V().limit(1);
+                if (t.hasNext()) {
+                    result.put("cassandra", new HealthStatus("cassandra", "ok", true, new Date().toString(), ""));
+                } else {
+                    throw new Exception("Cassandra check failed");
+                }
+            }
         } catch (Exception e) {
             result.put("cassandra", new HealthStatus("cassandra", "error", true, new Date().toString(), e.toString()));
             cassandraFailed = true;
@@ -930,6 +947,21 @@ public Map getDebugMetrics() {
         return debugMetricsRESTSink.getMetrics();
     }
+    @POST
+    @Path("featureFlag")
+    @Produces(MediaType.APPLICATION_JSON)
+    public void setFeatureFlag(@QueryParam("key") String key, @QueryParam("value") String value) throws AtlasBaseException {
+        AtlasAuthorizationUtils.verifyAccess(new AtlasAdminAccessRequest(AtlasPrivilege.ADMIN_FEATURE_FLAG_CUD), "featureFlag");
+        FeatureFlagStore.setFlag(key, value);
+    }
+
+    @DELETE
+    @Path("featureFlag/{flag}")
+    @Produces(MediaType.APPLICATION_JSON)
+    public void deleteFeatureFlag(@PathParam("flag") String key) throws AtlasBaseException {
+        AtlasAuthorizationUtils.verifyAccess(new AtlasAdminAccessRequest(AtlasPrivilege.ADMIN_FEATURE_FLAG_CUD), "featureFlag");
+        FeatureFlagStore.deleteFlag(key);
+    }
     private String getEditableEntityTypes(Configuration config) {
         String ret = DEFAULT_EDITABLE_ENTITY_TYPES;
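The two new admin endpoints make the disable_writes gate operable at runtime. A hedged HTTP usage sketch: the host, port, /api/atlas/admin base path and authentication are assumptions; only the featureFlag sub-paths and query parameters come from this diff, and the caller needs the ADMIN_FEATURE_FLAG_CUD privilege checked above.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class FeatureFlagAdminClientSketch {
        public static void main(String[] args) throws Exception {
            String base = "http://localhost:21000/api/atlas/admin/featureFlag"; // placeholder base URL
            HttpClient client = HttpClient.newHttpClient();

            // Turn write-blocking on while maintenance mode is enabled.
            HttpRequest set = HttpRequest.newBuilder(URI.create(base + "?key=disable_writes&value=true"))
                    .POST(HttpRequest.BodyPublishers.noBody())
                    .build();
            System.out.println("set -> " + client.send(set, HttpResponse.BodyHandlers.ofString()).statusCode());

            // Remove the flag once maintenance is over.
            HttpRequest delete = HttpRequest.newBuilder(URI.create(base + "/disable_writes")).DELETE().build();
            System.out.println("delete -> " + client.send(delete, HttpResponse.BodyHandlers.ofString()).statusCode());
        }
    }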
diff --git a/webapp/src/main/java/org/apache/atlas/web/rest/DiscoveryREST.java b/webapp/src/main/java/org/apache/atlas/web/rest/DiscoveryREST.java
index 7717baa49a..590e3cb0bf 100644
--- a/webapp/src/main/java/org/apache/atlas/web/rest/DiscoveryREST.java
+++ b/webapp/src/main/java/org/apache/atlas/web/rest/DiscoveryREST.java
@@ -94,6 +94,7 @@ public class DiscoveryREST {
     private static final String INDEXSEARCH_TAG_NAME = "indexsearch";
     private static final Set TRACKING_UTM_TAGS = new HashSet<>(Arrays.asList("ui_main_list", "ui_popup_searchbar"));
+    private static final String UTM_TAG_FROM_PRODUCT = "project_webapp";
 
     @Inject
     public DiscoveryREST(AtlasTypeRegistry typeRegistry, AtlasDiscoveryService discoveryService,
@@ -393,6 +394,7 @@ public AtlasSearchResult indexSearch(@Context HttpServletRequest servletRequest,
         RequestContext.get().setIncludeMeanings(!parameters.isExcludeMeanings());
         RequestContext.get().setIncludeClassifications(!parameters.isExcludeClassifications());
+        RequestContext.get().setIncludeClassificationNames(parameters.isIncludeClassificationNames());
         try {
             if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                 perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "DiscoveryREST.indexSearch(" + parameters + ")");
@@ -435,12 +437,16 @@ public AtlasSearchResult indexSearch(@Context HttpServletRequest servletRequest,
             if(CollectionUtils.isNotEmpty(parameters.getUtmTags())) {
                 AtlasPerfMetrics.Metric indexsearchMetric = new AtlasPerfMetrics.Metric(INDEXSEARCH_TAG_NAME);
                 indexsearchMetric.addTag("utmTag", "other");
+                indexsearchMetric.addTag("source", "other");
                 for (String utmTag : parameters.getUtmTags()) {
                     if (TRACKING_UTM_TAGS.contains(utmTag)) {
                         indexsearchMetric.addTag("utmTag", utmTag);
                         break;
                     }
                 }
+                if (parameters.getUtmTags().contains(UTM_TAG_FROM_PRODUCT)) {
+                    indexsearchMetric.addTag("source", UTM_TAG_FROM_PRODUCT);
+                }
                 indexsearchMetric.addTag("name", INDEXSEARCH_TAG_NAME);
                 indexsearchMetric.setTotalTimeMSecs(System.currentTimeMillis() - startTime);
                 RequestContext.get().addApplicationMetrics(indexsearchMetric);
diff --git a/webapp/src/main/java/org/apache/atlas/web/rest/EntityREST.java b/webapp/src/main/java/org/apache/atlas/web/rest/EntityREST.java
index ee6846fd8f..cdb5db1d10 100644
--- a/webapp/src/main/java/org/apache/atlas/web/rest/EntityREST.java
+++ b/webapp/src/main/java/org/apache/atlas/web/rest/EntityREST.java
@@ -97,6 +97,8 @@ public class EntityREST {
     private static final int TWO_MILLION = HUNDRED_THOUSAND * 10 * 2;
     private static final Set ATTRS_WITH_TWO_MILLION_LIMIT = new HashSet() {{
         add("rawQueryText");
+        add("variablesSchemaBase64");
+        add("visualBuilderSchemaBase64");
     }};
@@ -893,7 +895,7 @@ public EntityMutationResponse createOrUpdate(AtlasEntitiesWithExtInfo entities,
                                                  @QueryParam("replaceBusinessAttributes") @DefaultValue("false") boolean replaceBusinessAttributes,
                                                  @QueryParam("overwriteBusinessAttributes") @DefaultValue("false") boolean isOverwriteBusinessAttributes) throws AtlasBaseException {
         AtlasPerfTracer perf = null;
-        RequestContext.get().setEnableCache(false);
+
         try {
             if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                 perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntityREST.createOrUpdate(entityCount=" +
@@ -1274,6 +1276,25 @@ public void setClassifications(AtlasEntityHeaders entityHeaders) throws AtlasBas
         }
     }
+    @POST
+    @Path("repairClassificationsMappings/{guid}")
+    @Produces(Servlets.JSON_MEDIA_TYPE)
+    @Consumes(Servlets.JSON_MEDIA_TYPE)
+    @Timed
+    public void repairClassifications(@PathParam("guid") String guid) throws AtlasBaseException {
+        AtlasPerfTracer perf = null;
+
+        try {
+            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
+                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntityREST.repairClassifications()");
+            }
+
+            entitiesStore.repairClassificationMappings(guid);
+        } finally {
+            AtlasPerfTracer.log(perf);
+        }
+    }
+
     @POST
     @Path("/guid/{guid}/businessmetadata")
     @Produces(Servlets.JSON_MEDIA_TYPE)
@@ -1911,4 +1932,29 @@ public void repairIndexByTypeName(@PathParam("typename") String typename, @Query
             AtlasPerfTracer.log(perf);
         }
     }
+
+    @POST
+    @Path("/repair/accesscontrolAlias/{guid}")
+    @Timed
+    public void repairAccessControlAlias(@PathParam("guid") String guid) throws AtlasBaseException {
+        Servlets.validateQueryParamLength("guid", guid);
+
+        AtlasPerfTracer perf = null;
+
+
+        try {
+            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
+                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntityREST.repairAccessControlAlias");
+            }
+
+            entitiesStore.repairAccesscontrolAlias(guid);
+
+            LOG.info("Repaired access control alias for entity with guid {}", guid);
+
+        } finally {
+            AtlasPerfTracer.log(perf);
+        }
+
+
+    }
 }
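A side note on ATTRS_WITH_TWO_MILLION_LIMIT above: the double-brace initializer creates an anonymous HashSet subclass and leaves the set mutable. An equivalent alternative with the same three attribute names (suggestion only, not part of the patch):

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Set;

    class AttrLimitsSketch {
        // Same contents as the patch, without the per-instance anonymous subclass.
        static final Set<String> ATTRS_WITH_TWO_MILLION_LIMIT = Collections.unmodifiableSet(
                new HashSet<>(Arrays.asList("rawQueryText", "variablesSchemaBase64", "visualBuilderSchemaBase64")));
    }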
diff --git a/webapp/src/main/java/org/apache/atlas/web/rest/LineageREST.java b/webapp/src/main/java/org/apache/atlas/web/rest/LineageREST.java
index 829b9aaf28..05ae292c10 100644
--- a/webapp/src/main/java/org/apache/atlas/web/rest/LineageREST.java
+++ b/webapp/src/main/java/org/apache/atlas/web/rest/LineageREST.java
@@ -48,6 +48,7 @@
 import javax.ws.rs.core.MediaType;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Objects;
 
 /**
  * REST interface for an entity's lineage information
@@ -93,7 +94,7 @@ public LineageREST(AtlasTypeRegistry typeRegistry, AtlasLineageService atlasLine
     @Consumes(Servlets.JSON_MEDIA_TYPE)
     @Produces(Servlets.JSON_MEDIA_TYPE)
     @Timed
-    public AtlasLineageOnDemandInfo getLineageGraph(@PathParam("guid") String guid,
+    public AtlasLineageOnDemandInfo getLineageGraph(@PathParam("guid") String guid, @QueryParam("lineageType") String lineageType,
                                                     LineageOnDemandRequest lineageOnDemandRequest) throws AtlasBaseException {
         if (!AtlasConfiguration.LINEAGE_ON_DEMAND_ENABLED.getBoolean()) {
             LOG.warn("LineageREST: "+ AtlasErrorCode.LINEAGE_ON_DEMAND_NOT_ENABLED.getFormattedErrorMessage(AtlasConfiguration.LINEAGE_ON_DEMAND_ENABLED.getPropertyName()));
@@ -109,7 +110,9 @@ public AtlasLineageOnDemandInfo getLineageGraph(@PathParam("guid") String guid,
             if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
                 perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "LineageREST.getOnDemandLineageGraph(" + guid + "," + lineageOnDemandRequest + ")");
             }
-
+            if(Objects.nonNull(lineageType)) {
+                RequestContext.get().setLineageType(lineageType);
+            }
             return atlasLineageService.getAtlasLineageInfo(guid, lineageOnDemandRequest);
         } finally {
             AtlasPerfTracer.log(perf);
@@ -128,7 +131,7 @@ public AtlasLineageOnDemandInfo getLineageGraph(@PathParam("guid") String guid,
     @Consumes(Servlets.JSON_MEDIA_TYPE)
     @Produces(Servlets.JSON_MEDIA_TYPE)
     @Timed
-    public AtlasLineageListInfo getLineageList(LineageListRequest lineageListRequest) throws AtlasBaseException {
+    public AtlasLineageListInfo getLineageList(@QueryParam("lineageType") String lineageType, LineageListRequest lineageListRequest) throws AtlasBaseException {
         lineageListRequestValidator.validate(lineageListRequest);
 
         String guid = lineageListRequest.getGuid();
@@ -141,7 +144,9 @@ public AtlasLineageListInfo getLineageList(LineageListRequest lineageListRequest
         try {
             if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG))
                 perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "LineageREST.getLineageList(" + guid + "," + lineageListRequest + ")");
-
+            if(Objects.nonNull(lineageType)) {
+                RequestContext.get().setLineageType(lineageType);
+            }
             return atlasLineageService.getLineageListInfoOnDemand(guid, lineageListRequest);
         } finally {
             AtlasPerfTracer.log(perf);
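Both lineage endpoints now accept an optional lineageType query parameter that is stashed on RequestContext when non-null. If empty strings should also be ignored, a slightly stricter guard is possible (suggestion only; assumes org.apache.commons.lang.StringUtils, which is already imported elsewhere in this diff):

    // Sketch of a stricter null/empty guard for the new parameter:
    if (StringUtils.isNotEmpty(lineageType)) {
        RequestContext.get().setLineageType(lineageType);
    }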
diff --git a/webapp/src/main/java/org/apache/atlas/web/rest/MigrationREST.java b/webapp/src/main/java/org/apache/atlas/web/rest/MigrationREST.java
index b491bb88ce..551d0f4aa2 100644
--- a/webapp/src/main/java/org/apache/atlas/web/rest/MigrationREST.java
+++ b/webapp/src/main/java/org/apache/atlas/web/rest/MigrationREST.java
@@ -3,6 +3,7 @@
 import org.apache.atlas.AtlasErrorCode;
 import org.apache.atlas.RequestContext;
 import org.apache.atlas.annotation.Timed;
+import org.apache.atlas.discovery.EntityDiscoveryService;
 import org.apache.atlas.exception.AtlasBaseException;
 import org.apache.atlas.model.discovery.IndexSearchParams;
 import org.apache.atlas.model.instance.AtlasEntity;
@@ -10,10 +11,12 @@
 import org.apache.atlas.repository.graph.GraphHelper;
 import org.apache.atlas.repository.graphdb.*;
 import org.apache.atlas.repository.store.graph.AtlasEntityStore;
-import org.apache.atlas.repository.store.graph.v2.AtlasEntityStream;
-import org.apache.atlas.repository.store.graph.v2.EntityStream;
+import org.apache.atlas.repository.store.graph.v2.*;
+import org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils;
 import org.apache.atlas.repository.store.users.KeycloakStore;
+import org.apache.atlas.service.redis.RedisService;
 import org.apache.atlas.transformer.PreProcessorPoliciesTransformer;
+import org.apache.atlas.type.AtlasTypeRegistry;
 import org.apache.atlas.utils.AtlasPerfTracer;
 import org.apache.atlas.v1.model.instance.Id;
 import org.apache.atlas.web.util.Servlets;
@@ -35,6 +38,7 @@
 import static org.apache.atlas.auth.client.keycloak.AtlasKeycloakClient.getKeycloakClient;
 import static org.apache.atlas.repository.Constants.*;
+import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.*;
 
 @Path("migration")
 @Singleton
@@ -54,12 +58,115 @@ public class MigrationREST {
     private KeycloakStore keycloakStore;
     private AtlasGraph graph;
+    private final EntityGraphRetriever entityRetriever;
+    private final RedisService redisService;
+    protected final AtlasTypeRegistry typeRegistry;
+    private final EntityDiscoveryService discovery;
+
+    private final TransactionInterceptHelper transactionInterceptHelper;
+
     @Inject
-    public MigrationREST(AtlasEntityStore entityStore, AtlasGraph graph) {
+    public MigrationREST(AtlasEntityStore entityStore, AtlasGraph graph, RedisService redisService, EntityDiscoveryService discovery,
+                         EntityGraphRetriever entityRetriever, AtlasTypeRegistry typeRegistry, TransactionInterceptHelper transactionInterceptHelper) {
         this.entityStore = entityStore;
         this.graph = graph;
         this.transformer = new PreProcessorPoliciesTransformer();
         keycloakStore = new KeycloakStore();
+        this.redisService = redisService;
+        this.discovery = discovery;
+        this.entityRetriever = entityRetriever;
+        this.typeRegistry = typeRegistry;
+        this.transactionInterceptHelper = transactionInterceptHelper;
+    }
+
+    @POST
+    @Path("submit")
+    @Timed
+    public Boolean submit (@QueryParam("migrationType") String migrationType, @QueryParam("forceMigration") boolean forceMigration) throws Exception {
+        AtlasPerfTracer perf = null;
+        MigrationService migrationService;
+
+        try {
+            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
+                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "MigrationREST.submit(" + migrationType + ")");
+            }
+
+            migrationType = MIGRATION_TYPE_PREFIX + migrationType;
+
+            isMigrationInProgress(migrationType);
+
+            switch (migrationType) {
+                case DATA_MESH_QN:
+                    migrationService = new DataMeshQNMigrationService(entityStore, discovery, entityRetriever, typeRegistry, transactionInterceptHelper, redisService, forceMigration);
+                    break;
+
+                default:
+                    throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Type of migration is not valid: " + migrationType);
+            }
+
+            Thread migrationThread = new Thread(migrationService);
+            migrationThread.start();
+
+        } catch (Exception e) {
+            LOG.error("Error while submitting migration", e);
+            return Boolean.FALSE;
+        } finally {
+            AtlasPerfTracer.log(perf);
+        }
+        return Boolean.TRUE;
+    }
+
+    private void isMigrationInProgress(String migrationType) throws AtlasBaseException {
+        String status = redisService.getValue(migrationType);
+        if (PreProcessorUtils.MigrationStatus.IN_PROGRESS.name().equals(status)) {
+            throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST,
+                    String.format("Migration for %s is already in progress", migrationType));
+        }
+    }
+
+    @GET
+    @Path("status")
+    @Timed
+    public String getMigrationStatus(@QueryParam("migrationType") String migrationType) throws Exception {
+        AtlasPerfTracer perf = null;
+
+        try {
+            if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
+                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "MigrationREST.getMigrationStatus(" + migrationType + ")");
+            }
+
+            String value = redisService.getValue(MIGRATION_TYPE_PREFIX + migrationType);
+
value : "No Migration Found with this key"; + } catch (Exception e) { + LOG.error("Error while fetching status for migration", e); + throw e; + } finally { + AtlasPerfTracer.log(perf); + } + } + + @POST + @Path("dataproduct/inputs-outputs") + @Timed + public Boolean migrateProductInternalAttr (@QueryParam("guid") String guid) throws Exception { + AtlasPerfTracer perf = null; + + try { + if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { + perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "MigrationREST.migrateProductInternalAttr(" + guid + ")"); + } + + DataProductInputsOutputsMigrationService migrationService = new DataProductInputsOutputsMigrationService(entityRetriever, guid, transactionInterceptHelper); + migrationService.migrateProduct(); + + } catch (Exception e) { + LOG.error("Error while migration inputs/outputs for Dataproduct: {}", guid, e); + throw e; + } finally { + AtlasPerfTracer.log(perf); + } + return Boolean.TRUE; } @POST diff --git a/webapp/src/main/java/org/apache/atlas/web/service/AtlasDebugMetricsSink.java b/webapp/src/main/java/org/apache/atlas/web/service/AtlasDebugMetricsSink.java index ef24b861ea..ec43d3e141 100644 --- a/webapp/src/main/java/org/apache/atlas/web/service/AtlasDebugMetricsSink.java +++ b/webapp/src/main/java/org/apache/atlas/web/service/AtlasDebugMetricsSink.java @@ -19,6 +19,7 @@ import org.apache.atlas.web.model.DebugMetrics; import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.hbase.shaded.org.apache.commons.configuration2.SubsetConfiguration; import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.MetricsRecord; import org.apache.hadoop.metrics2.MetricsSink; @@ -58,10 +59,6 @@ public HashMap getMetrics() { return metricStructuredSnapshot; } - @Override - public void init(org.apache.commons.configuration2.SubsetConfiguration subsetConfiguration) { - } - @Override public void flush() { } @@ -112,4 +109,9 @@ private void updateMetricType(DebugMetrics debugMetrics, String metricType, Abst private static String inferMeasureType(String fullName, String nameWithoutMetricType) { return fullName.replaceFirst(nameWithoutMetricType, ""); } + + @Override + public void init(SubsetConfiguration subsetConfiguration) { + + } } \ No newline at end of file diff --git a/webapp/src/test/java/org/apache/atlas/web/filters/MetaStoreActiveServerFilterTest.java b/webapp/src/test/java/org/apache/atlas/web/filters/MetaStoreActiveServerFilterTest.java new file mode 100644 index 0000000000..5119ba7cc6 --- /dev/null +++ b/webapp/src/test/java/org/apache/atlas/web/filters/MetaStoreActiveServerFilterTest.java @@ -0,0 +1,39 @@ +package org.apache.atlas.web.filters; + +import org.junit.Test; + + +import static org.apache.atlas.web.filters.ActiveServerFilter.sanitizeRedirectLocation; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +public class MetaStoreActiveServerFilterTest { + + @Test + public void testSanitizeRedirectLocation() { + Object[][] testCases = { + {"https://dom-sub-uat.atlan.com/api/meta/entity/guid/fd7a69c9-738b-4b35-a0db-1da00cbd86cd", "https%3A%2F%2Fdom-sub-uat.atlan.com%2Fapi%2Fmeta%2Fentity%2Fguid%2Ffd7a69c9-738b-4b35-a0db-1da00cbd86cd"}, + {"https://datamesh.atlan.com/api/meta/entity/bulk?replaceBusinessAttributes=true&replaceClassifications=true", "https%3A%2F%2Fdatamesh.atlan.com%2Fapi%2Fmeta%2Fentity%2Fbulk%3FreplaceBusinessAttributes%3Dtrue%26replaceClassifications%3Dtrue"}, + {"http://example.com/page?param=value&another=one", 
"http%3A%2F%2Fexample.com%2Fpage%3Fparam%3Dvalue%26another%3Done"}, + {"http://example.com/page?param=value%Set-Cookie: test=evil", "http%3A%2F%2Fexample.com%2Fpage%3Fparam%3Dvalue%25Set-Cookie%3A+test%3Devil"}, + {"http://example.com/search?query=value\n", "http%3A%2F%2Fexample.com%2Fsearch%3Fquery%3Dvalue%3Cscript%3Ealert%28%27xss%27%29%3C%2Fscript%3E"}, + {"http://example.com/update?action=edit%HTTP/1.1 200 OKContent-Type: text/html", "http%3A%2F%2Fexample.com%2Fupdate%3Faction%3Dedit%25HTTP%2F1.1+200+OKContent-Type%3A+text%2Fhtml"}, + {"http://example.com/login?redirect=success%Set-Cookie: sessionId=12345", "http%3A%2F%2Fexample.com%2Flogin%3Fredirect%3Dsuccess%25Set-Cookie%3A+sessionId%3D12345"}, + {"http://example.com/page\r", "http%3A%2F%2Fexample.com%2Fpage"}, + {"http://example.com/page?next=url%0D%0AContent-Length: %300", "http%3A%2F%2Fexample.com%2Fpage%3Fnext%3Durl%0D%0AContent-Length%3A+%300"}, + {null, null} // Testing for null input + }; + + for (Object[] testCase : testCases) { + String input = (String) testCase[0]; + String expected = (String) testCase[1]; + + if (input == null) { + assertNull("Output should be null for null input.", sanitizeRedirectLocation(input)); + } else { + assertEquals("URLs should be correctly sanitized.", expected, sanitizeRedirectLocation(input)); + } + } + } + +}