diff --git a/.github/workflows/chart-release-dispatcher.yaml b/.github/workflows/chart-release-dispatcher.yaml index 8e325cd3277..10fc6cbdb00 100644 --- a/.github/workflows/chart-release-dispatcher.yaml +++ b/.github/workflows/chart-release-dispatcher.yaml @@ -26,12 +26,42 @@ jobs: run: | echo "branch=${{ github.event.workflow_run.head_branch }}" >> $GITHUB_OUTPUT + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v3 + with: + token: ${{ secrets.my_pat }} + ref: ${{ steps.extract_branch.outputs.branch }} + fetch-depth: 0 + + - name: Get SHA of the branch + id: get_sha + run: | + branch_name=${{ steps.extract_branch.outputs.branch }} + sha=$(git rev-parse "refs/heads/$branch_name") + echo "GIT_SHA: $sha" + echo "sha=${sha}" >> $GITHUB_OUTPUT + - name: Extract Repository Name id: extract_repo_name run: | repo_name=$(basename $GITHUB_REPOSITORY) echo "repo_name=${repo_name}" >> $GITHUB_OUTPUT + - name: Get PR url and PR User + id: get_pr_url_user + run: | + head_sha=$(curl -s -H "Authorization: Bearer ${{ secrets.my_pat }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }}/jobs" | jq -r '.jobs[0].head_sha') + echo "Head SHA: $head_sha" + pr_url=$(curl -s -H "Authorization: Bearer ${{ secrets.my_pat }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/search/issues?q=sha:$head_sha+type:pr" | jq -r '.items[0].html_url') + pr_user=$(curl -s -H "Authorization: Bearer ${{ secrets.my_pat }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/search/issues?q=sha:$head_sha+type:pr" | jq -r '.items[0].user.login') + echo "pr_url=$pr_url" >> $GITHUB_OUTPUT + echo "pr_user=$pr_user" >> $GITHUB_OUTPUT + + - name: echo PR_URL and PR_USER + run: | + echo "${{ steps.get_pr_url_user.outputs.pr_url }}" + echo "${{ steps.get_pr_url_user.outputs.pr_user }}" + - name: Repository Dispatch uses: peter-evans/repository-dispatch@v2 with: @@ -42,6 +72,8 @@ jobs: { "repo": { "name": "${{ steps.extract_repo_name.outputs.repo_name }}", - "branch": "${{ steps.extract_branch.outputs.branch }}" + "branch": "${{ steps.extract_branch.outputs.branch }}", + "pr_url": "${{ steps.get_pr_url_user.outputs.pr_url }}", + "pr_user": "${{ steps.get_pr_url_user.outputs.pr_user }}" } } diff --git a/addons/models/0000-Area0/0010-base_model.json b/addons/models/0000-Area0/0010-base_model.json index 4bfd6db9632..662db6179f8 100644 --- a/addons/models/0000-Area0/0010-base_model.json +++ b/addons/models/0000-Area0/0010-base_model.json @@ -1041,6 +1041,29 @@ "includeInNotification": false } ] + }, + { + "name": "StakeholderTitle", + "description": "Instance of a stakeholder title for Domains in Atlan", + "superTypes": [ + "Asset" + ], + "serviceType": "atlan", + "typeVersion": "1.0", + "attributeDefs": [ + { + "name": "stakeholderTitleDomainQualifiedNames", + "description": "qualified name array representing the Domains for which this StakeholderTitle is applicable", + "typeName": "array", + "indexType": "STRING", + "isOptional": true, + "cardinality": "SET", + "isUnique": false, + "isIndexable": false, + "skipScrubbing": true, + "includeInNotification": false + } + ] } ], "relationshipDefs": [ diff --git a/addons/policies/bootstrap_admin_policies.json b/addons/policies/bootstrap_admin_policies.json index 3cd9d7a62fa..5f301ad0af7 100644 --- a/addons/policies/bootstrap_admin_policies.json +++ b/addons/policies/bootstrap_admin_policies.json @@ -49,6 +49,31 
@@ "admin-task-cud" ] } + }, + { + "typeName": "AuthPolicy", + "attributes": { + "name": "ADMIN_ALLOW_FEATURE_FLAG_CUD", + "qualifiedName": "ADMIN_ALLOW_FEATURE_FLAG_CUD", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 1, + "policyUsers": [ + "service-account-atlan-argo", + "service-account-atlan-backend" + ], + "policyGroups": [], + "policyRoles": [], + "policyResourceCategory": "ADMIN", + "policyResources": [ + "atlas-service:*" + ], + "policyActions": [ + "admin-featureFlag-cud" + ] + } } ] } \ No newline at end of file diff --git a/addons/policies/bootstrap_entity_policies.json b/addons/policies/bootstrap_entity_policies.json index 24ecee34212..38a6b86e80d 100644 --- a/addons/policies/bootstrap_entity_policies.json +++ b/addons/policies/bootstrap_entity_policies.json @@ -3005,6 +3005,291 @@ "entity-delete" ] } + }, + { + "typeName": "AuthPolicy", + "attributes": + { + "name": "READ_DATA_CONTRACT", + "qualifiedName": "READ_DATA_CONTRACT", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 1, + "policyUsers": + [], + "policyGroups": + [], + "policyRoles": + [ + "$admin", + "$member", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": + [ + "entity-type:DataContract", + "entity-classification:*", + "entity:*" + ], + "policyActions": + [ + "entity-read" + ] + } + }, + { + "typeName": "AuthPolicy", + "attributes": + { + "name": "CU_DATA_CONTRACT", + "qualifiedName": "CU_DATA_CONTRACT", + "description": "cu allow for data contract", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 1, + "policyUsers": + [], + "policyGroups": + [], + "policyRoles": + [ + "$admin", + "$member", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": + [ + "entity-type:DataContract", + "entity-classification:*", + "entity:*" + ], + "policyActions": + [ + "entity-create", + "entity-update" + ] + } + }, + { + "typeName": "AuthPolicy", + "attributes": + { + "name": "READ_DATA_MESH_STAKEHOLDER_TITLE", + "qualifiedName": "READ_DATA_MESH_STAKEHOLDER_TITLE", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyUsers": + [], + "policyGroups": + [], + "policyRoles": + [ + "$admin", + "$member", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": + [ + "entity-type:StakeholderTitle", + "entity-classification:*", + "entity:*" + ], + "policyActions": + [ + "entity-read" + ] + } + }, + + { + "typeName": "AuthPolicy", + "attributes": { + "name": "CUD_BUSINESS_POLICY", + "qualifiedName": "CUD_BUSINESS_POLICY", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 0, + "policyUsers": [], + "policyGroups": [], + "policyRoles": [ + "$admin", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": [ + "entity-type:BusinessPolicy", + "entity-type:BusinessPolicyException", + "entity-classification:*", + "entity:*" + ], + "policyActions": [ + "entity-create", + "entity-update", + "entity-delete" + ] + } + }, + { + "typeName": "AuthPolicy", + "attributes": { + "name": "READ_BUSINESS_POLICY", + "qualifiedName": 
"READ_BUSINESS_POLICY", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 0, + "policyUsers": [], + "policyGroups": [], + "policyRoles": [ + "$admin", + "$guest", + "$member", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": [ + "entity-type:BusinessPolicy", + "entity-type:BusinessPolicyException", + "entity-classification:*", + "entity:*" + ], + "policyActions": [ + "entity-read" + ] + } + }, + + { + "typeName": "AuthPolicy", + "attributes": { + "name": "CUD_INCIDENT", + "qualifiedName": "CUD_INCIDENT", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 0, + "policyUsers": [], + "policyGroups": [], + "policyRoles": [ + "$admin", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": [ + "entity-type:Incident", + "entity-classification:*", + "entity:*" + ], + "policyActions": [ + "entity-create", + "entity-update", + "entity-delete" + ] + } + }, + { + "typeName": "AuthPolicy", + "attributes": { + "name": "READ_INCIDENT" , + "qualifiedName": "READ_INCIDENT", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 0, + "policyUsers": [], + "policyGroups": [], + "policyRoles": [ + "$admin", + "$guest", + "$member", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": [ + "entity-type:Incident", + "entity-classification:*", + "entity:*" + ], + "policyActions": [ + "entity-read" + ] + } + }, + { + "typeName": "AuthPolicy", + "attributes": { + "name": "CRUD_BUSINESS_POLICY_LOG", + "qualifiedName": "CRUD_BUSINESS_POLICY_LOG", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 0, + "policyUsers": [], + "policyGroups": [], + "policyRoles": [ + "$admin", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": [ + "entity-type:BusinessPolicyLog", + "entity-classification:*", + "entity:*" + ], + "policyActions": [ + "entity-create", + "entity-read", + "entity-update", + "entity-delete" + ] + } + }, + { + "typeName": "AuthPolicy", + "attributes": { + "name": "CRUD_TASK", + "qualifiedName": "CRUD_TASK", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyPriority": 0, + "policyUsers": [], + "policyGroups": [], + "policyRoles": [ + "$admin", + "$guest", + "$member", + "$api-token-default-access" + ], + "policyResourceCategory": "ENTITY", + "policyResources": [ + "entity-type:Task", + "entity-classification:*", + "entity:*" + ], + "policyActions": [ + "entity-create", + "entity-read", + "entity-update", + "entity-delete" + ] + } } ] } diff --git a/addons/policies/bootstrap_heka_policies.json b/addons/policies/bootstrap_heka_policies.json index d092186d789..4a9aeb39356 100644 --- a/addons/policies/bootstrap_heka_policies.json +++ b/addons/policies/bootstrap_heka_policies.json @@ -13,6 +13,7 @@ "policyServiceName": "heka", "policyType": "deny", "policyPriority": 1, + "isPolicyEnabled": false, "policyUsers": [], "policyGroups": [], "policyRoles": @@ -32,4 +33,4 @@ } } ] -} \ No newline at end of file +} diff --git a/addons/policies/bootstrap_relationship_policies.json 
b/addons/policies/bootstrap_relationship_policies.json index 6c44567b872..2c123bec6b5 100644 --- a/addons/policies/bootstrap_relationship_policies.json +++ b/addons/policies/bootstrap_relationship_policies.json @@ -760,6 +760,82 @@ "remove-relationship" ] } + }, + { + "typeName": "AuthPolicy", + "attributes": + { + "name": "LINK_BUSINESS_POLICY_BUSINESS_POLICY", + "qualifiedName": "LINK_BUSINESS_POLICY_BUSINESS_POLICY", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyUsers": + [], + "policyGroups": + [], + "policyRoles": + [ + "$admin", + "$api-token-default-access" + ], + "policyResourceCategory": "RELATIONSHIP", + "policyResources": + [ + "end-one-entity-classification:*", + "end-two-entity-classification:*", + "end-one-entity:*", + "end-two-entity:*", + "end-one-entity-type:BusinessPolicy", + "end-two-entity-type:BusinessPolicy", + "relationship-type:RelatedBusinessPolicy" + ], + "policyActions": + [ + "add-relationship", + "update-relationship", + "remove-relationship" + ] + } + }, + { + "typeName": "AuthPolicy", + "attributes": + { + "name": "LINK_BUSINESS_POLICY_BUSINESS_POLICY_EXCEPTION", + "qualifiedName": "LINK_BUSINESS_POLICY_BUSINESS_POLICY_EXCEPTION", + "policyCategory": "bootstrap", + "policySubCategory": "default", + "policyServiceName": "atlas", + "policyType": "allow", + "policyUsers": + [], + "policyGroups": + [], + "policyRoles": + [ + "$admin", + "$api-token-default-access" + ], + "policyResourceCategory": "RELATIONSHIP", + "policyResources": + [ + "end-one-entity-classification:*", + "end-two-entity-classification:*", + "end-one-entity:*", + "end-two-entity:*", + "end-one-entity-type:BusinessPolicy", + "end-two-entity-type:BusinessPolicyException", + "relationship-type:BusinessPolicy_BusinessPolicyException" + ], + "policyActions": + [ + "add-relationship", + "update-relationship", + "remove-relationship" + ] + } } ] } \ No newline at end of file diff --git a/addons/policies/global_stakeholder-titles.json b/addons/policies/global_stakeholder-titles.json new file mode 100644 index 00000000000..a32088f11eb --- /dev/null +++ b/addons/policies/global_stakeholder-titles.json @@ -0,0 +1,41 @@ +{ + "entities": [ + { + "typeName": "StakeholderTitle", + "attributes": + { + "qualifiedName": "stakeholderTitle/default/DOMAIN_OWNER", + "name": "Domain Owner", + "stakeholderTitleDomainQualifiedNames": ["*/super"] + } + }, + { + "typeName": "StakeholderTitle", + "attributes": + { + "qualifiedName": "stakeholderTitle/default/DATA_PRODUCT_OWNER", + "name": "Data Product Owner", + "stakeholderTitleDomainQualifiedNames": ["*/super"] + } + }, + { + "typeName": "StakeholderTitle", + "attributes": + { + "qualifiedName": "stakeholderTitle/default/DATA_ENGINEER", + "name": "Data Engineer", + "stakeholderTitleDomainQualifiedNames": ["*/super"] + } + }, + { + "typeName": "StakeholderTitle", + "attributes": + { + "qualifiedName": "stakeholderTitle/default/ARCHITECT", + "name": "Architect", + "stakeholderTitleDomainQualifiedNames": ["*/super"] + } + } + ] +} + diff --git a/addons/static/templates/policy_cache_transformer_persona.json b/addons/static/templates/policy_cache_transformer_persona.json index a61b671090b..b9541abf5be 100644 --- a/addons/static/templates/policy_cache_transformer_persona.json +++ b/addons/static/templates/policy_cache_transformer_persona.json @@ -404,6 +404,58 @@ "entity-remove-classification" ] }, + { + "policyResourceCategory": "ENTITY", + "description": "Create Stakeholder for this 
Domain", + "policyType": "ACCESS", + "resources": [ + "entity:default/*/{entity}", + "entity-type:Stakeholder", + "entity-classification:*" + ], + "actions": [ + "entity-read", + "entity-create", + "entity-update", + "entity-delete" + ] + }, + { + "policyResourceCategory": "RELATIONSHIP", + "policyType": "ACCESS", + "description": "Link/unlink Stakeholder to this Domain", + + "resources": [ + "relationship-type:*", + + "end-one-entity-type:DataDomain", + "end-one-entity-classification:*", + "end-one-entity:{entity}", + + "end-two-entity-type:Stakeholder", + "end-two-entity-classification:*", + "end-two-entity:default/*/{entity}" + ], + "actions": ["add-relationship", "update-relationship", "remove-relationship"] + }, + { + "policyResourceCategory": "RELATIONSHIP", + "policyType": "ACCESS", + "description": "Link/unlink any Stakeholder Title to this Domain's Stakeholder", + + "resources": [ + "relationship-type:*", + + "end-one-entity-type:StakeholderTitle", + "end-one-entity-classification:*", + "end-one-entity:*", + + "end-two-entity-type:Stakeholder", + "end-two-entity-classification:*", + "end-two-entity:default/*/{entity}" + ], + "actions": ["add-relationship", "update-relationship", "remove-relationship"] + }, { "policyResourceCategory": "RELATIONSHIP", "policyType": "ACCESS", @@ -480,7 +532,19 @@ "actions": ["add-relationship", "update-relationship", "remove-relationship"] } ], - + "persona-domain-business-update-metadata": [ + { + "policyType": "ACCESS", + "policyResourceCategory": "ENTITY", + "resources": [ + "entity:{entity}", + "entity-type:DataDomain", + "entity-classification:*", + "entity-business-metadata:*" + ], + "actions": ["entity-update-business-metadata"] + } + ], "persona-domain-sub-domain-read": [ { @@ -542,6 +606,59 @@ "entity-remove-classification" ] }, + { + "policyResourceCategory": "ENTITY", + "policyType": "ACCESS", + "description": "Create Stakeholder for Sub Domains", + + "resources": [ + "entity:default/*/{entity}/*", + "entity-type:Stakeholder", + "entity-classification:*" + ], + "actions": [ + "entity-read", + "entity-create", + "entity-update", + "entity-delete" + ] + }, + { + "policyResourceCategory": "RELATIONSHIP", + "policyType": "ACCESS", + "description": "Link/unlink Stakeholder to Sub Domains", + + "resources": [ + "relationship-type:*", + + "end-one-entity-type:DataDomain", + "end-one-entity-classification:*", + "end-one-entity:{entity}/*domain/*", + + "end-two-entity-type:Stakeholder", + "end-two-entity-classification:*", + "end-two-entity:default/*/{entity}/*" + ], + "actions": ["add-relationship", "update-relationship", "remove-relationship"] + }, + { + "policyResourceCategory": "RELATIONSHIP", + "policyType": "ACCESS", + "description": "Link/unlink any Stakeholder Title to sub-domains's Stakeholder", + + "resources": [ + "relationship-type:*", + + "end-one-entity-type:StakeholderTitle", + "end-one-entity-classification:*", + "end-one-entity:*", + + "end-two-entity-type:Stakeholder", + "end-two-entity-classification:*", + "end-two-entity:default/*/{entity}/*" + ], + "actions": ["add-relationship", "update-relationship", "remove-relationship"] + }, { "policyResourceCategory": "RELATIONSHIP", "policyType": "ACCESS", @@ -589,6 +706,19 @@ "actions": ["entity-delete"] } ], + "persona-domain-sub-domain-business-update-metadata": [ + { + "policyType": "ACCESS", + "policyResourceCategory": "ENTITY", + "resources": [ + "entity:{entity}/*domain/*", + "entity-type:DataDomain", + "entity-classification:*", + "entity-business-metadata:*" + ], + "actions": 
["entity-update-business-metadata"] + } + ], "persona-domain-product-read": [ { @@ -719,8 +849,19 @@ "actions": ["entity-delete"] } ], - - + "persona-domain-product-business-update-metadata": [ + { + "policyType": "ACCESS", + "policyResourceCategory": "ENTITY", + "resources": [ + "entity:{entity}/*product/*", + "entity-type:DataProduct", + "entity-classification:*", + "entity-business-metadata:*" + ], + "actions": ["entity-update-business-metadata"] + } + ], "select": [ { diff --git a/auth-agents-common/src/main/resources/service-defs/atlas-servicedef-atlas.json b/auth-agents-common/src/main/resources/service-defs/atlas-servicedef-atlas.json index 0539a562b9b..dc8a72c20d4 100644 --- a/auth-agents-common/src/main/resources/service-defs/atlas-servicedef-atlas.json +++ b/auth-agents-common/src/main/resources/service-defs/atlas-servicedef-atlas.json @@ -450,6 +450,11 @@ "itemId": 24, "name": "admin-task-cud", "label": "Admin task CUD API" + }, + { + "itemId": 25, + "name": "admin-featureFlag-cud", + "label": "Admin featureflag CUD API" } ], diff --git a/authorization/src/main/java/org/apache/atlas/authorize/AtlasPrivilege.java b/authorization/src/main/java/org/apache/atlas/authorize/AtlasPrivilege.java index aaf02cbe7a7..29d332cad1d 100644 --- a/authorization/src/main/java/org/apache/atlas/authorize/AtlasPrivilege.java +++ b/authorization/src/main/java/org/apache/atlas/authorize/AtlasPrivilege.java @@ -51,7 +51,9 @@ public enum AtlasPrivilege { ADMIN_ENTITY_AUDITS("admin-entity-audits"), ADMIN_REPAIR_INDEX("admin-repair-index"), - ADMIN_TASK_CUD("admin-task-cud"); + ADMIN_TASK_CUD("admin-task-cud"), + + ADMIN_FEATURE_FLAG_CUD("admin-featureFlag-cud"); private final String type; AtlasPrivilege(String actionType){ diff --git a/common/src/main/java/org/apache/atlas/repository/Constants.java b/common/src/main/java/org/apache/atlas/repository/Constants.java index 8d4d47ea574..9408328b9f4 100644 --- a/common/src/main/java/org/apache/atlas/repository/Constants.java +++ b/common/src/main/java/org/apache/atlas/repository/Constants.java @@ -19,7 +19,9 @@ package org.apache.atlas.repository; import org.apache.atlas.ApplicationProperties; +import org.apache.atlas.AtlasConfiguration; import org.apache.atlas.AtlasException; +import org.apache.atlas.service.FeatureFlagStore; import org.apache.commons.configuration.Configuration; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; @@ -133,6 +135,26 @@ public final class Constants { public static final String GLOSSARY_TERMS_EDGE_LABEL = "r:AtlasGlossaryTermAnchor"; public static final String GLOSSARY_CATEGORY_EDGE_LABEL = "r:AtlasGlossaryCategoryAnchor"; + /** + * MESH property keys. 
+ */ + public static final String DATA_DOMAIN_ENTITY_TYPE = "DataDomain"; + public static final String DATA_PRODUCT_ENTITY_TYPE = "DataProduct"; + + public static final String STAKEHOLDER_ENTITY_TYPE = "Stakeholder"; + public static final String STAKEHOLDER_TITLE_ENTITY_TYPE = "StakeholderTitle"; + + public static final String REL_DOMAIN_TO_DOMAINS = "parent_domain_sub_domains"; + public static final String REL_DOMAIN_TO_PRODUCTS = "data_domain_data_products"; + + public static final String REL_DOMAIN_TO_STAKEHOLDERS = "data_domain_stakeholders"; + public static final String REL_STAKEHOLDER_TITLE_TO_STAKEHOLDERS = "stakeholder_title_stakeholders"; + + public static final String REL_DATA_PRODUCT_TO_OUTPUT_PORTS = "data_products_output_ports"; + public static final String REL_DATA_PRODUCT_TO_INPUT_PORTS = "data_products_input_ports"; + + public static final String INPUT_PORT_PRODUCT_EDGE_LABEL = "__Asset.inputPortDataProducts"; + public static final String OUTPUT_PORT_PRODUCT_EDGE_LABEL = "__Asset.outputPortDataProducts"; /** * SQL property keys. @@ -150,6 +172,7 @@ public final class Constants { public static final String PURPOSE_ENTITY_TYPE = "Purpose"; public static final String POLICY_ENTITY_TYPE = "AuthPolicy"; public static final String SERVICE_ENTITY_TYPE = "AuthService"; + public static final String REL_POLICY_TO_ACCESS_CONTROL = "access_control_policies"; /** * Resource @@ -162,6 +185,13 @@ public final class Constants { public static final String ASSET_README_EDGE_LABEL = "__Asset.readme"; public static final String ASSET_LINK_EDGE_LABEL = "__Asset.links"; + /** + * Contract + */ + public static final String CONTRACT_ENTITY_TYPE = "DataContract"; + public static final String ATTR_CONTRACT_VERSION = "dataContractVersion"; + + /** * Lineage relations. 
*/ @@ -371,7 +401,6 @@ public enum SupportedFileExtensions { XLSX, XLS, CSV } public static final String CATALOG_PROCESS_INPUT_RELATIONSHIP_LABEL = "__Process.inputs"; public static final String CATALOG_PROCESS_OUTPUT_RELATIONSHIP_LABEL = "__Process.outputs"; - public static final String COLUMN_LINEAGE_RELATIONSHIP_LABEL = "__Process.columnProcesses"; public static final String CLASSIFICATION_PROPAGATION_MODE_DEFAULT ="DEFAULT"; public static final String CLASSIFICATION_PROPAGATION_MODE_RESTRICT_LINEAGE ="RESTRICT_LINEAGE"; @@ -381,14 +410,12 @@ public enum SupportedFileExtensions { XLSX, XLS, CSV } public static final HashMap> CLASSIFICATION_PROPAGATION_MODE_LABELS_MAP = new HashMap>(){{ put(CLASSIFICATION_PROPAGATION_MODE_RESTRICT_LINEAGE, new ArrayList<>( Arrays.asList(CATALOG_PROCESS_INPUT_RELATIONSHIP_LABEL, - CATALOG_PROCESS_OUTPUT_RELATIONSHIP_LABEL, - COLUMN_LINEAGE_RELATIONSHIP_LABEL + CATALOG_PROCESS_OUTPUT_RELATIONSHIP_LABEL ))); put(CLASSIFICATION_PROPAGATION_MODE_DEFAULT, null); put(CLASSIFICATION_PROPAGATION_MODE_RESTRICT_HIERARCHY, new ArrayList<>( Arrays.asList(CATALOG_PROCESS_INPUT_RELATIONSHIP_LABEL, - CATALOG_PROCESS_OUTPUT_RELATIONSHIP_LABEL, - COLUMN_LINEAGE_RELATIONSHIP_LABEL + CATALOG_PROCESS_OUTPUT_RELATIONSHIP_LABEL ))); }}; @@ -403,7 +430,8 @@ public enum SupportedFileExtensions { XLSX, XLS, CSV } public static final String ATTR_STARRED_DETAILS_LIST = "starredDetailsList"; public static final String ATTR_ASSET_STARRED_BY = "assetStarredBy"; public static final String ATTR_ASSET_STARRED_AT = "assetStarredAt"; - + public static final String ATTR_CERTIFICATE_STATUS = "certificateStatus"; + public static final String ATTR_CONTRACT = "dataContractJson"; public static final String STRUCT_STARRED_DETAILS = "StarredDetails"; public static final String KEYCLOAK_ROLE_ADMIN = "$admin"; @@ -418,12 +446,15 @@ public enum SupportedFileExtensions { XLSX, XLS, CSV } public static final Set SKIP_UPDATE_AUTH_CHECK_TYPES = new HashSet() {{ add(README_ENTITY_TYPE); add(LINK_ENTITY_TYPE); + add(STAKEHOLDER_ENTITY_TYPE); + add(STAKEHOLDER_TITLE_ENTITY_TYPE); }}; public static final Set SKIP_DELETE_AUTH_CHECK_TYPES = new HashSet() {{ add(README_ENTITY_TYPE); add(LINK_ENTITY_TYPE); add(POLICY_ENTITY_TYPE); + add(STAKEHOLDER_TITLE_ENTITY_TYPE); }}; private Constants() { @@ -445,6 +476,20 @@ private static String getEncodedTypePropertyKey(String defaultKey) { } } + public static String getESIndex() { + String indexSuffix = null; + if(AtlasConfiguration.ATLAS_MAINTENANCE_MODE.getBoolean()) { + try { + if (FeatureFlagStore.evaluate("use_temp_es_index", "true")) { + indexSuffix = "_temp"; + } + } catch (Exception e) { + LOG.error("Failed to evaluate feature flag with error", e); + } + } + return indexSuffix == null ? VERTEX_INDEX_NAME : VERTEX_INDEX_NAME + indexSuffix; + } + public static String getStaticFileAsString(String fileName) throws IOException { String atlasHomeDir = System.getProperty("atlas.home"); atlasHomeDir = StringUtils.isEmpty(atlasHomeDir) ? "." 
: atlasHomeDir; diff --git a/common/src/main/java/org/apache/atlas/service/FeatureFlagStore.java b/common/src/main/java/org/apache/atlas/service/FeatureFlagStore.java new file mode 100644 index 00000000000..adfca599d12 --- /dev/null +++ b/common/src/main/java/org/apache/atlas/service/FeatureFlagStore.java @@ -0,0 +1,45 @@ +package org.apache.atlas.service; + +import org.apache.atlas.service.redis.RedisService; +import org.apache.commons.lang.StringUtils; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.stereotype.Component; + +@Component +public class FeatureFlagStore { + private static RedisService redisService = null; + public FeatureFlagStore(@Qualifier("redisServiceImpl") RedisService redisService) { + FeatureFlagStore.redisService = redisService; + } + + public static boolean evaluate(String key, String expectedValue) { + boolean ret = false; + try{ + if (StringUtils.isEmpty(key) || StringUtils.isEmpty(expectedValue)) + return ret; + String value = redisService.getValue(addFeatureFlagNamespace(key)); + ret = StringUtils.equals(value, expectedValue); + } catch (Exception e) { + return ret; + } + return ret; + } + + public static void setFlag(String key, String value) { + if (StringUtils.isEmpty(key) || StringUtils.isEmpty(value)) + return; + + redisService.putValue(addFeatureFlagNamespace(key), value); + } + + public static void deleteFlag(String key) { + if (StringUtils.isEmpty(key)) + return; + + redisService.removeValue(addFeatureFlagNamespace(key)); + } + + private static String addFeatureFlagNamespace(String key) { + return "ff:"+key; + } +} diff --git a/common/src/main/java/org/apache/atlas/service/metrics/MetricsRegistry.java b/common/src/main/java/org/apache/atlas/service/metrics/MetricsRegistry.java index 9fdf5b903e5..89741554872 100644 --- a/common/src/main/java/org/apache/atlas/service/metrics/MetricsRegistry.java +++ b/common/src/main/java/org/apache/atlas/service/metrics/MetricsRegistry.java @@ -10,7 +10,7 @@ public interface MetricsRegistry { void collect(String requestId, String requestUri, AtlasPerfMetrics metrics); - void collectIndexsearch(String requestId, String requestUri, List applicationMetrics); + void collectApplicationMetrics(String requestId, String requestUri, List applicationMetrics); void scrape(PrintWriter writer) throws IOException; diff --git a/common/src/main/java/org/apache/atlas/service/metrics/MetricsRegistryServiceImpl.java b/common/src/main/java/org/apache/atlas/service/metrics/MetricsRegistryServiceImpl.java index 1ae6d2980c4..19171325e27 100644 --- a/common/src/main/java/org/apache/atlas/service/metrics/MetricsRegistryServiceImpl.java +++ b/common/src/main/java/org/apache/atlas/service/metrics/MetricsRegistryServiceImpl.java @@ -1,12 +1,10 @@ package org.apache.atlas.service.metrics; -import io.micrometer.core.instrument.Metrics; -import io.micrometer.core.instrument.Tag; -import io.micrometer.core.instrument.Tags; -import io.micrometer.core.instrument.Timer; +import io.micrometer.core.instrument.*; import io.micrometer.prometheus.PrometheusMeterRegistry; import org.apache.atlas.ApplicationProperties; import org.apache.atlas.AtlasException; +import org.apache.atlas.utils.AtlasMetricType; import org.apache.atlas.utils.AtlasPerfMetrics; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -64,35 +62,42 @@ public void collect(String requestId, String requestUri, AtlasPerfMetrics metric } } //Use this if you want to publish Histograms - public void collectIndexsearch(String requestId, String 
requestUri, List applicationMetrics){ + public void collectApplicationMetrics(String requestId, String requestUri, List applicationMetrics){ try { for(AtlasPerfMetrics.Metric metric : applicationMetrics){ - Timer.builder(APPLICATION_LEVEL_METRICS_SUMMARY) - .serviceLevelObjectives( - Duration.ofMillis(500), - Duration.ofMillis(750), - Duration.ofMillis(1000), - Duration.ofMillis(1200), - Duration.ofMillis(1500), - Duration.ofSeconds(2), - Duration.ofSeconds(3), - Duration.ofSeconds(4), - Duration.ofSeconds(5), - Duration.ofSeconds(7), - Duration.ofSeconds(10), - Duration.ofSeconds(15), - Duration.ofSeconds(20), - Duration.ofSeconds(25), - Duration.ofSeconds(30), - Duration.ofSeconds(40), - Duration.ofSeconds(60), - Duration.ofSeconds(90), - Duration.ofSeconds(120), - Duration.ofSeconds(180) - ) - .publishPercentiles(PERCENTILES) - .tags(convertToMicrometerTags(metric.getTags())) - .register(getMeterRegistry()).record(metric.getTotalTimeMSecs(), TimeUnit.MILLISECONDS); + if (metric.getMetricType() == AtlasMetricType.COUNTER) { + Counter.builder(metric.getName()) + .tags(convertToMicrometerTags(metric.getTags())) + .register(getMeterRegistry()) + .increment(metric.getInvocations()); + } else { + Timer.builder(APPLICATION_LEVEL_METRICS_SUMMARY) + .serviceLevelObjectives( + Duration.ofMillis(500), + Duration.ofMillis(750), + Duration.ofMillis(1000), + Duration.ofMillis(1200), + Duration.ofMillis(1500), + Duration.ofSeconds(2), + Duration.ofSeconds(3), + Duration.ofSeconds(4), + Duration.ofSeconds(5), + Duration.ofSeconds(7), + Duration.ofSeconds(10), + Duration.ofSeconds(15), + Duration.ofSeconds(20), + Duration.ofSeconds(25), + Duration.ofSeconds(30), + Duration.ofSeconds(40), + Duration.ofSeconds(60), + Duration.ofSeconds(90), + Duration.ofSeconds(120), + Duration.ofSeconds(180) + ) + .publishPercentiles(PERCENTILES) + .tags(convertToMicrometerTags(metric.getTags())) + .register(getMeterRegistry()).record(metric.getTotalTimeMSecs(), TimeUnit.MILLISECONDS); + } } } catch (Exception e) { LOG.error("Failed to collect metrics", e); diff --git a/common/src/main/java/org/apache/atlas/service/redis/AbstractRedisService.java b/common/src/main/java/org/apache/atlas/service/redis/AbstractRedisService.java index 0ae20a60cc4..9ad5fd904f2 100644 --- a/common/src/main/java/org/apache/atlas/service/redis/AbstractRedisService.java +++ b/common/src/main/java/org/apache/atlas/service/redis/AbstractRedisService.java @@ -32,6 +32,7 @@ public abstract class AbstractRedisService implements RedisService { private static final String ATLAS_METASTORE_SERVICE = "atlas-metastore-service"; RedissonClient redisClient; + RedissonClient redisCacheClient; Map keyLockMap; Configuration atlasConfig; long waitTimeInMS; @@ -71,6 +72,32 @@ public void releaseDistributedLock(String key) { } } + @Override + public String getValue(String key) { + // If value doesn't exist, return null else return the value + return (String) redisCacheClient.getBucket(convertToNamespace(key)).get(); + } + + @Override + public String putValue(String key, String value) { + // Put the value in the redis cache with TTL + redisCacheClient.getBucket(convertToNamespace(key)).set(value); + return value; + } + + @Override + public String putValue(String key, String value, int timeout) { + // Put the value in the redis cache with TTL + redisCacheClient.getBucket(convertToNamespace(key)).set(value, timeout, TimeUnit.SECONDS); + return value; + } + + @Override + public void removeValue(String key) { + // Remove the value from the redis cache + 
redisCacheClient.getBucket(convertToNamespace(key)).delete(); + } + private String getHostAddress() throws UnknownHostException { return InetAddress.getLocalHost().getHostAddress(); } @@ -85,6 +112,11 @@ private Config initAtlasConfig() throws AtlasException { return redisConfig; } + private String convertToNamespace(String key){ + // Append key with namespace :atlas + return "atlas:"+key; + } + Config getLocalConfig() throws AtlasException { Config config = initAtlasConfig(); config.useSingleServer() @@ -112,6 +144,26 @@ Config getProdConfig() throws AtlasException { return config; } + Config getCacheImplConfig() { + Config config = new Config(); + config.useSentinelServers() + .setClientName(ATLAS_METASTORE_SERVICE+"-redisCache") + .setReadMode(ReadMode.MASTER_SLAVE) + .setCheckSentinelsList(false) + .setKeepAlive(true) + .setMasterConnectionMinimumIdleSize(10) + .setMasterConnectionPoolSize(20) + .setSlaveConnectionMinimumIdleSize(10) + .setSlaveConnectionPoolSize(20) + .setMasterName(atlasConfig.getString(ATLAS_REDIS_MASTER_NAME)) + .addSentinelAddress(formatUrls(atlasConfig.getStringArray(ATLAS_REDIS_SENTINEL_URLS))) + .setUsername(atlasConfig.getString(ATLAS_REDIS_USERNAME)) + .setPassword(atlasConfig.getString(ATLAS_REDIS_PASSWORD)) + .setTimeout(50) //Setting UP timeout to 50ms + .setRetryAttempts(0); + return config; + } + private String[] formatUrls(String[] urls) throws IllegalArgumentException { if (ArrayUtils.isEmpty(urls)) { getLogger().error("Invalid redis cluster urls"); diff --git a/common/src/main/java/org/apache/atlas/service/redis/NoRedisServiceImpl.java b/common/src/main/java/org/apache/atlas/service/redis/NoRedisServiceImpl.java index 96a8fadc99f..9bd942d3042 100644 --- a/common/src/main/java/org/apache/atlas/service/redis/NoRedisServiceImpl.java +++ b/common/src/main/java/org/apache/atlas/service/redis/NoRedisServiceImpl.java @@ -29,6 +29,21 @@ public void releaseDistributedLock(String key) { //do nothing } + @Override + public String getValue(String key) { + return null; + } + + @Override + public String putValue(String key, String value, int timeout) { + return null; + } + + @Override + public void removeValue(String key) { + + } + @Override public Logger getLogger() { return LOG; diff --git a/common/src/main/java/org/apache/atlas/service/redis/RedisService.java b/common/src/main/java/org/apache/atlas/service/redis/RedisService.java index 1475f93e832..a541b1eeeef 100644 --- a/common/src/main/java/org/apache/atlas/service/redis/RedisService.java +++ b/common/src/main/java/org/apache/atlas/service/redis/RedisService.java @@ -8,6 +8,14 @@ public interface RedisService { void releaseDistributedLock(String key); + String getValue(String key); + + String putValue(String key, String value); + + String putValue(String key, String value, int timeout); + + void removeValue(String key); + Logger getLogger(); } diff --git a/common/src/main/java/org/apache/atlas/service/redis/RedisServiceImpl.java b/common/src/main/java/org/apache/atlas/service/redis/RedisServiceImpl.java index 42dec6fa783..48f199473e0 100644 --- a/common/src/main/java/org/apache/atlas/service/redis/RedisServiceImpl.java +++ b/common/src/main/java/org/apache/atlas/service/redis/RedisServiceImpl.java @@ -18,6 +18,7 @@ public class RedisServiceImpl extends AbstractRedisService{ @PostConstruct public void init() throws AtlasException { redisClient = Redisson.create(getProdConfig()); + redisCacheClient = Redisson.create(getCacheImplConfig()); LOG.info("Sentinel redis client created successfully."); } diff --git 
a/common/src/main/java/org/apache/atlas/service/redis/RedisServiceLocalImpl.java b/common/src/main/java/org/apache/atlas/service/redis/RedisServiceLocalImpl.java index 2eb774920ef..c69a151a7d2 100644 --- a/common/src/main/java/org/apache/atlas/service/redis/RedisServiceLocalImpl.java +++ b/common/src/main/java/org/apache/atlas/service/redis/RedisServiceLocalImpl.java @@ -18,9 +18,25 @@ public class RedisServiceLocalImpl extends AbstractRedisService { @PostConstruct public void init() throws AtlasException { redisClient = Redisson.create(getLocalConfig()); + redisCacheClient = Redisson.create(getLocalConfig()); LOG.info("Local redis client created successfully."); } + @Override + public String getValue(String key) { + return null; + } + + @Override + public String putValue(String key, String value, int timeout) { + return null; + } + + @Override + public void removeValue(String key) { + + } + @Override public Logger getLogger() { return LOG; diff --git a/common/src/main/java/org/apache/atlas/utils/AtlasMetricType.java b/common/src/main/java/org/apache/atlas/utils/AtlasMetricType.java new file mode 100644 index 00000000000..6752b7fbd4d --- /dev/null +++ b/common/src/main/java/org/apache/atlas/utils/AtlasMetricType.java @@ -0,0 +1,9 @@ +package org.apache.atlas.utils; + +public enum AtlasMetricType { + COUNTER, + GAUGE, + HISTOGRAM, + METER, + TIMER +} diff --git a/common/src/main/java/org/apache/atlas/utils/AtlasPerfMetrics.java b/common/src/main/java/org/apache/atlas/utils/AtlasPerfMetrics.java index beebcb6ab19..dd8a101ad5a 100644 --- a/common/src/main/java/org/apache/atlas/utils/AtlasPerfMetrics.java +++ b/common/src/main/java/org/apache/atlas/utils/AtlasPerfMetrics.java @@ -104,6 +104,8 @@ long getElapsedTime() { public static class Metric { private final String name; + + private AtlasMetricType metricType; private long invocations = 0; private long totalTimeMSecs = 0; HashMap tags = new HashMap<>(); @@ -112,6 +114,14 @@ public Metric(String name) { this.name = name; } + public void setMetricType(AtlasMetricType metricType) { + this.metricType = metricType; + } + + public AtlasMetricType getMetricType() { + return metricType; + } + public String getName() { return name; } @@ -135,5 +145,9 @@ public HashMap getTags() { return tags; } + public void incrementInvocations() { + invocations++; + } + } } diff --git a/graphdb/api/src/main/java/org/apache/atlas/repository/graphdb/AtlasIndexQuery.java b/graphdb/api/src/main/java/org/apache/atlas/repository/graphdb/AtlasIndexQuery.java index 09fa0d6d060..d31f721b477 100644 --- a/graphdb/api/src/main/java/org/apache/atlas/repository/graphdb/AtlasIndexQuery.java +++ b/graphdb/api/src/main/java/org/apache/atlas/repository/graphdb/AtlasIndexQuery.java @@ -26,7 +26,7 @@ import java.util.List; import java.util.Map; import java.util.Set; - +import java.util.ArrayList; /** * A graph query that runs directly against a particular index. 
* @@ -103,7 +103,7 @@ interface Result { DirectIndexQueryResult getCollapseVertices(String key); Map> getHighLights(); - + ArrayList getSort(); } } diff --git a/graphdb/janus/pom.xml b/graphdb/janus/pom.xml index 5daef76b4ee..75c9079eee1 100644 --- a/graphdb/janus/pom.xml +++ b/graphdb/janus/pom.xml @@ -282,6 +282,10 @@ mockito-all test + + org.apache.atlas + atlas-server-api + diff --git a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasElasticsearchQuery.java b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasElasticsearchQuery.java index 9d2e2489e8a..9aa7cfe8bac 100644 --- a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasElasticsearchQuery.java +++ b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasElasticsearchQuery.java @@ -19,12 +19,15 @@ import org.apache.atlas.AtlasConfiguration; import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.RequestContext; import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.model.discovery.SearchParams; import org.apache.atlas.repository.graphdb.AtlasIndexQuery; import org.apache.atlas.repository.graphdb.AtlasVertex; import org.apache.atlas.repository.graphdb.DirectIndexQueryResult; import org.apache.atlas.type.AtlasType; +import org.apache.atlas.utils.AtlasMetricType; +import org.apache.atlas.utils.AtlasPerfMetrics; import org.apache.commons.collections.MapUtils; import org.apache.commons.lang.NotImplementedException; import org.apache.commons.lang.StringUtils; @@ -46,6 +49,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.janusgraph.util.encoding.LongEncoding; +import org.redisson.client.RedisException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -171,14 +175,16 @@ private Map runQueryWithLowLevelClient(String query) throws Atla } private DirectIndexQueryResult performAsyncDirectIndexQuery(SearchParams searchParams) throws AtlasBaseException, IOException { + AtlasPerfMetrics.MetricRecorder metric = RequestContext.get().startMetricRecord("performAsyncDirectIndexQuery"); DirectIndexQueryResult result = null; + boolean contextIdExists = StringUtils.isNotEmpty(searchParams.getSearchContextId()) && searchParams.getSearchContextSequenceNo() != null; try { - if(StringUtils.isNotEmpty(searchParams.getSearchContextId()) && searchParams.getSearchContextSequenceNo() != null) { - // If the search context id and greater sequence no is present, then we need to delete the previous search context async - processRequestWithSameSearchContextId(searchParams); + if(contextIdExists) { + // If the search context id and greater sequence no is present, + // then we need to delete the previous search context async + processRequestWithSameSearchContextId(searchParams); } AsyncQueryResult response = submitAsyncSearch(searchParams, false).get(); - if(response.isRunning()) { /* * If the response is still running, then we need to wait for the response @@ -189,8 +195,8 @@ private DirectIndexQueryResult performAsyncDirectIndexQuery(SearchParams searchP String esSearchId = response.getId(); String searchContextId = searchParams.getSearchContextId(); Integer searchContextSequenceNo = searchParams.getSearchContextSequenceNo(); - if (StringUtils.isNotEmpty(searchContextId) && searchContextSequenceNo != null) { - SearchContextCache.put(searchContextId, searchContextSequenceNo, esSearchId); + if (contextIdExists) { + CompletableFuture.runAsync(() -> 
SearchContextCache.put(searchContextId, searchContextSequenceNo, esSearchId)); } response = getAsyncSearchResponse(searchParams, esSearchId).get(); if (response == null) { @@ -204,6 +210,16 @@ private DirectIndexQueryResult performAsyncDirectIndexQuery(SearchParams searchP }catch (Exception e) { LOG.error("Failed to execute direct query on ES {}", e.getMessage()); throw new AtlasBaseException(AtlasErrorCode.INDEX_SEARCH_FAILED, e.getMessage()); + } finally { + if (contextIdExists) { + // If the search context id is present, then we need to remove the search context from the cache + try { + CompletableFuture.runAsync(() -> SearchContextCache.remove(searchParams.getSearchContextId())); + } catch (Exception e) { + LOG.error("Failed to remove the search context from the cache {}", e.getMessage()); + } + } + RequestContext.get().endMetricRecord(metric); } return result; } @@ -218,17 +234,31 @@ private DirectIndexQueryResult performAsyncDirectIndexQuery(SearchParams searchP * We also need to check if the search ID exists and delete if necessary */ private void processRequestWithSameSearchContextId(SearchParams searchParams) { - // Extract search context ID and sequence number - String currentSearchContextId = searchParams.getSearchContextId(); - Integer currentSequenceNumber = searchParams.getSearchContextSequenceNo(); - // Get the search ID from the cache if sequence number is greater than the current sequence number - String previousESSearchId = SearchContextCache.getESAsyncSearchIdFromContextCache(currentSearchContextId, currentSequenceNumber); - - if (StringUtils.isNotEmpty(previousESSearchId)) { - LOG.debug("Deleting the previous async search response with ID {}", previousESSearchId); - // If the search ID exists, then we need to delete the search context - deleteAsyncSearchResponse(previousESSearchId); - SearchContextCache.remove(currentSearchContextId); + AtlasPerfMetrics.MetricRecorder funcMetric = RequestContext.get().startMetricRecord("processRequestWithSameSearchContextId"); + try { + // Extract search context ID and sequence number + String currentSearchContextId = searchParams.getSearchContextId(); + Integer currentSequenceNumber = searchParams.getSearchContextSequenceNo(); + // Get the search ID from the cache if sequence number is greater than the current sequence number + String previousESSearchId = SearchContextCache.getESAsyncSearchIdFromContextCache(currentSearchContextId, currentSequenceNumber); + + if (StringUtils.isNotEmpty(previousESSearchId)) { + LOG.debug("Deleting the previous async search response with ID {}", previousESSearchId); + // If the search ID exists, then we need to delete the search context + deleteAsyncSearchResponse(previousESSearchId); + } + } catch (RedisException e) { + AtlasPerfMetrics.Metric failureCounter = new AtlasPerfMetrics.Metric("async_request_redis_failure_counter"); + failureCounter.setMetricType(AtlasMetricType.COUNTER); + failureCounter.incrementInvocations(); + LOG.error("Failed to process the request with the same search context ID {}", e.getMessage()); + RequestContext.get().addApplicationMetrics(failureCounter); + } + catch (Exception e) { + LOG.error("Failed to process the request with the same search context ID {}", e.getMessage()); + } + finally { + RequestContext.get().endMetricRecord(funcMetric); } } @@ -415,7 +445,10 @@ private DirectIndexQueryResult getResultFromResponse(Map if (hits_0 == null) { return result; } - this.vertexTotals = (Integer) hits_0.get("total").get("value"); + LinkedHashMap approximateCount = 
hits_0.get("total"); + if (approximateCount != null) { + this.vertexTotals = (Integer) approximateCount.get("value"); + } List hits_1 = AtlasType.fromJson(AtlasType.toJson(hits_0.get("hits")), List.class); @@ -511,6 +544,11 @@ public DirectIndexQueryResult getCollapseVerti public Map> getHighLights() { return new HashMap<>(); } + + @Override + public ArrayList getSort() { + return new ArrayList<>(); + } } @@ -576,6 +614,15 @@ public Map> getHighLights() { } return new HashMap<>(); } + + @Override + public ArrayList getSort() { + Object sort = this.hit.get("sort"); + if (Objects.nonNull(sort) && sort instanceof List) { + return (ArrayList) sort; + } + return new ArrayList<>(); + } } public class AsyncQueryResult { diff --git a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusIndexQuery.java b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusIndexQuery.java index d4417e8f6e3..f0959e48c4f 100644 --- a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusIndexQuery.java +++ b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusIndexQuery.java @@ -153,5 +153,10 @@ public DirectIndexQueryResult getCollapseVerti public Map> getHighLights() { return new HashMap<>(); } + + @Override + public ArrayList getSort() { + return new ArrayList<>(); + } } } diff --git a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/SearchContextCache.java b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/SearchContextCache.java index 1780e5d1f90..f7e5718f191 100644 --- a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/SearchContextCache.java +++ b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/SearchContextCache.java @@ -1,49 +1,77 @@ package org.apache.atlas.repository.graphdb.janus; +import org.apache.atlas.RequestContext; +import org.apache.atlas.service.redis.RedisService; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.stereotype.Component; +@Component +public class SearchContextCache { + private static final Logger LOG = LoggerFactory.getLogger(SearchContextCache.class); + private static RedisService redisService = null; -import com.google.common.cache.Cache; -import com.google.common.cache.CacheBuilder; + public static final String INVALID_SEQUENCE = "invalid_sequence"; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.TimeUnit; -public class SearchContextCache { - private static final Cache> searchContextCache = CacheBuilder.newBuilder() - .maximumSize(200) - .expireAfterWrite(30, TimeUnit.SECONDS) - .build(); + public SearchContextCache(@Qualifier("redisServiceImpl") RedisService redisService) { + SearchContextCache.redisService = redisService; + } + public static void put(String key, Integer sequence, String esAsyncId) { - HashMap entry = new HashMap<>(); - entry.put(sequence, esAsyncId); - searchContextCache.put(key, entry); + AtlasPerfMetrics.MetricRecorder metric = RequestContext.get().startMetricRecord("putInCache"); + try { + // Build the string in format `sequence/esAsyncId` and store it in redis + String val = sequence + "/" + esAsyncId; + redisService.putValue(key, val, 30); + } finally { + RequestContext.get().endMetricRecord(metric); + } } - public static HashMap get(String key){ - return 
searchContextCache.getIfPresent(key); + public static String get(String key) { + try { + return redisService.getValue(key); + } catch (Exception e) { + LOG.error("Error while fetching value from Redis", e); + return null; + } + } public static String getESAsyncSearchIdFromContextCache(String key, Integer sequence){ - //Get the context cache for the given key - HashMap contextCache = get(key); - if(contextCache == null || sequence == null){ - return null; - } - //Find the highest sequence number - int maxStoredSequence = 0; - for (Integer seq : contextCache.keySet()) { - if (seq > maxStoredSequence) { - maxStoredSequence = seq; + AtlasPerfMetrics.MetricRecorder metric = RequestContext.get().startMetricRecord("getESAsyncSearchIdFromContextCache"); + try { + //Get the context cache for the given key + String contextCache = get(key); + if(contextCache == null || sequence == null){ + return null; } + // Split the context cache to get the sequence and ESAsyncId + String[] contextCacheSplit = contextCache.split("/"); + if(contextCacheSplit.length != 2){ + return null; + } + int seq = Integer.parseInt(contextCacheSplit[0]); + if(sequence > seq){ + return contextCacheSplit[1]; + } else if (sequence < seq) { + return INVALID_SEQUENCE; + } + return null; + } finally { + RequestContext.get().endMetricRecord(metric); } - //If the given sequence is greater than the max stored sequence, return the ESAsyncId else return null - return sequence > maxStoredSequence ? contextCache.getOrDefault(maxStoredSequence, null) : null; - } - public static void remove(String key) { - searchContextCache.invalidate(key); } + public static void remove(String key) { + AtlasPerfMetrics.MetricRecorder metric = RequestContext.get().startMetricRecord("removeFromCache"); + try { + redisService.removeValue(key); + } finally { + RequestContext.get().endMetricRecord(metric); + } - public static void clear() { - searchContextCache.cleanUp(); } } + diff --git a/intg/src/main/java/org/apache/atlas/AtlasErrorCode.java b/intg/src/main/java/org/apache/atlas/AtlasErrorCode.java index fd2766616f7..1ad37b25649 100644 --- a/intg/src/main/java/org/apache/atlas/AtlasErrorCode.java +++ b/intg/src/main/java/org/apache/atlas/AtlasErrorCode.java @@ -290,8 +290,8 @@ public enum AtlasErrorCode { TASK_INVALID_PARAMETERS(400, "ATLAS-400-00-111", "Invalid parameters for task {0}"), TASK_TYPE_NOT_SUPPORTED(400, "ATLAS-400-00-112", "Task type {0} is not supported"), - PERSONA_POLICY_ASSETS_LIMIT_EXCEEDED(400, "ATLAS-400-00-113", "Exceeded limit of maximum allowed assets across policies for a Persona: Limit: {0}, assets: {1}"); - + PERSONA_POLICY_ASSETS_LIMIT_EXCEEDED(400, "ATLAS-400-00-113", "Exceeded limit of maximum allowed assets across policies for a Persona: Limit: {0}, assets: {1}"), + ADMIN_LIST_SHOULD_NOT_BE_EMPTY(400, "ATLAS-400-00-114", "Admin list should not be empty for type {0}"); private String errorCode; private String errorMessage; diff --git a/intg/src/main/java/org/apache/atlas/model/discovery/AtlasSearchResult.java b/intg/src/main/java/org/apache/atlas/model/discovery/AtlasSearchResult.java index 30d31b28a95..96bc0dc5871 100644 --- a/intg/src/main/java/org/apache/atlas/model/discovery/AtlasSearchResult.java +++ b/intg/src/main/java/org/apache/atlas/model/discovery/AtlasSearchResult.java @@ -37,6 +37,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.LinkedHashMap; import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE; import static 
com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.PUBLIC_ONLY; @@ -59,7 +60,7 @@ public class AtlasSearchResult implements Serializable { private Map aggregations; private Map searchScore; - private Map searchMetadata; + private LinkedHashMap searchMetadata; @@ -162,13 +163,26 @@ public Map getSearchMetadata() { public void addHighlights(String guid, Map> highlights) { if(MapUtils.isEmpty(this.searchMetadata)) { - this.searchMetadata = new HashMap<>(); + this.searchMetadata = new LinkedHashMap<>(); } ElasticsearchMetadata v = this.searchMetadata.getOrDefault(guid, new ElasticsearchMetadata()); v.addHighlights(highlights); this.searchMetadata.put(guid, v); } + public void addSort(String guid, ArrayList sort) { + if(MapUtils.isEmpty(this.searchMetadata)) { + this.searchMetadata = new LinkedHashMap<>(); + } + ElasticsearchMetadata sortMetadata = this.searchMetadata.getOrDefault(guid, new ElasticsearchMetadata()); + sortMetadata.addSort(sort); + if (this.searchMetadata.containsKey(guid)) { + this.searchMetadata.replace(guid, sortMetadata); + } else { + this.searchMetadata.put(guid, sortMetadata); + } + } + @Override public int hashCode() { return Objects.hash(queryType, searchParameters, queryText, type, classification, entities, attributes, fullTextResult, referredEntities, nextMarker); } diff --git a/intg/src/main/java/org/apache/atlas/model/discovery/ElasticsearchMetadata.java b/intg/src/main/java/org/apache/atlas/model/discovery/ElasticsearchMetadata.java index 270ea5e8d80..096ff82b83d 100644 --- a/intg/src/main/java/org/apache/atlas/model/discovery/ElasticsearchMetadata.java +++ b/intg/src/main/java/org/apache/atlas/model/discovery/ElasticsearchMetadata.java @@ -1,15 +1,20 @@ package org.apache.atlas.model.discovery; +import com.fasterxml.jackson.annotation.JsonInclude; import org.apache.commons.collections.MapUtils; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.ArrayList; public class ElasticsearchMetadata { private Map> highlights; + @JsonInclude(JsonInclude.Include.NON_NULL) + private ArrayList sort; + public Map> getHighlights() { return highlights; } @@ -23,6 +28,15 @@ public void addHighlights(Map> highlights) { } } + public Object getSort() { return sort; } + + public void addSort(ArrayList sort) { + if (sort.isEmpty()) { + this.sort = null; + } else { + this.sort = sort; + } + } @Override public String toString() { diff --git a/intg/src/main/java/org/apache/atlas/model/discovery/SearchParams.java b/intg/src/main/java/org/apache/atlas/model/discovery/SearchParams.java index 57000cc84f0..6179f291a5e 100644 --- a/intg/src/main/java/org/apache/atlas/model/discovery/SearchParams.java +++ b/intg/src/main/java/org/apache/atlas/model/discovery/SearchParams.java @@ -19,11 +19,15 @@ public class SearchParams { boolean excludeMeanings; boolean excludeClassifications; + boolean includeClassificationNames = false; + RequestMetadata requestMetadata = new RequestMetadata(); Async async = new Async(); boolean showHighlights; + boolean showSearchMetadata; + public String getQuery() { return getQuery(); } @@ -100,6 +104,14 @@ public void setExcludeMeanings(boolean excludeMeanings) { this.excludeMeanings = excludeMeanings; } + public boolean isIncludeClassificationNames() { + return includeClassificationNames; + } + + public void setIncludeClassificationNames(boolean includeClassificationNames) { + this.includeClassificationNames = includeClassificationNames; + } + public boolean isSaveSearchLog() { return requestMetadata.saveSearchLog; } @@ 
-144,10 +156,14 @@ public String getSearchInput() { return this.requestMetadata.getSearchInput(); } - public boolean isShowHighlights() { + public boolean getShowHighlights() { return showHighlights; } + public boolean getShowSearchMetadata() { + return showSearchMetadata; + } + static class RequestMetadata { private String searchInput; diff --git a/intg/src/main/java/org/apache/atlas/model/instance/AtlasEntityHeader.java b/intg/src/main/java/org/apache/atlas/model/instance/AtlasEntityHeader.java index be2819d7f20..c251811df13 100644 --- a/intg/src/main/java/org/apache/atlas/model/instance/AtlasEntityHeader.java +++ b/intg/src/main/java/org/apache/atlas/model/instance/AtlasEntityHeader.java @@ -71,8 +71,13 @@ public class AtlasEntityHeader extends AtlasStruct implements Serializable { private Date createTime = null; private Date updateTime = null; private String deleteHandler = null; + private Integer depth = null; + private Integer traversalOrder = null; + private Integer finishTime = null; + private Map collapse = null; + public AtlasEntityHeader() { this(null, null); } @@ -146,12 +151,24 @@ public AtlasEntityHeader(AtlasEntity entity) { } } - public String getGuid() { - return guid; + public String getGuid() { return guid; } + + public void setGuid(String guid) { this.guid = guid; } + + public Integer getDepth() { return depth; } + + public void setDepth(Integer depth) { this.depth = depth; } + + public Integer getTraversalOrder() { return traversalOrder; } + + public void setTraversalOrder(Integer traversalOrder) { this.traversalOrder = traversalOrder; } + + public Integer getFinishTime() { + return finishTime; } - public void setGuid(String guid) { - this.guid = guid; + public void setFinishTime(Integer finishTime) { + this.finishTime = finishTime; } public AtlasEntity.Status getStatus() { diff --git a/intg/src/main/java/org/apache/atlas/type/AtlasTypeRegistry.java b/intg/src/main/java/org/apache/atlas/type/AtlasTypeRegistry.java index 4cb7e4a185f..82628ce73e5 100644 --- a/intg/src/main/java/org/apache/atlas/type/AtlasTypeRegistry.java +++ b/intg/src/main/java/org/apache/atlas/type/AtlasTypeRegistry.java @@ -44,6 +44,9 @@ public class AtlasTypeRegistry { private static final Logger LOG = LoggerFactory.getLogger(AtlasTypeRegistry.class); private static final int DEFAULT_LOCK_MAX_WAIT_TIME_IN_SECONDS = 15; + public static final ArrayList TYPENAMES_TO_SKIP_SUPER_TYPE_CHECK = new ArrayList() {{ + add("Table"); + }}; protected RegistryData registryData; private final TypeRegistryUpdateSynchronizer updateSynchronizer; private final Set missingRelationshipDefs; diff --git a/repository/pom.xml b/repository/pom.xml index 10d8d876fb1..a2a1a4198f0 100755 --- a/repository/pom.xml +++ b/repository/pom.xml @@ -322,6 +322,12 @@ 3.0.0-SNAPSHOT + + org.hibernate + hibernate-validator + 4.3.2.Final + + diff --git a/repository/src/main/java/org/apache/atlas/discovery/EntityDiscoveryService.java b/repository/src/main/java/org/apache/atlas/discovery/EntityDiscoveryService.java index b1b120c3fbb..2a3390cfc99 100644 --- a/repository/src/main/java/org/apache/atlas/discovery/EntityDiscoveryService.java +++ b/repository/src/main/java/org/apache/atlas/discovery/EntityDiscoveryService.java @@ -44,7 +44,9 @@ import org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2; import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; import org.apache.atlas.repository.userprofile.UserProfileService; +import org.apache.atlas.repository.util.AccessControlUtils; import 
org.apache.atlas.searchlog.ESSearchLogger; +import org.apache.atlas.service.FeatureFlagStore; import org.apache.atlas.stats.StatsClient; import org.apache.atlas.type.*; import org.apache.atlas.type.AtlasBuiltInTypes.AtlasObjectIdType; @@ -75,9 +77,7 @@ import static org.apache.atlas.SortOrder.ASCENDING; import static org.apache.atlas.model.instance.AtlasEntity.Status.ACTIVE; import static org.apache.atlas.model.instance.AtlasEntity.Status.DELETED; -import static org.apache.atlas.repository.Constants.ASSET_ENTITY_TYPE; -import static org.apache.atlas.repository.Constants.OWNER_ATTRIBUTE; -import static org.apache.atlas.repository.Constants.VERTEX_INDEX_NAME; +import static org.apache.atlas.repository.Constants.*; import static org.apache.atlas.util.AtlasGremlinQueryProvider.AtlasGremlinQuery.BASIC_SEARCH_STATE_FILTER; import static org.apache.atlas.util.AtlasGremlinQueryProvider.AtlasGremlinQuery.TO_RANGE_LIST; @@ -1098,8 +1098,10 @@ private void prepareSearchResult(AtlasSearchResult ret, DirectIndexQueryResult i header.setCollapse(collapse); } } - - if (searchParams.isShowHighlights()) { + if (searchParams.getShowSearchMetadata()) { + ret.addHighlights(header.getGuid(), result.getHighLights()); + ret.addSort(header.getGuid(), result.getSort()); + } else if (searchParams.getShowHighlights()) { ret.addHighlights(header.getGuid(), result.getHighLights()); } @@ -1134,8 +1136,10 @@ public List searchUsingTermQualifiedName(int from, int size, } private String getIndexName(IndexSearchParams params) throws AtlasBaseException { + String vertexIndexName = getESIndex(); + if (StringUtils.isEmpty(params.getPersona()) && StringUtils.isEmpty(params.getPurpose())) { - return VERTEX_INDEX_NAME; + return vertexIndexName; } String qualifiedName = ""; @@ -1145,13 +1149,12 @@ private String getIndexName(IndexSearchParams params) throws AtlasBaseException qualifiedName = params.getPurpose(); } - String[] parts = qualifiedName.split("/"); - String aliasName = parts[parts.length - 1]; + String aliasName = AccessControlUtils.getESAliasName(qualifiedName); if (StringUtils.isNotEmpty(aliasName)) { if(params.isAccessControlExclusive()) { accessControlExclusiveDsl(params, aliasName); - aliasName = aliasName+","+VERTEX_INDEX_NAME; + aliasName = aliasName+","+vertexIndexName; } return aliasName; } else { diff --git a/repository/src/main/java/org/apache/atlas/discovery/EntityLineageService.java b/repository/src/main/java/org/apache/atlas/discovery/EntityLineageService.java index f3fc72e4e79..6fd6298ab5a 100644 --- a/repository/src/main/java/org/apache/atlas/discovery/EntityLineageService.java +++ b/repository/src/main/java/org/apache/atlas/discovery/EntityLineageService.java @@ -283,6 +283,7 @@ private AtlasLineageOnDemandInfo getLineageInfoOnDemand(String guid, AtlasLineag LineageOnDemandConstraints lineageConstraintsByGuid = getAndValidateLineageConstraintsByGuid(guid, atlasLineageOnDemandContext); AtlasLineageOnDemandInfo.LineageDirection direction = lineageConstraintsByGuid.getDirection(); + int level = 0; int depth = lineageConstraintsByGuid.getDepth(); AtlasLineageOnDemandInfo ret = initializeLineageOnDemandInfo(guid); @@ -293,33 +294,42 @@ private AtlasLineageOnDemandInfo getLineageInfoOnDemand(String guid, AtlasLineag AtomicInteger inputEntitiesTraversed = new AtomicInteger(0); AtomicInteger outputEntitiesTraversed = new AtomicInteger(0); + AtomicInteger traversalOrder = new AtomicInteger(1); if (isDataSet) { AtlasVertex datasetVertex = AtlasGraphUtilsV2.findByGuid(this.graph, guid); if (direction == 
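A rough sketch of the index-name selection shown in the getIndexName change above, with made-up names rather than the Atlas constants: without a persona or purpose the default vertex index is used; otherwise the derived alias is used, and in access-control-exclusive mode the default index is appended so both are searched.

public class IndexNameSketch {
    static final String DEFAULT_VERTEX_INDEX = "janusgraph_vertex_index"; // illustrative value, not the real constant

    static String chooseIndex(String personaOrPurposeAlias, boolean accessControlExclusive) {
        if (personaOrPurposeAlias == null || personaOrPurposeAlias.isEmpty()) {
            return DEFAULT_VERTEX_INDEX;                       // no persona/purpose: default index
        }
        return accessControlExclusive
                ? personaOrPurposeAlias + "," + DEFAULT_VERTEX_INDEX  // search alias plus default index
                : personaOrPurposeAlias;                               // search the alias only
    }

    public static void main(String[] args) {
        System.out.println(chooseIndex(null, false));
        System.out.println(chooseIndex("persona_alias_1", true));
    }
}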
AtlasLineageOnDemandInfo.LineageDirection.INPUT || direction == AtlasLineageOnDemandInfo.LineageDirection.BOTH) - traverseEdgesOnDemand(datasetVertex, true, depth, new HashSet<>(), atlasLineageOnDemandContext, ret, guid, inputEntitiesTraversed); + traverseEdgesOnDemand(datasetVertex, true, depth, level, new HashSet<>(), atlasLineageOnDemandContext, ret, guid, inputEntitiesTraversed, traversalOrder); if (direction == AtlasLineageOnDemandInfo.LineageDirection.OUTPUT || direction == AtlasLineageOnDemandInfo.LineageDirection.BOTH) - traverseEdgesOnDemand(datasetVertex, false, depth, new HashSet<>(), atlasLineageOnDemandContext, ret, guid, outputEntitiesTraversed); + traverseEdgesOnDemand(datasetVertex, false, depth, level, new HashSet<>(), atlasLineageOnDemandContext, ret, guid, outputEntitiesTraversed, traversalOrder); AtlasEntityHeader baseEntityHeader = entityRetriever.toAtlasEntityHeader(datasetVertex, atlasLineageOnDemandContext.getAttributes()); + setGraphTraversalMetadata(level, traversalOrder, baseEntityHeader); ret.getGuidEntityMap().put(guid, baseEntityHeader); } else { AtlasVertex processVertex = AtlasGraphUtilsV2.findByGuid(this.graph, guid); // make one hop to the next dataset vertices from process vertex and traverse with 'depth = depth - 1' if (direction == AtlasLineageOnDemandInfo.LineageDirection.INPUT || direction == AtlasLineageOnDemandInfo.LineageDirection.BOTH) { Iterator processEdges = processVertex.getEdges(AtlasEdgeDirection.OUT, PROCESS_INPUTS_EDGE).iterator(); - traverseEdgesOnDemand(processEdges, true, depth, atlasLineageOnDemandContext, ret, processVertex, guid, inputEntitiesTraversed); + traverseEdgesOnDemand(processEdges, true, depth, level, atlasLineageOnDemandContext, ret, processVertex, guid, inputEntitiesTraversed, traversalOrder); } if (direction == AtlasLineageOnDemandInfo.LineageDirection.OUTPUT || direction == AtlasLineageOnDemandInfo.LineageDirection.BOTH) { Iterator processEdges = processVertex.getEdges(AtlasEdgeDirection.OUT, PROCESS_OUTPUTS_EDGE).iterator(); - traverseEdgesOnDemand(processEdges, false, depth, atlasLineageOnDemandContext, ret, processVertex, guid, outputEntitiesTraversed); + traverseEdgesOnDemand(processEdges, false, depth, level, atlasLineageOnDemandContext, ret, processVertex, guid, outputEntitiesTraversed, traversalOrder); } } RequestContext.get().endMetricRecord(metricRecorder); return ret; } + private static void setGraphTraversalMetadata(int level, AtomicInteger traversalOrder, AtlasEntityHeader baseEntityHeader) { + baseEntityHeader.setDepth(level); + baseEntityHeader.setTraversalOrder(0); + baseEntityHeader.setFinishTime(traversalOrder.get()); + } - private void traverseEdgesOnDemand(Iterator processEdges, boolean isInput, int depth, AtlasLineageOnDemandContext atlasLineageOnDemandContext, AtlasLineageOnDemandInfo ret, AtlasVertex processVertex, String baseGuid, AtomicInteger entitiesTraversed) throws AtlasBaseException { + private void traverseEdgesOnDemand(Iterator processEdges, boolean isInput, int depth, int level, AtlasLineageOnDemandContext atlasLineageOnDemandContext, AtlasLineageOnDemandInfo ret, AtlasVertex processVertex, String baseGuid, AtomicInteger entitiesTraversed, AtomicInteger traversalOrder) throws AtlasBaseException { AtlasLineageOnDemandInfo.LineageDirection direction = isInput ? AtlasLineageOnDemandInfo.LineageDirection.INPUT : AtlasLineageOnDemandInfo.LineageDirection.OUTPUT; + int nextLevel = isInput ? 
level - 1: level + 1; + while (processEdges.hasNext()) { AtlasEdge processEdge = processEdges.next(); AtlasVertex datasetVertex = processEdge.getInVertex(); @@ -336,7 +346,8 @@ private void traverseEdgesOnDemand(Iterator processEdges, boolean isI if (incrementAndCheckIfRelationsLimitReached(processEdge, isInputEdge, atlasLineageOnDemandContext, ret, depth, entitiesTraversed, direction)) { break; } else { - addEdgeToResult(processEdge, ret, atlasLineageOnDemandContext); + addEdgeToResult(processEdge, ret, atlasLineageOnDemandContext, nextLevel, traversalOrder); + traversalOrder.incrementAndGet(); } String inGuid = AtlasGraphUtilsV2.getIdFromVertex(datasetVertex); @@ -346,17 +357,17 @@ private void traverseEdgesOnDemand(Iterator processEdges, boolean isI ret.getRelationsOnDemand().put(inGuid, new LineageInfoOnDemand(inGuidLineageConstrains)); } - traverseEdgesOnDemand(datasetVertex, isInput, depth - 1, new HashSet<>(), atlasLineageOnDemandContext, ret, baseGuid, entitiesTraversed); + traverseEdgesOnDemand(datasetVertex, isInput, depth - 1, nextLevel, new HashSet<>(), atlasLineageOnDemandContext, ret, baseGuid, entitiesTraversed, traversalOrder); } } - private void traverseEdgesOnDemand(AtlasVertex datasetVertex, boolean isInput, int depth, Set visitedVertices, AtlasLineageOnDemandContext atlasLineageOnDemandContext, AtlasLineageOnDemandInfo ret, String baseGuid, AtomicInteger entitiesTraversed) throws AtlasBaseException { + private void traverseEdgesOnDemand(AtlasVertex datasetVertex, boolean isInput, int depth, int level, Set visitedVertices, AtlasLineageOnDemandContext atlasLineageOnDemandContext, AtlasLineageOnDemandInfo ret, String baseGuid, AtomicInteger entitiesTraversed, AtomicInteger traversalOrder) throws AtlasBaseException { if (isEntityTraversalLimitReached(entitiesTraversed)) return; if (depth != 0) { // base condition of recursion for depth AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("traverseEdgesOnDemand"); AtlasLineageOnDemandInfo.LineageDirection direction = isInput ? AtlasLineageOnDemandInfo.LineageDirection.INPUT : AtlasLineageOnDemandInfo.LineageDirection.OUTPUT; - + int nextLevel = isInput ? 
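The on-demand lineage hunks above add a signed level (negative toward inputs, positive toward outputs), a shared traversal-order counter, and a finish time recorded once a vertex's subtree has been explored. The toy, self-contained DFS below shows the same bookkeeping; the graph model and class names are invented for the sketch, and the exact increment points differ slightly from the production code.

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;

public class LineageDfsSketch {
    static class NodeInfo {
        int level; int order; int finish;
        public String toString() { return "level=" + level + " order=" + order + " finish=" + finish; }
    }

    final Map<String, Set<String>> edges = new HashMap<>();
    final Map<String, NodeInfo> result = new HashMap<>();
    final AtomicInteger traversalOrder = new AtomicInteger(1);

    void traverse(String node, boolean isInput, int depth, int level, Set<String> visited) {
        if (depth == 0 || !visited.add(node)) return;        // depth budget and cycle guard
        int nextLevel = isInput ? level - 1 : level + 1;      // inputs move "left", outputs "right"
        for (String next : edges.getOrDefault(node, Set.of())) {
            NodeInfo info = new NodeInfo();
            info.level = nextLevel;
            info.order = traversalOrder.getAndIncrement();    // discovery order
            result.put(next, info);
            traverse(next, isInput, depth - 1, nextLevel, visited);
            info.finish = traversalOrder.get();               // finish time, taken after the subtree
        }
    }

    public static void main(String[] args) {
        LineageDfsSketch g = new LineageDfsSketch();
        g.edges.put("a", Set.of("b"));
        g.edges.put("b", Set.of("c"));
        g.traverse("a", false, 3, 0, new HashSet<>());
        g.result.forEach((n, info) -> System.out.println(n + ": " + info));
    }
}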
level - 1: level + 1; // keep track of visited vertices to avoid circular loop visitedVertices.add(getId(datasetVertex)); @@ -385,7 +396,7 @@ private void traverseEdgesOnDemand(AtlasVertex datasetVertex, boolean isInput, i else continue; } else { - addEdgeToResult(incomingEdge, ret, atlasLineageOnDemandContext); + addEdgeToResult(incomingEdge, ret, atlasLineageOnDemandContext, level, traversalOrder); } AtlasPerfMetrics.MetricRecorder traverseEdgesOnDemandGetEdgesOut = RequestContext.get().startMetricRecord("traverseEdgesOnDemandGetEdgesOut"); @@ -413,13 +424,16 @@ private void traverseEdgesOnDemand(AtlasVertex datasetVertex, boolean isInput, i else continue; } else { - addEdgeToResult(outgoingEdge, ret, atlasLineageOnDemandContext); + addEdgeToResult(outgoingEdge, ret, atlasLineageOnDemandContext, nextLevel, traversalOrder); entitiesTraversed.incrementAndGet(); + traversalOrder.incrementAndGet(); if (isEntityTraversalLimitReached(entitiesTraversed)) setEntityLimitReachedFlag(isInput, ret); } if (entityVertex != null && !visitedVertices.contains(getId(entityVertex))) { - traverseEdgesOnDemand(entityVertex, isInput, depth - 1, visitedVertices, atlasLineageOnDemandContext, ret, baseGuid, entitiesTraversed); // execute inner depth + traverseEdgesOnDemand(entityVertex, isInput, depth - 1, nextLevel, visitedVertices, atlasLineageOnDemandContext, ret, baseGuid, entitiesTraversed, traversalOrder); // execute inner depth + AtlasEntityHeader traversedEntity = ret.getGuidEntityMap().get(AtlasGraphUtilsV2.getIdFromVertex(entityVertex)); + traversedEntity.setFinishTime(traversalOrder.get()); } } } @@ -444,11 +458,18 @@ private void traverseEdgesUsingBFS(String baseGuid, AtlasLineageListContext line Queue traversalQueue = new LinkedList<>(); AtlasVertex baseVertex = AtlasGraphUtilsV2.findByGuid(this.graph, baseGuid); - enqueueNeighbours(baseVertex, validateEntityTypeAndCheckIfDataSet(baseGuid), lineageListContext, traversalQueue, visitedVertices, skippedVertices); + boolean isBaseNodeDataset = validateEntityTypeAndCheckIfDataSet(baseGuid); + enqueueNeighbours(baseVertex, isBaseNodeDataset, lineageListContext, traversalQueue, visitedVertices, skippedVertices); int currentDepth = 0; + int currentLevel = isBaseNodeDataset? 
0: 1; while (!traversalQueue.isEmpty() && !lineageListContext.isEntityLimitReached() && currentDepth < lineageListContext.getDepth()) { currentDepth++; + + // update level at every alternate depth + if ((isBaseNodeDataset && currentDepth % 2 != 0) || (!isBaseNodeDataset && currentDepth % 2 == 0)) + currentLevel++; + int entitiesInCurrentDepth = traversalQueue.size(); for (int i = 0; i < entitiesInCurrentDepth; i++) { if (lineageListContext.isEntityLimitReached()) @@ -471,7 +492,7 @@ private void traverseEdgesUsingBFS(String baseGuid, AtlasLineageListContext line } lineageListContext.incrementEntityCount(); - appendToResult(currentVertex, lineageListContext, ret); + appendToResult(currentVertex, lineageListContext, ret, currentLevel); enqueueNeighbours(currentVertex, isDataset, lineageListContext, traversalQueue, visitedVertices, skippedVertices); if (isLastEntityInLastDepth(lineageListContext.getDepth(), currentDepth, entitiesInCurrentDepth, i)) { ret.setHasMore(false); @@ -518,8 +539,10 @@ private void enqueueNeighbours(AtlasVertex currentVertex, boolean isDataset, Atl } } - private void appendToResult(AtlasVertex currentVertex, AtlasLineageListContext lineageListContext, AtlasLineageListInfo ret) throws AtlasBaseException { - ret.getEntities().add(entityRetriever.toAtlasEntityHeader(currentVertex, lineageListContext.getAttributes())); + private void appendToResult(AtlasVertex currentVertex, AtlasLineageListContext lineageListContext, AtlasLineageListInfo ret, int currentLevel) throws AtlasBaseException { + AtlasEntityHeader entity = entityRetriever.toAtlasEntityHeader(currentVertex, lineageListContext.getAttributes()); + entity.setDepth(currentLevel); + ret.getEntities().add(entity); } private static void addEntitiesToCache(AtlasVertex vertex) { @@ -657,7 +680,7 @@ private List getFilteredAtlasEdges(AtlasVertex outVertex, AtlasEdgeDi } private boolean isEntityTraversalLimitReached(AtomicInteger entitiesTraversed) { - return entitiesTraversed.get() == getLineageMaxNodeAllowedCount(); + return entitiesTraversed.get() >= getLineageMaxNodeAllowedCount(); } @Override @@ -878,9 +901,9 @@ private void addEdgeToResult(AtlasEdge edge, AtlasLineageInfo lineageInfo, } } - private void addEdgeToResult(AtlasEdge edge, AtlasLineageOnDemandInfo lineageInfo, AtlasLineageOnDemandContext atlasLineageOnDemandContext) throws AtlasBaseException { + private void addEdgeToResult(AtlasEdge edge, AtlasLineageOnDemandInfo lineageInfo, AtlasLineageOnDemandContext atlasLineageOnDemandContext, int level, AtomicInteger traversalOrder) throws AtlasBaseException { if (!lineageContainsVisitedEdgeV2(lineageInfo, edge)) { - processEdge(edge, lineageInfo, atlasLineageOnDemandContext); + processEdge(edge, lineageInfo, atlasLineageOnDemandContext, level, traversalOrder); } } @@ -1450,26 +1473,41 @@ private void processEdge(final AtlasEdge edge, final Map entities, final Set relations, final Set visitedEdges, final Set attributes) throws AtlasBaseException { + private void processEdge(final AtlasEdge edge, final Map entities, final Set relations, final Set visitedEdges, final Set attributes, int level, AtomicInteger traversalOrder) throws AtlasBaseException { AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processEdge"); AtlasVertex inVertex = edge.getInVertex(); AtlasVertex outVertex = edge.getOutVertex(); + + String inTypeName = AtlasGraphUtilsV2.getTypeName(inVertex); + AtlasEntityType inEntityType = atlasTypeRegistry.getEntityTypeByName(inTypeName); + if (inEntityType == null) { + 
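In the BFS-based lineage list above, datasets and processes alternate by depth, so the dataset-distance level is bumped only on every other depth, with the parity chosen by whether the starting vertex is a dataset. A small standalone illustration of that rule:

public class BfsLevelSketch {
    public static void main(String[] args) {
        boolean baseIsDataset = true;
        int currentLevel = baseIsDataset ? 0 : 1;
        for (int currentDepth = 1; currentDepth <= 6; currentDepth++) {
            // level advances at alternate depths; parity depends on the starting node type
            if ((baseIsDataset && currentDepth % 2 != 0) || (!baseIsDataset && currentDepth % 2 == 0)) {
                currentLevel++;
            }
            System.out.println("depth=" + currentDepth + " level=" + currentLevel);
        }
    }
}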
throw new AtlasBaseException(AtlasErrorCode.TYPE_NAME_NOT_FOUND, inTypeName); + } + boolean inIsProcess = inEntityType.getTypeAndAllSuperTypes().contains(PROCESS_SUPER_TYPE); + String inGuid = AtlasGraphUtilsV2.getIdFromVertex(inVertex); String outGuid = AtlasGraphUtilsV2.getIdFromVertex(outVertex); String relationGuid = AtlasGraphUtilsV2.getEncodedProperty(edge, RELATIONSHIP_GUID_PROPERTY_KEY, String.class); boolean isInputEdge = edge.getLabel().equalsIgnoreCase(PROCESS_INPUTS_EDGE); - if (!entities.containsKey(inGuid)) { AtlasEntityHeader entityHeader = entityRetriever.toAtlasEntityHeader(inVertex, attributes); + if (!inIsProcess) { + entityHeader.setDepth(level); + entityHeader.setTraversalOrder(traversalOrder.get()); + } entities.put(inGuid, entityHeader); } if (!entities.containsKey(outGuid)) { AtlasEntityHeader entityHeader = entityRetriever.toAtlasEntityHeader(outVertex, attributes); + if (inIsProcess) { + entityHeader.setDepth(level); + entityHeader.setTraversalOrder(traversalOrder.get()); + } entities.put(outGuid, entityHeader); } if (isInputEdge) { diff --git a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasInstanceConverter.java b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasInstanceConverter.java index c1aef7b5098..14fd5563df9 100644 --- a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasInstanceConverter.java +++ b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasInstanceConverter.java @@ -322,6 +322,16 @@ public AtlasEntity getAndCacheEntity(String guid, boolean ignoreRelationshipAttr return entity; } + public AtlasEntity getEntity(String guid, boolean ignoreRelationshipAttributes) throws AtlasBaseException { + AtlasEntity entity = null; + if (ignoreRelationshipAttributes) { + entity = entityGraphRetrieverIgnoreRelationshipAttrs.toAtlasEntity(guid); + } else { + entity = entityGraphRetriever.toAtlasEntity(guid); + } + return entity; + } + public AtlasEntityWithExtInfo getAndCacheEntityExtInfo(String guid) throws AtlasBaseException { RequestContext context = RequestContext.get(); diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java b/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java index 987ceec296c..179d915df98 100755 --- a/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java +++ b/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java @@ -73,6 +73,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.stream.Collectors; import static org.apache.atlas.AtlasErrorCode.RELATIONSHIP_CREATE_INVALID_PARAMS; import static org.apache.atlas.model.instance.AtlasEntity.Status.ACTIVE; @@ -376,6 +377,37 @@ public static AtlasVertex getClassificationVertex(AtlasVertex entityVertex, Stri return ret; } + public static List getAllClassificationVerticesByClassificationName(AtlasGraph graph, String classificationName) { + Iterable vertices = graph.query().has(TYPE_NAME_PROPERTY_KEY, classificationName).vertices(); + if (vertices == null) { + return Collections.emptyList(); + } + return IteratorUtils.toList(vertices.iterator()); + } + + public static List getAllAssetsWithClassificationAttached(AtlasGraph graph, String classificationName) { + Iterable classificationVertices = graph.query().has(TYPE_NAME_PROPERTY_KEY, classificationName).vertices(); + if (classificationVertices == null) { + return Collections.emptyList(); + } + List classificationVerticesList = 
IteratorUtils.toList(classificationVertices.iterator()); + LOG.info("classificationVerticesList size: {}", classificationVerticesList.size()); + HashSet entityVerticesSet = new HashSet<>(); + for (AtlasVertex classificationVertex : classificationVerticesList) { + Iterable attachedVertices = classificationVertex.query() + .direction(AtlasEdgeDirection.IN) + .label(CLASSIFICATION_LABEL).vertices(); + if (attachedVertices != null) { + Iterator attachedVerticesIterator = attachedVertices.iterator(); + while (attachedVerticesIterator.hasNext()) { + entityVerticesSet.add(attachedVerticesIterator.next()); + } + LOG.info("entityVerticesSet size: {}", entityVerticesSet.size()); + } + } + + return entityVerticesSet.stream().collect(Collectors.toList()); + } public static AtlasEdge getClassificationEdge(AtlasVertex entityVertex, AtlasVertex classificationVertex) { AtlasEdge ret = null; Iterable edges = entityVertex.query().direction(AtlasEdgeDirection.OUT).label(CLASSIFICATION_LABEL) @@ -785,7 +817,18 @@ public static List getTraitNames(AtlasVertex entityVertex) { public static List getPropagatedTraitNames(AtlasVertex entityVertex) { return getTraitNames(entityVertex, true); } - + public static List getAllTraitNamesFromAttribute(AtlasVertex entityVertex) { + List ret = new ArrayList<>(); + List traitNames = entityVertex.getMultiValuedProperty(TRAIT_NAMES_PROPERTY_KEY, String.class); + if (traitNames != null) { + ret.addAll(traitNames); + } + List propagatedTraitNames = entityVertex.getMultiValuedProperty(PROPAGATED_TRAIT_NAMES_PROPERTY_KEY, String.class); + if (propagatedTraitNames != null) { + ret.addAll(propagatedTraitNames); + } + return ret; + } public static List getAllTraitNames(AtlasVertex entityVertex) { return getTraitNames(entityVertex, null); } @@ -897,18 +940,18 @@ public static boolean propagatedClassificationAttachedToVertex(AtlasVertex class } public static List getClassificationEdges(AtlasVertex entityVertex) { - return getClassificationEdges(entityVertex, false); + return getClassificationEdges(entityVertex, false, null); } public static List getPropagatedClassificationEdges(AtlasVertex entityVertex) { - return getClassificationEdges(entityVertex, true); + return getClassificationEdges(entityVertex, true, null); } public static List getAllClassificationEdges(AtlasVertex entityVertex) { - return getClassificationEdges(entityVertex, null); + return getClassificationEdges(entityVertex, null, null); } - public static List getClassificationEdges(AtlasVertex entityVertex, Boolean propagated) { + public static List getClassificationEdges(AtlasVertex entityVertex, Boolean propagated, String typeName) { List ret = new ArrayList<>(); AtlasVertexQuery query = entityVertex.query().direction(AtlasEdgeDirection.OUT).label(CLASSIFICATION_LABEL); @@ -916,6 +959,10 @@ public static List getClassificationEdges(AtlasVertex entityVertex, B query = query.has(CLASSIFICATION_EDGE_IS_PROPAGATED_PROPERTY_KEY, propagated); } + if (StringUtils.isNotEmpty(typeName)) { + query = query.has(CLASSIFICATION_EDGE_NAME_PROPERTY_KEY, typeName); + } + Iterable edges = query.edges(); if (edges != null) { @@ -1913,6 +1960,28 @@ public static Iterator getActiveVertices(AtlasVertex vertex, String } } + public static Iterator getAllChildrenVertices(AtlasVertex vertex, String childrenEdgeLabel) throws AtlasBaseException { + return getAllVertices(vertex, childrenEdgeLabel, AtlasEdgeDirection.OUT); + } + + public static Iterator getAllVertices(AtlasVertex vertex, String childrenEdgeLabel, AtlasEdgeDirection direction) throws 
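getAllAssetsWithClassificationAttached above walks from every classification vertex over incoming classification edges and collects the attached entity vertices into a set so duplicates collapse. A tiny map-based sketch of that collection step (not the Atlas graph API; the classification and entity names are invented):

import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class ClassificationLookupSketch {
    public static void main(String[] args) {
        // classification vertex -> entities attached to it (simulating incoming classification edges)
        Map<String, List<String>> attachedEntities = Map.of(
                "PII#1", List.of("table-1", "column-7"),
                "PII#2", List.of("table-1", "table-9"));

        Set<String> entityVertices = new HashSet<>();
        for (List<String> attached : attachedEntities.values()) {
            entityVertices.addAll(attached);   // duplicates collapse here
        }
        System.out.println(entityVertices);    // e.g. [table-1, column-7, table-9]
    }
}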
AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("CategoryPreProcessor.getEdges"); + + try { + return vertex.query() + .direction(direction) + .label(childrenEdgeLabel) + .vertices() + .iterator(); + } catch (Exception e) { + LOG.error("Error while getting all children of category for edge label " + childrenEdgeLabel, e); + throw new AtlasBaseException(AtlasErrorCode.INTERNAL_ERROR, e); + } + finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + private static Set parseLabelsString(String labels) { Set ret = new HashSet<>(); diff --git a/repository/src/main/java/org/apache/atlas/repository/store/aliasstore/ESAliasStore.java b/repository/src/main/java/org/apache/atlas/repository/store/aliasstore/ESAliasStore.java index cd53a5c279f..1991106b965 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/aliasstore/ESAliasStore.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/aliasstore/ESAliasStore.java @@ -213,7 +213,7 @@ private void personaPolicyToESDslClauses(List policies, for (String asset : assets) { terms.add(asset); - allowClauseList.add(mapOf("wildcard", mapOf(QUALIFIED_NAME, asset + "/*"))); + allowClauseList.add(mapOf("wildcard", mapOf(QUALIFIED_NAME, asset + "*"))); } } else if (getPolicyActions(policy).contains(ACCESS_READ_PERSONA_SUB_DOMAIN)) { diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasEntityStore.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasEntityStore.java index 02022fe0401..912799cdd6d 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasEntityStore.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasEntityStore.java @@ -25,7 +25,6 @@ import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo; import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; import org.apache.atlas.model.instance.AtlasEntityHeader; -import org.apache.atlas.model.instance.AtlasEntityHeaders; import org.apache.atlas.model.instance.AtlasObjectId; import org.apache.atlas.model.instance.AtlasHasLineageRequests; import org.apache.atlas.model.instance.EntityMutationResponse; @@ -266,6 +265,11 @@ EntityMutationResponse deleteByUniqueAttributes(List objectIds) */ EntityMutationResponse deleteByIds(List guid) throws AtlasBaseException; + /* + * Repair classification mappings + */ + public void repairClassificationMappings(final String guid) throws AtlasBaseException; + /* * Return list of deleted entity guids */ @@ -359,4 +363,6 @@ EntityMutationResponse deleteByUniqueAttributes(List objectIds) void repairMeaningAttributeForTerms(List termGuids) throws AtlasBaseException; + void repairAccesscontrolAlias(String guid) throws AtlasBaseException; + } diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasEntityStoreV2.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasEntityStoreV2.java index 6e3487f8d37..83df0fe2efe 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasEntityStoreV2.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasEntityStoreV2.java @@ -49,6 +49,7 @@ import org.apache.atlas.repository.graphdb.AtlasVertex; import org.apache.atlas.repository.patches.PatchContext; import org.apache.atlas.repository.patches.ReIndexPatch; +import org.apache.atlas.repository.store.aliasstore.ESAliasStore; import 
org.apache.atlas.repository.store.graph.AtlasEntityStore; import org.apache.atlas.repository.store.graph.AtlasRelationshipStore; import org.apache.atlas.repository.store.graph.EntityGraphDiscovery; @@ -58,10 +59,15 @@ import org.apache.atlas.repository.store.graph.v1.RestoreHandlerV1; import org.apache.atlas.repository.store.graph.v2.preprocessor.AuthPolicyPreProcessor; import org.apache.atlas.repository.store.graph.v2.preprocessor.ConnectionPreProcessor; +import org.apache.atlas.repository.store.graph.v2.preprocessor.accesscontrol.StakeholderPreProcessor; +import org.apache.atlas.repository.store.graph.v2.preprocessor.contract.ContractPreProcessor; +import org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh.StakeholderTitlePreProcessor; import org.apache.atlas.repository.store.graph.v2.preprocessor.resource.LinkPreProcessor; import org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessor; import org.apache.atlas.repository.store.graph.v2.preprocessor.accesscontrol.PersonaPreProcessor; import org.apache.atlas.repository.store.graph.v2.preprocessor.accesscontrol.PurposePreProcessor; +import org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh.DataProductPreProcessor; +import org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh.DataDomainPreProcessor; import org.apache.atlas.repository.store.graph.v2.preprocessor.glossary.CategoryPreProcessor; import org.apache.atlas.repository.store.graph.v2.preprocessor.glossary.GlossaryPreProcessor; import org.apache.atlas.repository.store.graph.v2.preprocessor.glossary.TermPreProcessor; @@ -110,6 +116,7 @@ import static org.apache.atlas.repository.graph.GraphHelper.getStatus; import static org.apache.atlas.repository.store.graph.v2.EntityGraphMapper.validateLabels; import static org.apache.atlas.repository.store.graph.v2.tasks.MeaningsTaskFactory.*; +import static org.apache.atlas.repository.util.AccessControlUtils.REL_ATTR_POLICIES; import static org.apache.atlas.type.Constants.HAS_LINEAGE; import static org.apache.atlas.type.Constants.HAS_LINEAGE_VALID; import static org.apache.atlas.type.Constants.MEANINGS_TEXT_PROPERTY_KEY; @@ -142,6 +149,8 @@ public class AtlasEntityStoreV2 implements AtlasEntityStore { private final AtlasRelationshipStore atlasRelationshipStore; private final FeatureFlagStore featureFlagStore; + private final ESAliasStore esAliasStore; + @Inject public AtlasEntityStoreV2(AtlasGraph graph, DeleteHandlerDelegate deleteDelegate, RestoreHandlerV1 restoreHandlerV1, AtlasTypeRegistry typeRegistry, IAtlasEntityChangeNotifier entityChangeNotifier, EntityGraphMapper entityGraphMapper, TaskManagement taskManagement, @@ -158,6 +167,7 @@ public AtlasEntityStoreV2(AtlasGraph graph, DeleteHandlerDelegate deleteDelegate this.taskManagement = taskManagement; this.atlasRelationshipStore = atlasRelationshipStore; this.featureFlagStore = featureFlagStore; + this.esAliasStore = new ESAliasStore(graph, entityRetriever); try { this.discovery = new EntityDiscoveryService(typeRegistry, graph, null, null, null, null); @@ -913,6 +923,30 @@ public String getGuidByUniqueAttributes(AtlasEntityType entityType, Map repairClassificationMappings({})", guid); + } + + if (StringUtils.isEmpty(guid)) { + throw new AtlasBaseException(AtlasErrorCode.INSTANCE_GUID_NOT_FOUND, guid); + } + + AtlasVertex entityVertex = AtlasGraphUtilsV2.findByGuid(graph, guid); + + if (entityVertex == null) { + throw new AtlasBaseException(AtlasErrorCode.INSTANCE_GUID_NOT_FOUND, guid); + } + + 
entityGraphMapper.repairClassificationMappings(entityVertex); + + if (LOG.isDebugEnabled()) { + LOG.debug("<== repairClassificationMappings({})", guid); + } + } + @Override @GraphTransaction public void addClassifications(final String guid, final List classifications) throws AtlasBaseException { @@ -1535,8 +1569,7 @@ private void executePreProcessor(EntityMutationContext context) throws AtlasBase PreProcessor preProcessor; List copyOfCreated = new ArrayList<>(context.getCreatedEntities()); - for (int i = 0; i < copyOfCreated.size() ; i++) { - AtlasEntity entity = ((List) context.getCreatedEntities()).get(i); + for (AtlasEntity entity : copyOfCreated) { entityType = context.getType(entity.getGuid()); preProcessor = getPreProcessor(entityType.getTypeName()); @@ -1546,8 +1579,7 @@ private void executePreProcessor(EntityMutationContext context) throws AtlasBase } List copyOfUpdated = new ArrayList<>(context.getUpdatedEntities()); - for (int i = 0; i < copyOfUpdated.size() ; i++) { - AtlasEntity entity = ((List) context.getUpdatedEntities()).get(i); + for (AtlasEntity entity: copyOfUpdated) { entityType = context.getType(entity.getGuid()); preProcessor = getPreProcessor(entityType.getTypeName()); @@ -1800,6 +1832,14 @@ public PreProcessor getPreProcessor(String typeName) { preProcessor = new CategoryPreProcessor(typeRegistry, entityRetriever, graph, taskManagement, entityGraphMapper); break; + case DATA_DOMAIN_ENTITY_TYPE: + preProcessor = new DataDomainPreProcessor(typeRegistry, entityRetriever, graph); + break; + + case DATA_PRODUCT_ENTITY_TYPE: + preProcessor = new DataProductPreProcessor(typeRegistry, entityRetriever, graph, this); + break; + case QUERY_ENTITY_TYPE: preProcessor = new QueryPreProcessor(typeRegistry, entityRetriever); break; @@ -1821,7 +1861,11 @@ public PreProcessor getPreProcessor(String typeName) { break; case POLICY_ENTITY_TYPE: - preProcessor = new AuthPolicyPreProcessor(graph, typeRegistry, entityRetriever, featureFlagStore); + preProcessor = new AuthPolicyPreProcessor(graph, typeRegistry, entityRetriever); + break; + + case STAKEHOLDER_ENTITY_TYPE: + preProcessor = new StakeholderPreProcessor(graph, typeRegistry, entityRetriever, this); break; case CONNECTION_ENTITY_TYPE: @@ -1835,6 +1879,14 @@ public PreProcessor getPreProcessor(String typeName) { case README_ENTITY_TYPE: preProcessor = new ReadmePreProcessor(typeRegistry, entityRetriever); break; + + case CONTRACT_ENTITY_TYPE: + preProcessor = new ContractPreProcessor(graph, typeRegistry, entityRetriever, storeDifferentialAudits, discovery); + break; + + case STAKEHOLDER_TITLE_ENTITY_TYPE: + preProcessor = new StakeholderTitlePreProcessor(graph, typeRegistry, entityRetriever); + break; } return preProcessor; @@ -2656,6 +2708,35 @@ private void repairMeanings(AtlasVertex assetVertex) { } } + @Override + public void repairAccesscontrolAlias(String guid) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metric = RequestContext.get().startMetricRecord("repairAlias"); + // Fetch accesscontrolEntity with extInfo + AtlasEntity.AtlasEntityWithExtInfo accesscontrolEntity = entityRetriever.toAtlasEntityWithExtInfo(guid); + + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_UPDATE, new AtlasEntityHeader(accesscontrolEntity.getEntity()))); + + // Validate accesscontrolEntity status + if (accesscontrolEntity.getEntity().getStatus() != ACTIVE) { + throw new AtlasBaseException(AtlasErrorCode.INSTANCE_GUID_DELETED, guid); + } + + // Validate accesscontrolEntity type + 
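The getPreProcessor additions above extend a per-type-name switch with the new data-mesh handlers. A hedged sketch of that dispatch shape, using placeholder type names and a throwaway PreProcessor interface rather than the real one:

public class PreProcessorDispatchSketch {
    interface PreProcessor { void process(String entityGuid); }

    static PreProcessor forType(String typeName) {
        switch (typeName) {
            case "DataDomain":       return guid -> System.out.println("domain rules for " + guid);
            case "DataProduct":      return guid -> System.out.println("product rules for " + guid);
            case "StakeholderTitle": return guid -> System.out.println("title rules for " + guid);
            default:                 return null; // no pre-processing for other types
        }
    }

    public static void main(String[] args) {
        PreProcessor p = forType("DataProduct");
        if (p != null) p.process("guid-123");
    }
}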
String entityType = accesscontrolEntity.getEntity().getTypeName(); + if (!PERSONA_ENTITY_TYPE.equals(entityType)) { + throw new AtlasBaseException(AtlasErrorCode.OPERATION_NOT_SUPPORTED, entityType); + } + + List policies = (List) accesscontrolEntity.getEntity().getRelationshipAttribute(REL_ATTR_POLICIES); + for (AtlasObjectId policy : policies) { + accesscontrolEntity.addReferredEntity(entityRetriever.toAtlasEntity(policy)); + } + + // Rebuild alias + this.esAliasStore.updateAlias(accesscontrolEntity, null); + + RequestContext.get().endMetricRecord(metric); + } } diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasGraphUtilsV2.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasGraphUtilsV2.java index 5f97d5645cd..42d30d39ca4 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasGraphUtilsV2.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasGraphUtilsV2.java @@ -35,8 +35,9 @@ import org.apache.atlas.type.AtlasEntityType; import org.apache.atlas.type.AtlasEnumType; import org.apache.atlas.type.AtlasStructType; -import org.apache.atlas.type.AtlasStructType.AtlasAttribute; +import org.apache.atlas.type.AtlasTypeRegistry; import org.apache.atlas.type.AtlasType; +import org.apache.atlas.type.AtlasStructType.AtlasAttribute; import org.apache.atlas.util.FileUtils; import org.apache.atlas.utils.AtlasPerfMetrics; import org.apache.atlas.utils.AtlasPerfMetrics.MetricRecorder; @@ -349,14 +350,16 @@ public static AtlasVertex findByUniqueAttributes(AtlasGraph graph, AtlasEntityTy vertex = findByTypeAndUniquePropertyName(graph, typeName, uniqAttrValues); // if no instance of given typeName is found, try to find an instance of type's sub-type - if (vertex == null && !entitySubTypes.isEmpty()) { + // Added exception for few types to solve https://atlanhq.atlassian.net/browse/PLT-1638 + if (vertex == null && !entitySubTypes.isEmpty() && !AtlasTypeRegistry.TYPENAMES_TO_SKIP_SUPER_TYPE_CHECK.contains(typeName)) { vertex = findBySuperTypeAndUniquePropertyName(graph, typeName, uniqAttrValues); } } else { vertex = findByTypeAndPropertyName(graph, typeName, attrNameValues); // if no instance of given typeName is found, try to find an instance of type's sub-type - if (vertex == null && !entitySubTypes.isEmpty()) { + // Added exception for few types to solve https://atlanhq.atlassian.net/browse/PLT-1638 + if (vertex == null && !entitySubTypes.isEmpty() && !AtlasTypeRegistry.TYPENAMES_TO_SKIP_SUPER_TYPE_CHECK.contains(typeName)) { vertex = findBySuperTypeAndPropertyName(graph, typeName, attrNameValues); } } diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasRelationshipStoreV2.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasRelationshipStoreV2.java index 3e8c8b9e424..afdf2825f14 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasRelationshipStoreV2.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasRelationshipStoreV2.java @@ -68,12 +68,8 @@ import static org.apache.atlas.model.typedef.AtlasRelationshipDef.PropagateTags.NONE; import static org.apache.atlas.model.typedef.AtlasRelationshipDef.PropagateTags.ONE_TO_TWO; import static org.apache.atlas.model.typedef.AtlasRelationshipDef.PropagateTags.TWO_TO_ONE; -import static org.apache.atlas.repository.Constants.ENTITY_TYPE_PROPERTY_KEY; -import static org.apache.atlas.repository.Constants.HOME_ID_KEY; -import 
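repairAccesscontrolAlias above follows a fixed order: authorize the caller, reject deleted entities, accept only the Persona type, pull the persona's policies into the payload, then rebuild the ES alias. The outline below is a simplified stand-in (the Entity and AliasStore types are invented for the sketch, and the authorization call is skipped):

import java.util.List;

public class AliasRepairSketch {
    static class Entity { String typeName; String status; List<String> policyGuids; }
    interface AliasStore { void updateAlias(Entity personaWithPolicies); }

    static void repairAlias(Entity persona, AliasStore aliasStore) {
        if (!"ACTIVE".equals(persona.status)) {
            throw new IllegalStateException("persona is deleted");     // deleted entities are rejected
        }
        if (!"Persona".equals(persona.typeName)) {
            throw new IllegalArgumentException("only Persona is supported: " + persona.typeName);
        }
        // In the real flow the policies are resolved to full entities first,
        // so the alias filter can be rebuilt from their resources.
        aliasStore.updateAlias(persona);
    }

    public static void main(String[] args) {
        Entity persona = new Entity();
        persona.typeName = "Persona";
        persona.status = "ACTIVE";
        persona.policyGuids = List.of("policy-1", "policy-2");
        repairAlias(persona, p -> System.out.println("rebuilt alias from " + p.policyGuids.size() + " policies"));
    }
}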
static org.apache.atlas.repository.Constants.PROVENANCE_TYPE_KEY; -import static org.apache.atlas.repository.Constants.RELATIONSHIPTYPE_TAG_PROPAGATION_KEY; -import static org.apache.atlas.repository.Constants.RELATIONSHIP_GUID_PROPERTY_KEY; -import static org.apache.atlas.repository.Constants.VERSION_PROPERTY_KEY; +import static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.graph.GraphHelper.getTypeName; import static org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2.*; import static org.apache.atlas.repository.store.graph.v2.tasks.ClassificationPropagateTaskFactory.CLASSIFICATION_PROPAGATION_RELATIONSHIP_UPDATE; @@ -104,6 +100,16 @@ public class AtlasRelationshipStoreV2 implements AtlasRelationshipStore { private static final String END_2_DOC_ID_KEY = "end2DocId"; private static final String ES_DOC_ID_MAP_KEY = "esDocIdMap"; + private static Set EXCLUDE_MUTATION_REL_TYPE_NAMES = new HashSet() {{ + add(REL_DOMAIN_TO_DOMAINS); + add(REL_DOMAIN_TO_PRODUCTS); + add(REL_DOMAIN_TO_STAKEHOLDERS); + add(REL_STAKEHOLDER_TITLE_TO_STAKEHOLDERS); + add(REL_POLICY_TO_ACCESS_CONTROL); + add(REL_DATA_PRODUCT_TO_OUTPUT_PORTS); + add(REL_DATA_PRODUCT_TO_INPUT_PORTS); + }}; + public enum RelationshipMutation { RELATIONSHIP_CREATE, RELATIONSHIP_UPDATE, @@ -129,6 +135,8 @@ public AtlasRelationship create(AtlasRelationship relationship) throws AtlasBase LOG.debug("==> create({})", relationship); } + validateRelationshipType(relationship.getTypeName()); + AtlasVertex end1Vertex = getVertexFromEndPoint(relationship.getEnd1()); AtlasVertex end2Vertex = getVertexFromEndPoint(relationship.getEnd2()); @@ -161,6 +169,8 @@ public AtlasRelationship update(AtlasRelationship relationship) throws AtlasBase AtlasVertex end1Vertex = edge.getOutVertex(); AtlasVertex end2Vertex = edge.getInVertex(); + validateRelationshipType(edgeType); + // update shouldn't change endType if (StringUtils.isNotEmpty(relationship.getTypeName()) && !StringUtils.equalsIgnoreCase(edgeType, relationship.getTypeName())) { throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIP_UPDATE_TYPE_CHANGE_NOT_ALLOWED, guid, edgeType, relationship.getTypeName()); @@ -320,6 +330,8 @@ public void deleteByIds(List guids) throws AtlasBaseException { throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIP_ALREADY_DELETED, guid); } + validateRelationshipType(getTypeName(edge)); + edgesToDelete.add(edge); AtlasRelationship relationshipToDelete = entityRetriever.mapEdgeToAtlasRelationship(edge); deletedRelationships.add(relationshipToDelete); @@ -368,6 +380,9 @@ public void deleteById(String guid, boolean forceDelete) throws AtlasBaseExcepti if (getState(edge) == DELETED) { throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIP_ALREADY_DELETED, guid); } + + validateRelationshipType(getTypeName(edge)); + deleteDelegate.getHandler().resetHasLineageOnInputOutputDelete(Collections.singleton(edge), null); deleteDelegate.getHandler().deleteRelationships(Collections.singleton(edge), forceDelete); @@ -999,4 +1014,11 @@ private static void setEdgeVertexIdsInContext(AtlasEdge edge) { RequestContext.get().addRelationshipEndToVertexIdMapping(GraphHelper.getAtlasObjectIdForOutVertex(edge), edge.getOutVertex().getId()); RequestContext.get().addRelationshipEndToVertexIdMapping(GraphHelper.getAtlasObjectIdForInVertex(edge), edge.getInVertex().getId()); } + + private static void validateRelationshipType(String relationshipTypeName) throws AtlasBaseException { + if (EXCLUDE_MUTATION_REL_TYPE_NAMES.contains(relationshipTypeName)) { 
+ throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, + String.format("Mutating relationship of type %s is not supported via relationship APIs, please use entity APIs", relationshipTypeName)); + } + } } \ No newline at end of file diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/DataMeshQNMigrationService.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/DataMeshQNMigrationService.java new file mode 100644 index 00000000000..7341e0703f8 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/DataMeshQNMigrationService.java @@ -0,0 +1,464 @@ +package org.apache.atlas.repository.store.graph.v2; + +import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.RequestContext; +import org.apache.atlas.discovery.EntityDiscoveryService; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.discovery.IndexSearchParams; +import org.apache.atlas.model.instance.AtlasEntity; +import org.apache.atlas.model.instance.AtlasEntityHeader; +import org.apache.atlas.repository.graph.GraphHelper; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.atlas.repository.store.graph.AtlasEntityStore; +import org.apache.atlas.service.redis.RedisService; +import org.apache.atlas.type.AtlasEntityType; +import org.apache.atlas.type.AtlasType; +import org.apache.atlas.type.AtlasTypeRegistry; +import org.apache.atlas.util.NanoIdUtils; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections.MapUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +import static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.Constants.POLICY_ENTITY_TYPE; +import static org.apache.atlas.repository.graph.GraphHelper.getAllChildrenVertices; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.*; +import static org.apache.atlas.repository.util.AccessControlUtils.ATTR_POLICY_CATEGORY; +import static org.apache.atlas.repository.util.AccessControlUtils.ATTR_POLICY_RESOURCES; + +public class DataMeshQNMigrationService implements MigrationService { + + private static final Logger LOG = LoggerFactory.getLogger(DataMeshQNMigrationService.class); + + private final AtlasEntityStore entityStore; + private final EntityDiscoveryService discovery; + private final EntityGraphRetriever entityRetriever; + + private final AtlasTypeRegistry typeRegistry; + private final RedisService redisService; + private Map updatedPolicyResources; + + private final int BATCH_SIZE = 20; + + boolean errorOccured = false; + + boolean skipSuperDomain = false; + + private int counter; + private boolean forceRegen; + private final TransactionInterceptHelper transactionInterceptHelper; + + public DataMeshQNMigrationService(AtlasEntityStore entityStore, EntityDiscoveryService discovery, EntityGraphRetriever entityRetriever, AtlasTypeRegistry typeRegistry, TransactionInterceptHelper transactionInterceptHelper, RedisService redisService, boolean forceRegen) { + this.entityRetriever = entityRetriever; + this.entityStore = entityStore; + this.discovery = discovery; + this.typeRegistry = typeRegistry; + this.redisService = redisService; + this.transactionInterceptHelper = transactionInterceptHelper; + this.forceRegen = forceRegen; + + this.updatedPolicyResources = new HashMap<>(); + this.counter = 0; + } + + public void startMigration() throws 
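The validateRelationshipType guard above blocks mutations through the relationship APIs for relationship types that are owned by entity-level preprocessors. A compact sketch of the guard; the blocked type names here are illustrative, not the actual REL_* constants:

import java.util.Set;

public class RelationshipGuardSketch {
    static final Set<String> BLOCKED = Set.of(
            "parent_domain_sub_domains", "data_domain_data_products"); // illustrative names

    static void validateRelationshipType(String typeName) {
        if (BLOCKED.contains(typeName)) {
            throw new IllegalArgumentException(
                "Mutating relationship of type " + typeName +
                " is not supported via relationship APIs, please use entity APIs");
        }
    }

    public static void main(String[] args) {
        validateRelationshipType("process_inputs");                  // allowed, no exception
        try { validateRelationshipType("data_domain_data_products"); }
        catch (IllegalArgumentException e) { System.out.println(e.getMessage()); }
    }
}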
Exception { + try { + redisService.putValue(DATA_MESH_QN, MigrationStatus.IN_PROGRESS.name()); + + Set attributes = new HashSet<>(Arrays.asList(SUPER_DOMAIN_QN_ATTR, PARENT_DOMAIN_QN_ATTR, "__customAttributes")); + + List entities = getEntity(DATA_DOMAIN_ENTITY_TYPE, attributes, null); + + for (AtlasEntityHeader superDomain: entities) { + skipSuperDomain = false; + updateChunk(superDomain); + } + } catch (Exception e) { + LOG.error("Migration failed", e); + redisService.putValue(DATA_MESH_QN, MigrationStatus.FAILED.name()); + throw e; + } + + redisService.putValue(DATA_MESH_QN, MigrationStatus.SUCCESSFUL.name()); + } + + private void updateChunk(AtlasEntityHeader atlasEntity) throws AtlasBaseException { + AtlasVertex vertex = entityRetriever.getEntityVertex(atlasEntity.getGuid()); + String qualifiedName = (String) atlasEntity.getAttribute(QUALIFIED_NAME); + + try{ + migrateDomainAttributes(vertex, "", ""); + + if (counter > 0) { + commitChanges(); + } + + } catch (AtlasBaseException e){ + this.errorOccured = true; + LOG.error("Error while migrating qualified name for entity: {}", qualifiedName, e); + } + } + + private void migrateDomainAttributes(AtlasVertex vertex, String parentDomainQualifiedName, String superDomainQualifiedName) throws AtlasBaseException { + if(skipSuperDomain) { + return; + } + + String currentQualifiedName = vertex.getProperty(QUALIFIED_NAME,String.class); + String updatedQualifiedName = createDomainQualifiedName(parentDomainQualifiedName); + + Map updatedAttributes = new HashMap<>(); + + Map customAttributes = GraphHelper.getCustomAttributes(vertex); + if(!this.forceRegen && customAttributes != null && customAttributes.get(MIGRATION_CUSTOM_ATTRIBUTE) != null && customAttributes.get(MIGRATION_CUSTOM_ATTRIBUTE).equals("true")){ + LOG.info("Entity already migrated: {}", currentQualifiedName); + + updatedQualifiedName = vertex.getProperty(QUALIFIED_NAME,String.class); + + if (StringUtils.isEmpty(superDomainQualifiedName)) { + superDomainQualifiedName = vertex.getProperty(QUALIFIED_NAME,String.class); + } + + } else { + counter++; + LOG.info("Migrating qualified name for Domain: {} to {}", currentQualifiedName, updatedQualifiedName); + superDomainQualifiedName = commitChangesInMemory(currentQualifiedName, updatedQualifiedName, parentDomainQualifiedName, superDomainQualifiedName, vertex, updatedAttributes); + } + + if (!skipSuperDomain) { + Iterator products = getAllChildrenVertices(vertex, DATA_PRODUCT_EDGE_LABEL); + List productsList = new ArrayList<>(); + products.forEachRemaining(productsList::add); + + for (AtlasVertex productVertex : productsList) { + if (Objects.nonNull(productVertex)) { + migrateDataProductAttributes(productVertex, updatedQualifiedName, superDomainQualifiedName); + } else { + LOG.warn("Found null product vertex"); + } + + if (skipSuperDomain) { + break; + } + } + + // Get all children domains of current domain + Iterator childDomains = getAllChildrenVertices(vertex, DOMAIN_PARENT_EDGE_LABEL); + List childDomainsList = new ArrayList<>(); + childDomains.forEachRemaining(childDomainsList::add); + + for (AtlasVertex childVertex : childDomainsList) { + if (Objects.nonNull(childVertex)) { + migrateDomainAttributes(childVertex, updatedQualifiedName, superDomainQualifiedName); + } else { + LOG.warn("Found null sub-domain vertex"); + } + + if (skipSuperDomain) { + break; + } + } + + recordUpdatedChildEntities(vertex, updatedAttributes); + if (counter >= BATCH_SIZE) { + commitChanges(); + } + } + } + + public void commitChanges() throws AtlasBaseException { 
+ try { + updatePolicy(this.updatedPolicyResources); + } catch (AtlasBaseException e) { + this.errorOccured = true; + this.skipSuperDomain = true; + LOG.error("Failed to update set of policies: ", e); + LOG.error("Failed policies: {}", AtlasType.toJson(this.updatedPolicyResources)); + throw e; + } finally { + this.updatedPolicyResources.clear(); + } + + try { + transactionInterceptHelper.intercept(); + LOG.info("Committed a batch to the graph"); + } catch (Exception e){ + this.skipSuperDomain = true; + this.errorOccured = true; + LOG.error("Failed to commit set of assets: ", e); + throw e; + } finally { + this.counter = 0; + } + } + + public String commitChangesInMemory(String currentQualifiedName, String updatedQualifiedName, String parentDomainQualifiedName, String superDomainQualifiedName, AtlasVertex vertex, Map updatedAttributes) { + + if(skipSuperDomain) { + return ""; + } + + vertex.setProperty(QUALIFIED_NAME, updatedQualifiedName); + + if (StringUtils.isEmpty(parentDomainQualifiedName) && StringUtils.isEmpty(superDomainQualifiedName)){ + superDomainQualifiedName = updatedQualifiedName; + } else{ + vertex.setProperty(PARENT_DOMAIN_QN_ATTR, parentDomainQualifiedName); + vertex.setProperty(SUPER_DOMAIN_QN_ATTR, superDomainQualifiedName); + } + + updatedAttributes.put(QUALIFIED_NAME, updatedQualifiedName); + + //Store domainPolicies and resources to be updated + String currentResource = "entity:"+ currentQualifiedName; + String updatedResource = "entity:"+ updatedQualifiedName; + this.updatedPolicyResources.put(currentResource, updatedResource); + + Map customAttributes = GraphHelper.getCustomAttributes(vertex); + if(Objects.isNull(customAttributes) || MapUtils.isEmpty(customAttributes)) { + customAttributes = new HashMap<>(); + } + customAttributes.put(MIGRATION_CUSTOM_ATTRIBUTE, "true"); + vertex.setProperty(CUSTOM_ATTRIBUTES_PROPERTY_KEY, AtlasEntityType.toJson(customAttributes)); + + return superDomainQualifiedName; + } + + + private void migrateDataProductAttributes(AtlasVertex vertex, String parentDomainQualifiedName, String superDomainQualifiedName) throws AtlasBaseException { + if(skipSuperDomain) { + return; + } + + String currentQualifiedName = vertex.getProperty(QUALIFIED_NAME,String.class); + String updatedQualifiedName = createProductQualifiedName(parentDomainQualifiedName); + + Map customAttributes = GraphHelper.getCustomAttributes(vertex); + + if(!this.forceRegen && customAttributes != null && customAttributes.get(MIGRATION_CUSTOM_ATTRIBUTE) != null && customAttributes.get(MIGRATION_CUSTOM_ATTRIBUTE).equals("true")) { + LOG.info("Product already migrated: {}", currentQualifiedName); + + } else { + counter++; + LOG.info("Migrating qualified name for Product: {} to {}", currentQualifiedName, updatedQualifiedName); + vertex.setProperty(QUALIFIED_NAME, updatedQualifiedName); + + //Store domainPolicies and resources to be updated + String currentResource = "entity:" + currentQualifiedName; + String updatedResource = "entity:" + updatedQualifiedName; + this.updatedPolicyResources.put(currentResource, updatedResource); + + vertex.setProperty(PARENT_DOMAIN_QN_ATTR, parentDomainQualifiedName); + vertex.setProperty(SUPER_DOMAIN_QN_ATTR, superDomainQualifiedName); + + if(Objects.isNull(customAttributes) || MapUtils.isEmpty(customAttributes)) { + customAttributes = new HashMap<>(); + } + customAttributes.put(MIGRATION_CUSTOM_ATTRIBUTE, "true"); + vertex.setProperty(CUSTOM_ATTRIBUTES_PROPERTY_KEY, AtlasEntityType.toJson(customAttributes)); + } + + if(counter >= BATCH_SIZE){ + 
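The migration above walks domains and products recursively and commits in batches: a counter is bumped per migrated vertex, changes are flushed once it reaches BATCH_SIZE, and the counter is reset in a finally block so a failed flush does not carry a stale count. A minimal sketch of that pattern (the commit body just simulates the transaction flush):

public class BatchCommitSketch {
    static final int BATCH_SIZE = 20;
    int counter = 0;

    void migrateOne(String qualifiedName) {
        // ... mutate the vertex in memory ...
        counter++;
        if (counter >= BATCH_SIZE) {
            commit();
        }
    }

    void commit() {
        try {
            System.out.println("flushing " + counter + " pending changes");
            // transaction commit would happen here in the real service
        } finally {
            counter = 0; // always reset, even if the flush throws
        }
    }

    public static void main(String[] args) {
        BatchCommitSketch m = new BatchCommitSketch();
        for (int i = 0; i < 45; i++) m.migrateOne("default/domain/x" + i);
        if (m.counter > 0) m.commit(); // flush the tail, as the service does after the walk
    }
}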
commitChanges(); + } + } + + protected void updatePolicy(Map updatedPolicyResources) throws AtlasBaseException { + if(skipSuperDomain) { + return; + } + + List currentResources = new ArrayList<>(updatedPolicyResources.keySet()); + LOG.info("Updating policies for entities {}", currentResources); + Map updatedAttributes = new HashMap<>(); + + List policies = getEntity(POLICY_ENTITY_TYPE,new HashSet<>(Arrays.asList(ATTR_POLICY_RESOURCES, ATTR_POLICY_CATEGORY)), currentResources); + if (CollectionUtils.isNotEmpty(policies)) { + int batchSize = BATCH_SIZE; + int totalPolicies = policies.size(); + + for (int i = 0; i < totalPolicies; i += batchSize) { + List entityList = new ArrayList<>(); + List batch = policies.subList(i, Math.min(i + batchSize, totalPolicies)); + + for (AtlasEntityHeader policy : batch) { + AtlasVertex policyVertex = entityRetriever.getEntityVertex(policy.getGuid()); + AtlasEntity policyEntity = entityRetriever.toAtlasEntity(policyVertex); + + List policyResources = (List) policyEntity.getAttribute(ATTR_POLICY_RESOURCES); + List updatedPolicyResourcesList = new ArrayList<>(); + + for (String resource : policyResources) { + if (updatedPolicyResources.containsKey(resource)) { + updatedPolicyResourcesList.add(updatedPolicyResources.get(resource)); + } else { + updatedPolicyResourcesList.add(resource); + } + } + updatedAttributes.put(ATTR_POLICY_RESOURCES, updatedPolicyResourcesList); + + policyEntity.setAttribute(ATTR_POLICY_RESOURCES, updatedPolicyResourcesList); + entityList.add(policyEntity); + recordUpdatedChildEntities(policyVertex, updatedAttributes); + } + + EntityStream entityStream = new AtlasEntityStream(entityList); + entityStore.createOrUpdate(entityStream, false); + } + } + } + + private static String createDomainQualifiedName(String parentDomainQualifiedName) { + if (StringUtils.isNotEmpty(parentDomainQualifiedName)) { + return parentDomainQualifiedName + "/domain/" + getUUID(); + } else{ + return "default/domain" + "/" + getUUID() + "/super"; + } + } + + private static String createProductQualifiedName(String parentDomainQualifiedName) throws AtlasBaseException { + if (StringUtils.isEmpty(parentDomainQualifiedName)) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Parent Domain Qualified Name cannot be empty or null"); + } + return parentDomainQualifiedName + "/product/" + getUUID(); + } + + public static String getUUID(){ + return NanoIdUtils.randomNanoId(); + } + + public List getEntity(String entityType, Set attributes, List resource) throws AtlasBaseException { + + List> mustClauseList = new ArrayList<>(); + mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", entityType))); + + if(entityType.equals(DATA_DOMAIN_ENTITY_TYPE)){ + Map childBool = new HashMap<>(); + List > mustNotClauseList = new ArrayList<>(); + mustNotClauseList.add(mapOf("exists", mapOf("field", PARENT_DOMAIN_QN_ATTR))); + + Map shouldBool = new HashMap<>(); + shouldBool.put("must_not", mustNotClauseList); + + List > shouldClauseList = new ArrayList<>(); + shouldClauseList.add(mapOf("bool", shouldBool)); + + childBool.put("should", shouldClauseList); + mustClauseList.add(mapOf("bool", childBool)); + } + + if(entityType.equals(POLICY_ENTITY_TYPE)){ + mustClauseList.add(mapOf("term", mapOf("__state", "ACTIVE"))); + mustClauseList.add(mapOf("terms", mapOf("policyResources", resource))); + } + + Map bool = new HashMap<>(); + bool.put("must", mustClauseList); + + Map dsl = mapOf("query", mapOf("bool", bool)); + + List> sortList = new ArrayList<>(); + Map sortField = new 
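The qualified names generated above follow three shapes: a super domain gets default/domain/<id>/super, a nested domain gets <parentQN>/domain/<id>, and a product gets <parentDomainQN>/product/<id>, with an empty parent rejected. An illustration using java.util.UUID in place of NanoId, purely so the sketch has no extra dependency:

import java.util.UUID;

public class QualifiedNameSketch {
    static String newId() { return UUID.randomUUID().toString().substring(0, 8); }

    static String domainQualifiedName(String parentDomainQn) {
        return (parentDomainQn == null || parentDomainQn.isEmpty())
                ? "default/domain/" + newId() + "/super"   // top-level (super) domain
                : parentDomainQn + "/domain/" + newId();   // nested domain
    }

    static String productQualifiedName(String parentDomainQn) {
        if (parentDomainQn == null || parentDomainQn.isEmpty()) {
            throw new IllegalArgumentException("Parent Domain Qualified Name cannot be empty or null");
        }
        return parentDomainQn + "/product/" + newId();
    }

    public static void main(String[] args) {
        String superDomain = domainQualifiedName(null);
        String subDomain   = domainQualifiedName(superDomain);
        System.out.println(productQualifiedName(subDomain));
    }
}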
HashMap<>(); + sortField.put("__timestamp", mapOf("order", "DESC")); + sortList.add(sortField); + dsl.put("sort", sortList); + + + List entities = indexSearchPaginated(dsl, attributes, discovery); + + return entities; + } + + public static List indexSearchPaginated(Map dsl, Set attributes, EntityDiscoveryService discovery) throws AtlasBaseException { + IndexSearchParams searchParams = new IndexSearchParams(); + List ret = new ArrayList<>(); + + List sortList = new ArrayList<>(0); + sortList.add(mapOf("__timestamp", mapOf("order", "asc"))); + sortList.add(mapOf("__guid", mapOf("order", "asc"))); + dsl.put("sort", sortList); + + int from = 0; + int size = 100; + boolean hasMore = true; + do { + dsl.put("from", from); + dsl.put("size", size); + searchParams.setDsl(dsl); + + if (CollectionUtils.isNotEmpty(attributes)) { + searchParams.setAttributes(attributes); + } + + List headers = discovery.directIndexSearch(searchParams).getEntities(); + + if (CollectionUtils.isNotEmpty(headers)) { + ret.addAll(headers); + } else { + hasMore = false; + } + + from += size; + + } while (hasMore); + + return ret; + } + + /** + * Record the updated child entities, it will be used to send notification and store audit logs + * @param entityVertex Child entity vertex + * @param updatedAttributes Updated attributes while updating required attributes on updating collection + */ + protected void recordUpdatedChildEntities(AtlasVertex entityVertex, Map updatedAttributes) { + RequestContext requestContext = RequestContext.get(); + + AtlasEntity entity = new AtlasEntity(); + entity = entityRetriever.mapSystemAttributes(entityVertex, entity); + entity.setAttributes(updatedAttributes); + requestContext.cacheDifferentialEntity(new AtlasEntity(entity)); + + AtlasEntityType entityType = typeRegistry.getEntityTypeByName(entity.getTypeName()); + + //Add the min info attributes to entity header to be sent as part of notification + if(entityType != null) { + AtlasEntity finalEntity = entity; + entityType.getMinInfoAttributes().values().stream().filter(attribute -> !updatedAttributes.containsKey(attribute.getName())).forEach(attribute -> { + Object attrValue = null; + try { + attrValue = entityRetriever.getVertexAttribute(entityVertex, attribute); + } catch (AtlasBaseException e) { + this.errorOccured = true; + LOG.error("Error while getting vertex attribute", e); + } + if(attrValue != null) { + finalEntity.setAttribute(attribute.getName(), attrValue); + } + }); + requestContext.recordEntityUpdate(new AtlasEntityHeader(finalEntity)); + } + + } + + public static Map mapOf(String key, Object value) { + Map map = new HashMap<>(); + map.put(key, value); + return map; + } + + @Override + public void run() { + try { + LOG.info("Starting migration: {}", DATA_MESH_QN); + startMigration(); + LOG.info("Finished migration: {}", DATA_MESH_QN); + } catch (Exception e) { + LOG.error("Error running migration : {}",e.toString()); + throw new RuntimeException(e); + } + } +} diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/DataProductInputsOutputsMigrationService.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/DataProductInputsOutputsMigrationService.java new file mode 100644 index 00000000000..2f33a32481d --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/DataProductInputsOutputsMigrationService.java @@ -0,0 +1,101 @@ +package org.apache.atlas.repository.store.graph.v2; + +import org.apache.atlas.exception.AtlasBaseException; +import 
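indexSearchPaginated above pages through results with from/size, stopping when a page comes back empty. A self-contained sketch of the same loop, where a lambda stands in for the index-search call:

import java.util.ArrayList;
import java.util.List;
import java.util.function.BiFunction;

public class PaginationSketch {
    static List<String> fetchAll(BiFunction<Integer, Integer, List<String>> searcher) {
        List<String> all = new ArrayList<>();
        int from = 0, size = 100;
        while (true) {
            List<String> page = searcher.apply(from, size);
            if (page == null || page.isEmpty()) break;  // hasMore = false
            all.addAll(page);
            from += size;                               // advance to the next page
        }
        return all;
    }

    public static void main(String[] args) {
        List<String> data = new ArrayList<>();
        for (int i = 0; i < 250; i++) data.add("entity-" + i);
        List<String> result = fetchAll((from, size) ->
                from >= data.size() ? List.of() : data.subList(from, Math.min(from + size, data.size())));
        System.out.println(result.size()); // 250
    }
}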
org.apache.atlas.repository.graph.GraphHelper; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.commons.collections.CollectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +import static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.*; + +public class DataProductInputsOutputsMigrationService { + + private static final Logger LOG = LoggerFactory.getLogger(DataProductInputsOutputsMigrationService.class); + + private final EntityGraphRetriever entityRetriever; + + + private String productGuid; + private final TransactionInterceptHelper transactionInterceptHelper; + + public DataProductInputsOutputsMigrationService(EntityGraphRetriever entityRetriever, String productGuid, TransactionInterceptHelper transactionInterceptHelper) { + this.entityRetriever = entityRetriever; + this.transactionInterceptHelper = transactionInterceptHelper; + this.productGuid = productGuid; + } + + public void migrateProduct() throws Exception { + try { + AtlasVertex productVertex = entityRetriever.getEntityVertex(this.productGuid); + + boolean isCommitRequired = migrateAttr(productVertex); + if (isCommitRequired){ + LOG.info("Committing changes for Product: {}", this.productGuid); + commitChanges(); + } + else { + LOG.info("No changes to commit for Product: {} as no migration needed", this.productGuid); + } + + } catch (Exception e) { + LOG.error("Error while migration inputs/outputs for Dataproduct: {}", this.productGuid, e); + throw e; + } + } + + private boolean migrateAttr(AtlasVertex vertex) throws AtlasBaseException { + boolean isCommitRequired = false; + + List outputPortsRelationGuids = getAssetGuids(vertex, OUTPUT_PORT_PRODUCT_EDGE_LABEL); + List outputPortGuidsAttr = vertex.getMultiValuedProperty(OUTPUT_PORT_GUIDS_ATTR, String.class); + + + List inputPortsRelationGuids = getAssetGuids(vertex, INPUT_PORT_PRODUCT_EDGE_LABEL); + List inputPortGuidsAttr = vertex.getMultiValuedProperty(INPUT_PORT_GUIDS_ATTR, String.class); + + if(!CollectionUtils.isEqualCollection(outputPortsRelationGuids, outputPortGuidsAttr)) { + LOG.info("Migrating outputPort guid attribute: {} for Product: {}", OUTPUT_PORT_GUIDS_ATTR, this.productGuid); + addInternalAttr(vertex, OUTPUT_PORT_GUIDS_ATTR, outputPortsRelationGuids); + isCommitRequired = true; + } + + if(!CollectionUtils.isEqualCollection(inputPortsRelationGuids, inputPortGuidsAttr)) { + LOG.info("Migrating inputPort guid attribute: {} for Product: {}", INPUT_PORT_GUIDS_ATTR, this.productGuid); + addInternalAttr(vertex, INPUT_PORT_GUIDS_ATTR, inputPortsRelationGuids); + isCommitRequired = true; + } + + return isCommitRequired; + } + + public void commitChanges() throws AtlasBaseException { + try { + transactionInterceptHelper.intercept(); + LOG.info("Committed a entity to the graph"); + } catch (Exception e){ + LOG.error("Failed to commit asset: ", e); + throw e; + } + } + + private List getAssetGuids(AtlasVertex vertex, String edgeLabel) throws AtlasBaseException { + List guids = new ArrayList<>(); + Iterator activeParent = GraphHelper.getActiveParentVertices(vertex, edgeLabel); + while(activeParent.hasNext()) { + AtlasVertex child = activeParent.next(); + guids.add(child.getProperty(GUID_PROPERTY_KEY, String.class)); + } + return guids; + } + + private void addInternalAttr(AtlasVertex productVertex, String internalAttr, List currentGuids){ + productVertex.removeProperty(internalAttr); + if 
(CollectionUtils.isNotEmpty(currentGuids)) { + currentGuids.forEach(guid -> AtlasGraphUtilsV2.addEncodedProperty(productVertex, internalAttr , guid)); + } + } +} \ No newline at end of file diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityGraphMapper.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityGraphMapper.java index a95ca20f0ca..596420696d4 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityGraphMapper.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityGraphMapper.java @@ -137,8 +137,9 @@ import static org.apache.atlas.repository.graph.GraphHelper.getPropagatableClassifications; import static org.apache.atlas.repository.graph.GraphHelper.getClassificationEntityGuid; import static org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2.*; -import static org.apache.atlas.repository.store.graph.v2.tasks.ClassificationPropagateTaskFactory.CLASSIFICATION_PROPAGATION_ADD; -import static org.apache.atlas.repository.store.graph.v2.tasks.ClassificationPropagateTaskFactory.CLASSIFICATION_PROPAGATION_DELETE; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.INPUT_PORT_GUIDS_ATTR; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.OUTPUT_PORT_GUIDS_ATTR; +import static org.apache.atlas.repository.store.graph.v2.tasks.ClassificationPropagateTaskFactory.*; import static org.apache.atlas.type.AtlasStructType.AtlasAttribute.AtlasRelationshipEdgeDirection.IN; import static org.apache.atlas.type.AtlasStructType.AtlasAttribute.AtlasRelationshipEdgeDirection.OUT; import static org.apache.atlas.type.Constants.PENDING_TASKS_PROPERTY_KEY; @@ -170,6 +171,7 @@ public class EntityGraphMapper { private static final String TYPE_GLOSSARY= "AtlasGlossary"; private static final String TYPE_CATEGORY= "AtlasGlossaryCategory"; private static final String TYPE_TERM = "AtlasGlossaryTerm"; + private static final String TYPE_PRODUCT = "DataProduct"; private static final String TYPE_PROCESS = "Process"; private static final String ATTR_MEANINGS = "meanings"; private static final String ATTR_ANCHOR = "anchor"; @@ -222,8 +224,7 @@ public EntityGraphMapper(DeleteHandlerDelegate deleteDelegate, RestoreHandlerV1 this.entityRetriever = new EntityGraphRetriever(graph, typeRegistry); this.fullTextMapperV2 = fullTextMapperV2; this.taskManagement = taskManagement; - this.transactionInterceptHelper = transactionInterceptHelper; - } + this.transactionInterceptHelper = transactionInterceptHelper;} @VisibleForTesting public void setTasksUseFlag(boolean value) { @@ -1906,7 +1907,7 @@ public List mapArrayValue(AttributeMutationContext ctx, EntityMutationContext co AtlasAttribute inverseRefAttribute = attribute.getInverseRefAttribute(); Cardinality cardinality = attribute.getAttributeDef().getCardinality(); List removedElements = new ArrayList<>(); - List newElementsCreated = new ArrayList<>(); + List newElementsCreated = new ArrayList<>(); List allArrayElements = null; List currentElements; boolean deleteExistingRelations = shouldDeleteExistingRelations(ctx, attribute); @@ -2004,6 +2005,11 @@ public List mapArrayValue(AttributeMutationContext ctx, EntityMutationContext co case PROCESS_INPUTS: case PROCESS_OUTPUTS: addEdgesToContext(GraphHelper.getGuid(ctx.referringVertex), newElementsCreated, removedElements); break; + + case INPUT_PORT_PRODUCT_EDGE_LABEL: + case OUTPUT_PORT_PRODUCT_EDGE_LABEL: + 
addInternalProductAttr(ctx, newElementsCreated, removedElements); + break; } if (LOG.isDebugEnabled()) { @@ -2089,6 +2095,11 @@ public List appendArrayValue(AttributeMutationContext ctx, EntityMutationContext case PROCESS_INPUTS: case PROCESS_OUTPUTS: addEdgesToContext(GraphHelper.getGuid(ctx.referringVertex), newElementsCreated, new ArrayList<>(0)); break; + + case INPUT_PORT_PRODUCT_EDGE_LABEL: + case OUTPUT_PORT_PRODUCT_EDGE_LABEL: + addInternalProductAttr(ctx, newElementsCreated, null); + break; } if (LOG.isDebugEnabled()) { @@ -2156,6 +2167,11 @@ public List removeArrayValue(AttributeMutationContext ctx, EntityMutationContext case PROCESS_INPUTS: case PROCESS_OUTPUTS: addEdgesToContext(GraphHelper.getGuid(ctx.referringVertex), new ArrayList<>(0), removedElements); break; + + case INPUT_PORT_PRODUCT_EDGE_LABEL: + case OUTPUT_PORT_PRODUCT_EDGE_LABEL: + addInternalProductAttr(ctx, null , removedElements); + break; } if (LOG.isDebugEnabled()) { @@ -2193,6 +2209,40 @@ private void addEdgesToContext(String guid, List newElementsCreated, Lis } } + private void addInternalProductAttr(AttributeMutationContext ctx, List createdElements, List deletedElements) throws AtlasBaseException { + MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("addInternalProductAttrForAppend"); + AtlasVertex toVertex = ctx.getReferringVertex(); + String toVertexType = getTypeName(toVertex); + + if (CollectionUtils.isEmpty(createdElements) && CollectionUtils.isEmpty(deletedElements)){ + RequestContext.get().endMetricRecord(metricRecorder); + return; + } + + if (TYPE_PRODUCT.equals(toVertexType)) { + String attrName = ctx.getAttribute().getRelationshipEdgeLabel().equals(OUTPUT_PORT_PRODUCT_EDGE_LABEL) + ? OUTPUT_PORT_GUIDS_ATTR + : INPUT_PORT_GUIDS_ATTR; + + addOrRemoveDaapInternalAttr(toVertex, attrName, createdElements, deletedElements); + }else{ + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Can not update product relations while updating any asset"); + } + RequestContext.get().endMetricRecord(metricRecorder); + } + + private void addOrRemoveDaapInternalAttr(AtlasVertex toVertex, String internalAttr, List createdElements, List deletedElements) { + if (CollectionUtils.isNotEmpty(createdElements)) { + List addedGuids = createdElements.stream().map(x -> ((AtlasEdge) x).getOutVertex().getProperty("__guid", String.class)).collect(Collectors.toList()); + addedGuids.forEach(guid -> AtlasGraphUtilsV2.addEncodedProperty(toVertex, internalAttr, guid)); + } + + if (CollectionUtils.isNotEmpty(deletedElements)) { + List removedGuids = deletedElements.stream().map(x -> x.getOutVertex().getProperty("__guid", String.class)).collect(Collectors.toList()); + removedGuids.forEach(guid -> AtlasGraphUtilsV2.removeItemFromListPropertyValue(toVertex, internalAttr, guid)); + } + } + private boolean shouldDeleteExistingRelations(AttributeMutationContext ctx, AtlasAttribute attribute) { boolean ret = false; AtlasEntityType entityType = typeRegistry.getEntityTypeByName(AtlasGraphUtilsV2.getTypeName(ctx.getReferringVertex())); @@ -2981,6 +3031,87 @@ private void updateInConsistentOwnedMapVertices(AttributeMutationContext ctx, At } } + public void cleanUpClassificationPropagation(String classificationName) throws AtlasBaseException { + List vertices = GraphHelper.getAllAssetsWithClassificationAttached(graph, classificationName); + int totalVertexSize = vertices.size(); + LOG.info("To clean up tag {} from {} entities", classificationName, totalVertexSize); + int toIndex; + int offset = 0; + do { + toIndex = 
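// Illustrative sketch of how the internal daapInputPortGuids / daapOutputPortGuids lists are
// kept in sync with relationship edges (see addOrRemoveDaapInternalAttr above): GUIDs behind
// newly created edges are appended and GUIDs behind removed edges are dropped. A plain
// List<String> stands in for the multi-valued vertex property; duplicate skipping is a
// simplification here, not a claim about the real property's cardinality.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

class PortGuidSyncSketch {
    static void sync(List<String> internalAttr, List<String> createdGuids, List<String> removedGuids) {
        if (createdGuids != null) {
            for (String guid : createdGuids) {
                if (!internalAttr.contains(guid)) {
                    internalAttr.add(guid);
                }
            }
        }
        if (removedGuids != null) {
            internalAttr.removeAll(removedGuids);
        }
    }

    public static void main(String[] args) {
        List<String> outputPortGuids = new ArrayList<>(Arrays.asList("g1", "g2"));
        sync(outputPortGuids, Arrays.asList("g3"), Arrays.asList("g1"));
        System.out.println(outputPortGuids);   // [g2, g3]
    }
}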
Math.min((offset + CHUNK_SIZE), totalVertexSize); + List entityVertices = vertices.subList(offset, toIndex); + List impactedGuids = entityVertices.stream().map(GraphHelper::getGuid).collect(Collectors.toList()); + try { + GraphTransactionInterceptor.lockObjectAndReleasePostCommit(impactedGuids); + for (AtlasVertex vertex : entityVertices) { + List deletedClassifications = new ArrayList<>(); + List classificationEdges = GraphHelper.getClassificationEdges(vertex, null, classificationName); + for (AtlasEdge edge : classificationEdges) { + AtlasClassification classification = entityRetriever.toAtlasClassification(edge.getInVertex()); + deletedClassifications.add(classification); + deleteDelegate.getHandler().deleteEdgeReference(edge, TypeCategory.CLASSIFICATION, false, true, null, vertex); + } + + AtlasEntity entity = repairClassificationMappings(vertex); + + entityChangeNotifier.onClassificationDeletedFromEntity(entity, deletedClassifications); + } + offset += CHUNK_SIZE; + } finally { + transactionInterceptHelper.intercept(); + LOG.info("Cleaned up {} entities for classification {}", offset, classificationName); + } + + } while (offset < totalVertexSize); + // Fetch all classificationVertex by classificationName and delete them if remaining + List classificationVertices = GraphHelper.getAllClassificationVerticesByClassificationName(graph, classificationName); + for (AtlasVertex classificationVertex : classificationVertices) { + deleteDelegate.getHandler().deleteClassificationVertex(classificationVertex, true); + } + transactionInterceptHelper.intercept(); + LOG.info("Completed cleaning up classification {}", classificationName); + } + + public AtlasEntity repairClassificationMappings(AtlasVertex entityVertex) throws AtlasBaseException { + String guid = GraphHelper.getGuid(entityVertex); + AtlasEntity entity = instanceConverter.getEntity(guid, ENTITY_CHANGE_NOTIFY_IGNORE_RELATIONSHIP_ATTRIBUTES); + + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_UPDATE_CLASSIFICATION, new AtlasEntityHeader(entity)), "repair classification mappings: guid=", guid); + List classificationNames = new ArrayList<>(); + List propagatedClassificationNames = new ArrayList<>(); + + if (entity.getClassifications() != null) { + List classifications = entity.getClassifications(); + for (AtlasClassification classification : classifications) { + if (isPropagatedClassification(classification, guid)) { + propagatedClassificationNames.add(classification.getTypeName()); + } else { + classificationNames.add(classification.getTypeName()); + } + } + } + //Delete array/set properties first + entityVertex.removeProperty(TRAIT_NAMES_PROPERTY_KEY); + entityVertex.removeProperty(PROPAGATED_TRAIT_NAMES_PROPERTY_KEY); + + + //Update classificationNames and propagatedClassificationNames in entityVertex + entityVertex.setProperty(CLASSIFICATION_NAMES_KEY, getDelimitedClassificationNames(classificationNames)); + entityVertex.setProperty(PROPAGATED_CLASSIFICATION_NAMES_KEY, getDelimitedClassificationNames(propagatedClassificationNames)); + entityVertex.setProperty(CLASSIFICATION_TEXT_KEY, fullTextMapperV2.getClassificationTextForEntity(entity)); + // Make classificationNames unique list as it is of type SET + classificationNames = classificationNames.stream().distinct().collect(Collectors.toList()); + //Update classificationNames and propagatedClassificationNames in entityHeader + for(String classificationName : classificationNames) { + AtlasGraphUtilsV2.addEncodedProperty(entityVertex, 
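// Illustrative sketch of the chunked clean-up loop above: walk a large vertex list in
// fixed-size slices and commit after every slice, so a failure never loses more than one
// chunk of work. The Runnable "commit" and the per-chunk consumer are placeholders for the
// graph transaction and the tag-removal logic.
import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;

class ChunkedProcessingSketch {
    static <T> void processInChunks(List<T> items, int chunkSize, Consumer<List<T>> processChunk, Runnable commit) {
        int total = items.size();
        int offset = 0;
        while (offset < total) {
            int toIndex = Math.min(offset + chunkSize, total);
            try {
                processChunk.accept(items.subList(offset, toIndex));
            } finally {
                commit.run();    // mirrors the per-chunk intercept()/commit in the code above
            }
            offset = toIndex;
        }
    }

    public static void main(String[] args) {
        processInChunks(Arrays.asList("a", "b", "c", "d", "e"), 2,
                chunk -> System.out.println("processing " + chunk),
                () -> System.out.println("commit"));
    }
}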
TRAIT_NAMES_PROPERTY_KEY, classificationName); + } + for (String classificationName : propagatedClassificationNames) { + entityVertex.addListProperty(PROPAGATED_TRAIT_NAMES_PROPERTY_KEY, classificationName); + } + + return entity; + } + public void addClassifications(final EntityMutationContext context, String guid, List classifications) throws AtlasBaseException { if (CollectionUtils.isNotEmpty(classifications)) { MetricRecorder metric = RequestContext.get().startMetricRecord("addClassifications"); @@ -3028,7 +3159,7 @@ public void addClassifications(final EntityMutationContext context, String guid, } if (restrictPropagationThroughHierarchy == null) { - classification.setRestrictPropagationThroughLineage(RESTRICT_PROPAGATION_THROUGH_HIERARCHY_DEFAULT); + classification.setRestrictPropagationThroughHierarchy(RESTRICT_PROPAGATION_THROUGH_HIERARCHY_DEFAULT); } // set associated entity id to classification @@ -3580,18 +3711,21 @@ public void updateClassifications(EntityMutationContext context, String guid, Li Boolean updatedRestrictPropagationThroughLineage = classification.getRestrictPropagationThroughLineage(); Boolean currentRestrictPropagationThroughHierarchy = currentClassification.getRestrictPropagationThroughHierarchy(); Boolean updatedRestrictPropagationThroughHierarchy = classification.getRestrictPropagationThroughHierarchy(); - + String propagationMode = entityRetriever.determinePropagationMode(updatedRestrictPropagationThroughLineage, updatedRestrictPropagationThroughHierarchy); if ((!Objects.equals(updatedRemovePropagations, currentRemovePropagations) || !Objects.equals(currentTagPropagation, updatedTagPropagation) || !Objects.equals(currentRestrictPropagationThroughLineage, updatedRestrictPropagationThroughLineage)) && taskManagement != null && DEFERRED_ACTION_ENABLED) { String propagationType = CLASSIFICATION_PROPAGATION_ADD; - if (removePropagation || !updatedTagPropagation) - { + if(currentRestrictPropagationThroughLineage != updatedRestrictPropagationThroughLineage || currentRestrictPropagationThroughHierarchy != updatedRestrictPropagationThroughHierarchy){ + propagationType = CLASSIFICATION_REFRESH_PROPAGATION; + } + if (removePropagation || !updatedTagPropagation) { propagationType = CLASSIFICATION_PROPAGATION_DELETE; } createAndQueueTask(propagationType, entityVertex, classificationVertex.getIdForDisplay(), currentRestrictPropagationThroughLineage,currentRestrictPropagationThroughHierarchy); + updatedTagPropagation = null; } // compute propagatedEntityVertices once and use it for subsequent iterations and notifications @@ -3605,11 +3739,7 @@ public void updateClassifications(EntityMutationContext context, String guid, Li deleteDelegate.getHandler().removeTagPropagation(classificationVertex); } if (CollectionUtils.isEmpty(entitiesToPropagateTo)) { - String propagationMode; - if (updatedRemovePropagations !=null) { - propagationMode = entityRetriever.determinePropagationMode(updatedRestrictPropagationThroughLineage, updatedRestrictPropagationThroughHierarchy); - } - else{ + if (updatedRemovePropagations ==null) { propagationMode = CLASSIFICATION_PROPAGATION_MODE_DEFAULT; } Boolean toExclude = propagationMode == CLASSIFICATION_VERTEX_RESTRICT_PROPAGATE_THROUGH_LINEAGE ? 
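// Illustrative sketch of the repair step above: partition an entity's classifications into
// direct vs. propagated names, de-duplicate the direct ones, and rebuild the denormalized
// name lists. The "propagated" test and the delimiter for the flattened string are simplified
// stand-ins; the real Atlas helpers (isPropagatedClassification, getDelimitedClassificationNames)
// are not reproduced here.
import java.util.List;
import java.util.stream.Collectors;

class ClassificationRepairSketch {
    static class Tag {
        final String typeName;
        final boolean propagated;   // true when the tag originated on another entity
        Tag(String typeName, boolean propagated) { this.typeName = typeName; this.propagated = propagated; }
    }

    public static void main(String[] args) {
        List<Tag> tags = List.of(new Tag("PII", false), new Tag("PII", false), new Tag("Confidential", true));

        List<String> directNames = tags.stream()
                .filter(t -> !t.propagated)
                .map(t -> t.typeName)
                .distinct()                     // direct names behave like a set
                .collect(Collectors.toList());

        List<String> propagatedNames = tags.stream()
                .filter(t -> t.propagated)
                .map(t -> t.typeName)
                .collect(Collectors.toList());

        String delimited = String.join("|", directNames);   // delimiter is illustrative only
        System.out.println(directNames + " / " + propagatedNames + " / " + delimited);
    }
}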
true : false; diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityGraphRetriever.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityGraphRetriever.java index 691b5ab75f3..90e041b4737 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityGraphRetriever.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityGraphRetriever.java @@ -1019,8 +1019,13 @@ private AtlasEntityHeader mapVertexToAtlasEntityHeader(AtlasVertex entityVertex, ret.setTypeName(typeName); ret.setGuid(guid); ret.setStatus(GraphHelper.getStatus(entityVertex)); - if(RequestContext.get().includeClassifications()){ - ret.setClassificationNames(getAllTraitNames(entityVertex)); + RequestContext context = RequestContext.get(); + boolean includeClassifications = context.includeClassifications(); + boolean includeClassificationNames = context.isIncludeClassificationNames(); + if(includeClassifications){ + ret.setClassificationNames(getAllTraitNamesFromAttribute(entityVertex)); + } else if (!includeClassifications && includeClassificationNames) { + ret.setClassificationNames(getAllTraitNamesFromAttribute(entityVertex)); } ret.setIsIncomplete(isIncomplete); ret.setLabels(getLabels(entityVertex)); diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/MigrationService.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/MigrationService.java new file mode 100644 index 00000000000..9e93999cd96 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/MigrationService.java @@ -0,0 +1,5 @@ +package org.apache.atlas.repository.store.graph.v2; + +public interface MigrationService extends Runnable { + void startMigration() throws Exception; +} diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/AuthPolicyPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/AuthPolicyPreProcessor.java index acc28ba5fe3..62adf8119ad 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/AuthPolicyPreProcessor.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/AuthPolicyPreProcessor.java @@ -24,7 +24,6 @@ import org.apache.atlas.authorize.AtlasEntityAccessRequest; import org.apache.atlas.authorize.AtlasPrivilege; import org.apache.atlas.exception.AtlasBaseException; -import org.apache.atlas.featureflag.FeatureFlagStore; import org.apache.atlas.model.instance.AtlasEntity; import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; import org.apache.atlas.model.instance.AtlasEntityHeader; @@ -52,6 +51,7 @@ import static org.apache.atlas.AtlasErrorCode.BAD_REQUEST; import static org.apache.atlas.AtlasErrorCode.INSTANCE_BY_UNIQUE_ATTRIBUTE_NOT_FOUND; import static org.apache.atlas.AtlasErrorCode.INSTANCE_GUID_NOT_FOUND; +import static org.apache.atlas.AtlasErrorCode.OPERATION_NOT_SUPPORTED; import static org.apache.atlas.AtlasErrorCode.RESOURCE_NOT_FOUND; import static org.apache.atlas.AtlasErrorCode.UNAUTHORIZED_CONNECTION_ADMIN; import static org.apache.atlas.authorize.AtlasAuthorizationUtils.getCurrentUserName; @@ -61,6 +61,7 @@ import static org.apache.atlas.repository.Constants.ATTR_ADMIN_ROLES; import static org.apache.atlas.repository.Constants.KEYCLOAK_ROLE_ADMIN; import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; +import static 
org.apache.atlas.repository.Constants.STAKEHOLDER_ENTITY_TYPE; import static org.apache.atlas.repository.util.AccessControlUtils.*; import static org.apache.atlas.repository.util.AccessControlUtils.getPolicySubCategory; @@ -70,17 +71,14 @@ public class AuthPolicyPreProcessor implements PreProcessor { private final AtlasGraph graph; private final AtlasTypeRegistry typeRegistry; private final EntityGraphRetriever entityRetriever; - private final FeatureFlagStore featureFlagStore ; private IndexAliasStore aliasStore; public AuthPolicyPreProcessor(AtlasGraph graph, AtlasTypeRegistry typeRegistry, - EntityGraphRetriever entityRetriever, - FeatureFlagStore featureFlagStore) { + EntityGraphRetriever entityRetriever) { this.graph = graph; this.typeRegistry = typeRegistry; this.entityRetriever = entityRetriever; - this.featureFlagStore = featureFlagStore; aliasStore = new ESAliasStore(graph, entityRetriever); } @@ -108,6 +106,13 @@ private void processCreatePolicy(AtlasStruct entity) throws AtlasBaseException { AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processCreatePolicy"); AtlasEntity policy = (AtlasEntity) entity; + AtlasEntityWithExtInfo parent = getAccessControlEntity(policy); + AtlasEntity parentEntity = null; + if (parent != null) { + parentEntity = parent.getEntity(); + verifyParentTypeName(parentEntity); + } + String policyCategory = getPolicyCategory(policy); if (StringUtils.isEmpty(policyCategory)) { throw new AtlasBaseException(BAD_REQUEST, "Please provide attribute " + ATTR_POLICY_CATEGORY); @@ -117,9 +122,6 @@ private void processCreatePolicy(AtlasStruct entity) throws AtlasBaseException { AuthPolicyValidator validator = new AuthPolicyValidator(entityRetriever); if (POLICY_CATEGORY_PERSONA.equals(policyCategory)) { - AtlasEntityWithExtInfo parent = getAccessControlEntity(policy); - AtlasEntity parentEntity = parent.getEntity(); - String policySubCategory = getPolicySubCategory(policy); if (!POLICY_SUB_CATEGORY_DOMAIN.equals(policySubCategory)) { @@ -142,9 +144,6 @@ private void processCreatePolicy(AtlasStruct entity) throws AtlasBaseException { aliasStore.updateAlias(parent, policy); } else if (POLICY_CATEGORY_PURPOSE.equals(policyCategory)) { - AtlasEntityWithExtInfo parent = getAccessControlEntity(policy); - AtlasEntity parentEntity = parent.getEntity(); - policy.setAttribute(QUALIFIED_NAME, String.format("%s/%s", getEntityQualifiedName(parentEntity), getUUID())); validator.validate(policy, null, parentEntity, CREATE); @@ -323,4 +322,10 @@ private AtlasEntityWithExtInfo getAccessControlEntity(AtlasEntity entity) throws RequestContext.get().endMetricRecord(metricRecorder); return ret; } + + private void verifyParentTypeName(AtlasEntity parentEntity) throws AtlasBaseException { + if (parentEntity.getTypeName().equals(STAKEHOLDER_ENTITY_TYPE)) { + throw new AtlasBaseException(OPERATION_NOT_SUPPORTED, "Updating policies for " + STAKEHOLDER_ENTITY_TYPE); + } + } } diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/ConnectionPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/ConnectionPreProcessor.java index 02fb63bbc81..9b0b83e8fd5 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/ConnectionPreProcessor.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/ConnectionPreProcessor.java @@ -6,9 +6,7 @@ * to you under the Apache License, Version 2.0 (the * "License"); you 
may not use this file except in compliance * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * + * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -17,6 +15,7 @@ */ package org.apache.atlas.repository.store.graph.v2.preprocessor; +import org.apache.atlas.AtlasErrorCode; import org.apache.atlas.DeleteType; import org.apache.atlas.RequestContext; import org.apache.atlas.discovery.EntityDiscoveryService; @@ -47,22 +46,15 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.stream.Collectors; +import java.util.stream.Stream; +import static org.apache.atlas.auth.client.keycloak.AtlasKeycloakClient.getKeycloakClient; import static org.apache.atlas.authorize.AtlasAuthorizerFactory.ATLAS_AUTHORIZER_IMPL; import static org.apache.atlas.authorize.AtlasAuthorizerFactory.CURRENT_AUTHORIZER_IMPL; -import static org.apache.atlas.repository.Constants.ATTR_ADMIN_GROUPS; -import static org.apache.atlas.repository.Constants.ATTR_ADMIN_ROLES; -import static org.apache.atlas.repository.Constants.ATTR_ADMIN_USERS; -import static org.apache.atlas.repository.Constants.CREATED_BY_KEY; -import static org.apache.atlas.repository.Constants.POLICY_ENTITY_TYPE; -import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; +import static org.apache.atlas.repository.Constants.*; import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; -import static org.apache.atlas.auth.client.keycloak.AtlasKeycloakClient.getKeycloakClient; public class ConnectionPreProcessor implements PreProcessor { private static final Logger LOG = LoggerFactory.getLogger(ConnectionPreProcessor.class); @@ -154,60 +146,87 @@ private void processCreateConnection(AtlasStruct struct) throws AtlasBaseExcepti } } - private void processUpdateConnection(EntityMutationContext context, - AtlasStruct entity) throws AtlasBaseException { - + private void processUpdateConnection(EntityMutationContext context, AtlasStruct entity) throws AtlasBaseException { AtlasEntity connection = (AtlasEntity) entity; - if (ATLAS_AUTHORIZER_IMPL.equalsIgnoreCase(CURRENT_AUTHORIZER_IMPL)) { AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processUpdateConnection"); - AtlasVertex vertex = context.getVertex(connection.getGuid()); AtlasEntity existingConnEntity = entityRetriever.toAtlasEntity(vertex); - String roleName = String.format(CONN_NAME_PATTERN, connection.getGuid()); - String vertexQName = vertex.getProperty(QUALIFIED_NAME, String.class); entity.setAttribute(QUALIFIED_NAME, vertexQName); - RoleRepresentation representation = getKeycloakClient().getRoleByName(roleName); - String creatorUser = vertex.getProperty(CREATED_BY_KEY, String.class); - - if (connection.hasAttribute(ATTR_ADMIN_USERS)) { - List newAdminUsers = (List) connection.getAttribute(ATTR_ADMIN_USERS); - List currentAdminUsers = (List) existingConnEntity.getAttribute(ATTR_ADMIN_USERS); - if (StringUtils.isNotEmpty(creatorUser) && !newAdminUsers.contains(creatorUser)) { - newAdminUsers.add(creatorUser); - } - - connection.setAttribute(ATTR_ADMIN_USERS, newAdminUsers); - if (CollectionUtils.isNotEmpty(newAdminUsers) || 
CollectionUtils.isNotEmpty(currentAdminUsers)) { - keycloakStore.updateRoleUsers(roleName, currentAdminUsers, newAdminUsers, representation); - } - } + //optional is used here to distinguish if the admin related attributes are set in request body or not (else part) + //if set, check for empty list so that appropriate error can be thrown + List newAdminUsers = getAttributeList(connection, ATTR_ADMIN_USERS).orElse(null); + List currentAdminUsers = getAttributeList(existingConnEntity, ATTR_ADMIN_USERS).orElseGet(ArrayList::new); + + List newAdminGroups = getAttributeList(connection, ATTR_ADMIN_GROUPS).orElse(null); + List currentAdminGroups = getAttributeList(existingConnEntity, ATTR_ADMIN_GROUPS).orElseGet(ArrayList::new); + + List newAdminRoles = getAttributeList(connection, ATTR_ADMIN_ROLES).orElse(null); + List currentAdminRoles = getAttributeList(existingConnEntity, ATTR_ADMIN_ROLES).orElseGet(ArrayList::new); - if (connection.hasAttribute(ATTR_ADMIN_GROUPS)) { - List newAdminGroups = (List) connection.getAttribute(ATTR_ADMIN_GROUPS); - List currentAdminGroups = (List) existingConnEntity.getAttribute(ATTR_ADMIN_GROUPS); + // Check conditions and throw exceptions as necessary - if (CollectionUtils.isNotEmpty(newAdminGroups) || CollectionUtils.isNotEmpty(currentAdminGroups)) { - keycloakStore.updateRoleGroups(roleName, currentAdminGroups, newAdminGroups, representation); - } + // If all new admin attributes are null, no action required as these are not meant to update in the request + if (newAdminUsers == null && newAdminGroups == null && newAdminRoles == null) { + RequestContext.get().endMetricRecord(metricRecorder); + return; } - if (connection.hasAttribute(ATTR_ADMIN_ROLES)) { - List newAdminRoles = (List) connection.getAttribute(ATTR_ADMIN_ROLES); - List currentAdminRoles = (List) existingConnEntity.getAttribute(ATTR_ADMIN_ROLES); + // Throw exception if all new admin attributes are empty but not null + boolean emptyName = newAdminUsers != null && newAdminUsers.isEmpty(); + boolean emptyGroup = newAdminGroups != null && newAdminGroups.isEmpty(); + boolean emptyRole = newAdminRoles != null && newAdminRoles.isEmpty(); - if (CollectionUtils.isNotEmpty(newAdminRoles) || CollectionUtils.isNotEmpty(currentAdminRoles)) { - keycloakStore.updateRoleRoles(roleName, currentAdminRoles, newAdminRoles, representation); - } + if (emptyName && emptyGroup && emptyRole) { + throw new AtlasBaseException(AtlasErrorCode.ADMIN_LIST_SHOULD_NOT_BE_EMPTY, existingConnEntity.getTypeName()); + } + // Update Keycloak roles + RoleRepresentation representation = getKeycloakClient().getRoleByName(roleName); + List finalStateUsers = determineFinalState(newAdminUsers, currentAdminUsers); + List finalStateGroups = determineFinalState(newAdminGroups, currentAdminGroups); + List finalStateRoles = determineFinalState(newAdminRoles, currentAdminRoles); + //this is the case where the final state after comparison with current and new value of all the attributes become empty + if (allEmpty(finalStateUsers, finalStateGroups, finalStateRoles)) { + throw new AtlasBaseException(AtlasErrorCode.ADMIN_LIST_SHOULD_NOT_BE_EMPTY, existingConnEntity.getTypeName()); } + keycloakStore.updateRoleUsers(roleName, currentAdminUsers, finalStateUsers, representation); + keycloakStore.updateRoleGroups(roleName, currentAdminGroups, finalStateGroups, representation); + keycloakStore.updateRoleRoles(roleName, currentAdminRoles, finalStateRoles, representation); + + RequestContext.get().endMetricRecord(metricRecorder); } } + // if the list is null 
-> we don't want to change + // if the list is empty -> we want to remove all elements + // if the list is non-empty -> we want to replace + private List determineFinalState(List newAdmins, List currentAdmins) { + return newAdmins == null ? currentAdmins : newAdmins; + } + + private boolean allEmpty(List... lists) { + if (lists == null || lists.length == 0) { + return true; + } + return Stream.of(lists).allMatch(list -> list != null && list.isEmpty()); + } + + + private Optional> getAttributeList(AtlasEntity entity, String attributeName) { + if (entity.hasAttribute(attributeName)) { + if (Objects.isNull(entity.getAttribute(attributeName))) { + return Optional.of(new ArrayList<>(0)); + } + return Optional.of((List) entity.getAttribute(attributeName)); + } + return Optional.empty(); + } + @Override public void processDelete(AtlasVertex vertex) throws AtlasBaseException { // Process Delete connection role and policies in case of hard delete or purge @@ -242,7 +261,7 @@ private boolean isDeleteTypeSoft() { private List getConnectionPolicies(String guid, String roleName) throws AtlasBaseException { List ret = new ArrayList<>(); - + IndexSearchParams indexSearchParams = new IndexSearchParams(); Map dsl = new HashMap<>(); diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/PreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/PreProcessor.java index 0ed51910044..45486e69176 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/PreProcessor.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/PreProcessor.java @@ -11,6 +11,8 @@ import static org.apache.atlas.repository.Constants.ATLAS_GLOSSARY_CATEGORY_ENTITY_TYPE; import static org.apache.atlas.repository.Constants.ATLAS_GLOSSARY_TERM_ENTITY_TYPE; +import static org.apache.atlas.repository.Constants.STAKEHOLDER_ENTITY_TYPE; +import static org.apache.atlas.repository.Constants.STAKEHOLDER_TITLE_ENTITY_TYPE; public interface PreProcessor { @@ -18,6 +20,8 @@ public interface PreProcessor { Set skipInitialAuthCheckTypes = new HashSet() {{ add(ATLAS_GLOSSARY_TERM_ENTITY_TYPE); add(ATLAS_GLOSSARY_CATEGORY_ENTITY_TYPE); + add(STAKEHOLDER_ENTITY_TYPE); + add(STAKEHOLDER_TITLE_ENTITY_TYPE); }}; void processAttributes(AtlasStruct entity, EntityMutationContext context, EntityMutations.EntityOperation operation) throws AtlasBaseException; diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/PreProcessorUtils.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/PreProcessorUtils.java index 6c849004602..3dc97fa642e 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/PreProcessorUtils.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/PreProcessorUtils.java @@ -1,7 +1,11 @@ package org.apache.atlas.repository.store.graph.v2.preprocessor; +import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.discovery.EntityDiscoveryService; import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.discovery.IndexSearchParams; import org.apache.atlas.model.instance.AtlasEntity; +import org.apache.atlas.model.instance.AtlasEntityHeader; import org.apache.atlas.model.instance.AtlasObjectId; import org.apache.atlas.repository.graphdb.AtlasVertex; import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; @@ 
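// Illustrative sketch of the admin-list update semantics spelled out in the comments above:
// attribute absent from the request -> keep the current value, explicitly null or empty ->
// clear, non-empty -> replace. A Map stands in for the incoming entity attributes.
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

class AdminListUpdateSketch {
    static Optional<List<String>> requested(Map<String, Object> attrs, String name) {
        if (!attrs.containsKey(name)) {
            return Optional.empty();                       // not sent -> no change intended
        }
        Object value = attrs.get(name);
        if (value == null) {
            return Optional.of(Collections.emptyList());   // sent as null -> clear the list
        }
        @SuppressWarnings("unchecked")
        List<String> list = (List<String>) value;
        return Optional.of(list);                          // sent -> replace with this list
    }

    static List<String> finalState(List<String> requested, List<String> current) {
        return requested == null ? current : requested;
    }

    public static void main(String[] args) {
        Map<String, Object> request = new HashMap<>();
        request.put("adminUsers", Arrays.asList("alice"));   // replace
        request.put("adminGroups", null);                    // clear
        // "adminRoles" omitted -> keep current

        List<String> users  = finalState(requested(request, "adminUsers").orElse(null),  Arrays.asList("bob"));
        List<String> groups = finalState(requested(request, "adminGroups").orElse(null), Arrays.asList("g1"));
        List<String> roles  = finalState(requested(request, "adminRoles").orElse(null),  Arrays.asList("r1"));
        System.out.println(users + " " + groups + " " + roles);   // [alice] [] [r1]
    }
}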
-11,13 +15,17 @@ import org.apache.atlas.type.AtlasTypeRegistry; import org.apache.atlas.util.NanoIdUtils; import org.apache.atlas.utils.AtlasEntityUtil; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.*; + import static org.apache.atlas.repository.Constants.QUERY_COLLECTION_ENTITY_TYPE; import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; import static org.apache.atlas.repository.Constants.ENTITY_TYPE_PROPERTY_KEY; +import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; public class PreProcessorUtils { private static final Logger LOG = LoggerFactory.getLogger(PreProcessorUtils.class); @@ -32,6 +40,37 @@ public class PreProcessorUtils { public static final String GLOSSARY_TERM_REL_TYPE = "AtlasGlossaryTermAnchor"; public static final String GLOSSARY_CATEGORY_REL_TYPE = "AtlasGlossaryCategoryAnchor"; + //DataMesh models constants + public static final String PARENT_DOMAIN_REL_TYPE = "parentDomain"; + public static final String SUB_DOMAIN_REL_TYPE = "subDomains"; + public static final String DATA_PRODUCT_REL_TYPE = "dataProducts"; + public static final String MIGRATION_CUSTOM_ATTRIBUTE = "isQualifiedNameMigrated"; + public static final String DATA_DOMAIN_REL_TYPE = "dataDomain"; + public static final String STAKEHOLDER_REL_TYPE = "stakeholders"; + + public static final String MESH_POLICY_CATEGORY = "datamesh"; + + public static final String DATA_PRODUCT_EDGE_LABEL = "__DataDomain.dataProducts"; + public static final String DOMAIN_PARENT_EDGE_LABEL = "__DataDomain.subDomains"; + + public static final String PARENT_DOMAIN_QN_ATTR = "parentDomainQualifiedName"; + public static final String SUPER_DOMAIN_QN_ATTR = "superDomainQualifiedName"; + public static final String DAAP_VISIBILITY_ATTR = "daapVisibility"; + public static final String DAAP_VISIBILITY_USERS_ATTR = "daapVisibilityUsers"; + public static final String DAAP_VISIBILITY_GROUPS_ATTR = "daapVisibilityGroups"; + public static final String OUTPUT_PORT_GUIDS_ATTR = "daapOutputPortGuids"; + public static final String INPUT_PORT_GUIDS_ATTR = "daapInputPortGuids"; + + //Migration Constants + public static final String MIGRATION_TYPE_PREFIX = "MIGRATION:"; + public static final String DATA_MESH_QN = MIGRATION_TYPE_PREFIX + "DATA_MESH_QN"; + + public enum MigrationStatus { + IN_PROGRESS, + SUCCESSFUL, + FAILED; + } + //Query models constants public static final String PREFIX_QUERY_QN = "default/collection/"; public static final String COLLECTION_QUALIFIED_NAME = "collectionQualifiedName"; @@ -107,4 +146,58 @@ public static String updateQueryResourceAttributes(AtlasTypeRegistry typeRegistr return newCollectionQualifiedName; } + + public static List indexSearchPaginated(Map dsl, Set attributes, EntityDiscoveryService discovery) throws AtlasBaseException { + IndexSearchParams searchParams = new IndexSearchParams(); + List ret = new ArrayList<>(); + + if (CollectionUtils.isNotEmpty(attributes)) { + searchParams.setAttributes(attributes); + } + + List sortList = new ArrayList<>(0); + sortList.add(mapOf("__timestamp", mapOf("order", "asc"))); + sortList.add(mapOf("__guid", mapOf("order", "asc"))); + dsl.put("sort", sortList); + + int from = 0; + int size = 100; + boolean hasMore = true; + do { + dsl.put("from", from); + dsl.put("size", size); + searchParams.setDsl(dsl); + + List headers = discovery.directIndexSearch(searchParams).getEntities(); + + if (CollectionUtils.isNotEmpty(headers)) { + 
ret.addAll(headers); + } else { + hasMore = false; + } + + from += size; + + } while (hasMore); + + return ret; + } + + public static void verifyDuplicateAssetByName(String typeName, String assetName, EntityDiscoveryService discovery, String errorMessage) throws AtlasBaseException { + List> mustClauseList = new ArrayList(); + mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", typeName))); + mustClauseList.add(mapOf("term", mapOf("__state", "ACTIVE"))); + mustClauseList.add(mapOf("term", mapOf("name.keyword", assetName))); + + + Map bool = mapOf("must", mustClauseList); + + Map dsl = mapOf("query", mapOf("bool", bool)); + + List assets = indexSearchPaginated(dsl, null, discovery); + + if (CollectionUtils.isNotEmpty(assets)) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, errorMessage); + } + } } diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/accesscontrol/PersonaPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/accesscontrol/PersonaPreProcessor.java index 3541e3e4a72..222bca09628 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/accesscontrol/PersonaPreProcessor.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/accesscontrol/PersonaPreProcessor.java @@ -19,6 +19,7 @@ import org.apache.atlas.RequestContext; +import org.apache.atlas.discovery.EntityDiscoveryService; import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.auth.client.keycloak.AtlasKeycloakClient; import org.apache.atlas.model.instance.AtlasEntity; @@ -73,12 +74,12 @@ public class PersonaPreProcessor implements PreProcessor { private static final Logger LOG = LoggerFactory.getLogger(PersonaPreProcessor.class); - private final AtlasGraph graph; - private final AtlasTypeRegistry typeRegistry; - private final EntityGraphRetriever entityRetriever; - private IndexAliasStore aliasStore; - private AtlasEntityStore entityStore; - private KeycloakStore keycloakStore; + protected final AtlasGraph graph; + protected AtlasTypeRegistry typeRegistry; + protected final EntityGraphRetriever entityRetriever; + protected IndexAliasStore aliasStore; + protected AtlasEntityStore entityStore; + protected KeycloakStore keycloakStore; public PersonaPreProcessor(AtlasGraph graph, AtlasTypeRegistry typeRegistry, @@ -209,7 +210,7 @@ private void updatePoliciesIsEnabledAttr(EntityMutationContext context, AtlasEnt } } - private String createKeycloakRole(AtlasEntity entity) throws AtlasBaseException { + protected String createKeycloakRole(AtlasEntity entity) throws AtlasBaseException { String roleName = getPersonaRoleName(entity); List users = getPersonaUsers(entity); List groups = getPersonaGroups(entity); @@ -228,7 +229,7 @@ private String createKeycloakRole(AtlasEntity entity) throws AtlasBaseException return role.getId(); } - private void updateKeycloakRole(AtlasEntity newPersona, AtlasEntity existingPersona) throws AtlasBaseException { + protected void updateKeycloakRole(AtlasEntity newPersona, AtlasEntity existingPersona) throws AtlasBaseException { String roleId = getPersonaRoleId(existingPersona); String roleName = getPersonaRoleName(existingPersona); diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/accesscontrol/StakeholderPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/accesscontrol/StakeholderPreProcessor.java new file mode 
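// Illustrative sketch of the exact-match duplicate check above: the query DSL is assembled as
// nested maps ({"query":{"bool":{"must":[term, term, term]}}}) and any hit means the name is
// already taken. The index search itself is stubbed out; only the DSL shape is shown.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class DuplicateNameCheckSketch {
    static Map<String, Object> mapOf(String key, Object value) {
        Map<String, Object> map = new HashMap<>();
        map.put(key, value);
        return map;
    }

    static Map<String, Object> duplicateNameQuery(String typeName, String assetName) {
        List<Map<String, Object>> must = new ArrayList<>();
        must.add(mapOf("term", mapOf("__typeName.keyword", typeName)));
        must.add(mapOf("term", mapOf("__state", "ACTIVE")));
        must.add(mapOf("term", mapOf("name.keyword", assetName)));
        return mapOf("query", mapOf("bool", mapOf("must", must)));
    }

    public static void main(String[] args) {
        System.out.println(duplicateNameQuery("DataDomain", "Marketing"));
        // A non-empty hit list for this query would be reported as a duplicate-name error.
    }
}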
100644 index 00000000000..1adbb8ec476 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/accesscontrol/StakeholderPreProcessor.java @@ -0,0 +1,322 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.atlas.repository.store.graph.v2.preprocessor.accesscontrol; + + +import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.AtlasException; +import org.apache.atlas.RequestContext; +import org.apache.atlas.authorize.AtlasAuthorizationUtils; +import org.apache.atlas.authorize.AtlasEntityAccessRequest; +import org.apache.atlas.authorize.AtlasPrivilege; +import org.apache.atlas.discovery.EntityDiscoveryService; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.instance.AtlasEntity; +import org.apache.atlas.model.instance.AtlasEntityHeader; +import org.apache.atlas.model.instance.AtlasObjectId; +import org.apache.atlas.model.instance.AtlasStruct; +import org.apache.atlas.model.instance.EntityMutations; +import org.apache.atlas.repository.graphdb.AtlasGraph; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.atlas.repository.store.graph.AtlasEntityStore; +import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; +import org.apache.atlas.repository.store.graph.v2.EntityMutationContext; +import org.apache.atlas.type.AtlasTypeRegistry; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections.MapUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import static java.lang.String.format; +import static org.apache.atlas.AtlasErrorCode.BAD_REQUEST; +import static org.apache.atlas.AtlasErrorCode.OPERATION_NOT_SUPPORTED; +import static org.apache.atlas.repository.Constants.NAME; +import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; +import static org.apache.atlas.repository.Constants.STAKEHOLDER_ENTITY_TYPE; +import static org.apache.atlas.repository.Constants.STAKEHOLDER_TITLE_ENTITY_TYPE; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.indexSearchPaginated; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh.StakeholderTitlePreProcessor.ATTR_DOMAIN_QUALIFIED_NAMES; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh.StakeholderTitlePreProcessor.STAR; +import static org.apache.atlas.repository.util.AccessControlUtils.ATTR_ACCESS_CONTROL_ENABLED; +import static 
org.apache.atlas.repository.util.AccessControlUtils.ATTR_PERSONA_ROLE_ID; +import static org.apache.atlas.repository.util.AccessControlUtils.REL_ATTR_POLICIES; +import static org.apache.atlas.repository.util.AccessControlUtils.getESAliasName; +import static org.apache.atlas.repository.util.AccessControlUtils.getPersonaRoleId; +import static org.apache.atlas.repository.util.AccessControlUtils.getUUID; +import static org.apache.atlas.repository.util.AccessControlUtils.validateNoPoliciesAttached; +import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; + +public class StakeholderPreProcessor extends PersonaPreProcessor { + private static final Logger LOG = LoggerFactory.getLogger(StakeholderPreProcessor.class); + + public static final String ATTR_DOMAIN_QUALIFIED_NAME = "stakeholderDomainQualifiedName"; + public static final String ATTR_STAKEHOLDER_TITLE_GUID = "stakeholderTitleGuid"; + + public static final String REL_ATTR_STAKEHOLDER_TITLE = "stakeholderTitle"; + public static final String REL_ATTR_STAKEHOLDER_DOMAIN = "stakeholderDataDomain"; + + protected EntityDiscoveryService discovery; + + public StakeholderPreProcessor(AtlasGraph graph, + AtlasTypeRegistry typeRegistry, + EntityGraphRetriever entityRetriever, + AtlasEntityStore entityStore) { + super(graph, typeRegistry, entityRetriever, entityStore); + + try { + this.discovery = new EntityDiscoveryService(typeRegistry, graph, null, null, null, null); + } catch (AtlasException e) { + e.printStackTrace(); + } + } + + @Override + public void processAttributes(AtlasStruct entityStruct, EntityMutationContext context, + EntityMutations.EntityOperation operation) throws AtlasBaseException { + if (LOG.isDebugEnabled()) { + LOG.debug("StakeholderPreProcessor.processAttributes: pre processing {}, {}", entityStruct.getAttribute(QUALIFIED_NAME), operation); + } + + AtlasEntity entity = (AtlasEntity) entityStruct; + + switch (operation) { + case CREATE: + processCreateStakeholder(entity); + break; + case UPDATE: + processUpdateStakeholder(context, entity); + break; + } + } + + @Override + public void processDelete(AtlasVertex vertex) throws AtlasBaseException { + AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo = entityRetriever.toAtlasEntityWithExtInfo(vertex); + AtlasEntity stakeholder = entityWithExtInfo.getEntity(); + + if(!stakeholder.getStatus().equals(AtlasEntity.Status.ACTIVE)) { + LOG.info("Stakeholder is already deleted/purged"); + return; + } + + //delete policies + List policies = (List) stakeholder.getRelationshipAttribute(REL_ATTR_POLICIES); + if (CollectionUtils.isNotEmpty(policies)) { + for (AtlasObjectId policyObjectId : policies) { + entityStore.deleteById(policyObjectId.getGuid()); + } + } + + //remove role + keycloakStore.removeRole(getPersonaRoleId(stakeholder)); + + //delete ES alias + aliasStore.deleteAlias(getESAliasName(stakeholder)); + } + + private void processCreateStakeholder(AtlasEntity entity) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processCreateStakeholder"); + + validateNoPoliciesAttached(entity); + + if (!entity.hasRelationshipAttribute(REL_ATTR_STAKEHOLDER_TITLE) || !entity.hasRelationshipAttribute(REL_ATTR_STAKEHOLDER_DOMAIN)) { + throw new AtlasBaseException(BAD_REQUEST, + String.format("Relationships %s and %s are mandatory", REL_ATTR_STAKEHOLDER_TITLE, REL_ATTR_STAKEHOLDER_DOMAIN)); + } + + String domainQualifiedName = getQualifiedNameFromRelationAttribute(entity, REL_ATTR_STAKEHOLDER_DOMAIN); + String 
stakeholderTitleGuid = getGuidFromRelationAttribute(entity, REL_ATTR_STAKEHOLDER_TITLE); + + ensureTitleAvailableForDomain(domainQualifiedName, stakeholderTitleGuid); + + //validate Stakeholder & StakeholderTitle pair is unique for this domain + verifyDuplicateStakeholderByDomainAndTitle(domainQualifiedName, stakeholderTitleGuid); + + //validate Name uniqueness for Stakeholders across this domain + String name = (String) entity.getAttribute(NAME); + verifyDuplicateStakeholderByName(name, domainQualifiedName, discovery); + + entity.setAttribute(ATTR_DOMAIN_QUALIFIED_NAME, domainQualifiedName); + entity.setAttribute(ATTR_STAKEHOLDER_TITLE_GUID, stakeholderTitleGuid); + + String stakeholderQualifiedName = format("default/%s/%s", + getUUID(), + domainQualifiedName); + + entity.setAttribute(QUALIFIED_NAME, stakeholderQualifiedName); + + + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_CREATE, new AtlasEntityHeader(entity)), + "create Stakeholder: ", entity.getAttribute(NAME)); + + entity.setAttribute(ATTR_ACCESS_CONTROL_ENABLED, entity.getAttributes().getOrDefault(ATTR_ACCESS_CONTROL_ENABLED, true)); + + //create keycloak role + String roleId = createKeycloakRole(entity); + + entity.setAttribute(ATTR_PERSONA_ROLE_ID, roleId); + + //create ES alias + aliasStore.createAlias(entity); + + RequestContext.get().endMetricRecord(metricRecorder); + } + + private void processUpdateStakeholder(EntityMutationContext context, AtlasEntity stakeholder) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processUpdateStakeholder"); + + validateNoPoliciesAttached(stakeholder); + + AtlasVertex vertex = context.getVertex(stakeholder.getGuid()); + + AtlasEntity existingStakeholderEntity = entityRetriever.toAtlasEntity(vertex); + + if (!AtlasEntity.Status.ACTIVE.equals(existingStakeholderEntity.getStatus())) { + throw new AtlasBaseException(OPERATION_NOT_SUPPORTED, "Stakeholder is not Active"); + } + + stakeholder.removeAttribute(ATTR_DOMAIN_QUALIFIED_NAME); + stakeholder.removeAttribute(ATTR_STAKEHOLDER_TITLE_GUID); + stakeholder.removeAttribute(ATTR_PERSONA_ROLE_ID); + + if (MapUtils.isNotEmpty(stakeholder.getRelationshipAttributes())) { + stakeholder.getRelationshipAttributes().remove(REL_ATTR_STAKEHOLDER_DOMAIN); + stakeholder.getRelationshipAttributes().remove(REL_ATTR_STAKEHOLDER_TITLE); + } + + String currentName = vertex.getProperty(NAME, String.class); + String newName = (String) stakeholder.getAttribute(NAME); + + if (!currentName.equals(newName)) { + verifyDuplicateStakeholderByName(newName, (String) existingStakeholderEntity.getAttribute(ATTR_DOMAIN_QUALIFIED_NAME), discovery); + } + + String vertexQName = vertex.getProperty(QUALIFIED_NAME, String.class); + stakeholder.setAttribute(QUALIFIED_NAME, vertexQName); + + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_UPDATE, new AtlasEntityHeader(stakeholder)), + "update Stakeholder: ", stakeholder.getAttribute(NAME)); + + updateKeycloakRole(stakeholder, existingStakeholderEntity); + + RequestContext.get().endMetricRecord(metricRecorder); + } + + private String getGuidFromRelationAttribute(AtlasEntity entity, String relationshipAttributeName) throws AtlasBaseException { + AtlasObjectId relationObjectId = (AtlasObjectId) entity.getRelationshipAttribute(relationshipAttributeName); + + String guid = relationObjectId.getGuid(); + if (StringUtils.isEmpty(guid)) { + AtlasVertex vertex = 
entityRetriever.getEntityVertex(relationObjectId); + guid = vertex.getProperty("__guid", String.class); + } + + return guid; + } + + private String getQualifiedNameFromRelationAttribute(AtlasEntity entity, String relationshipAttributeName) throws AtlasBaseException { + AtlasObjectId relationObjectId = (AtlasObjectId) entity.getRelationshipAttribute(relationshipAttributeName); + String qualifiedName = null; + + if (relationObjectId.getUniqueAttributes() != null) { + qualifiedName = (String) relationObjectId.getUniqueAttributes().get(QUALIFIED_NAME); + } + + if (StringUtils.isEmpty(qualifiedName)) { + AtlasVertex vertex = entityRetriever.getEntityVertex(relationObjectId); + qualifiedName = vertex.getProperty(QUALIFIED_NAME, String.class); + } + + return qualifiedName; + } + + protected void verifyDuplicateStakeholderByDomainAndTitle(String domainQualifiedName, String stakeholderTitleGuid) throws AtlasBaseException { + + List> mustClauseList = new ArrayList(); + mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", STAKEHOLDER_ENTITY_TYPE))); + mustClauseList.add(mapOf("term", mapOf("__state", "ACTIVE"))); + mustClauseList.add(mapOf("term", mapOf(ATTR_DOMAIN_QUALIFIED_NAME, domainQualifiedName))); + mustClauseList.add(mapOf("term", mapOf(ATTR_STAKEHOLDER_TITLE_GUID, stakeholderTitleGuid))); + + + Map bool = mapOf("must", mustClauseList); + Map dsl = mapOf("query", mapOf("bool", bool)); + + List assets = indexSearchPaginated(dsl, null, this.discovery); + + if (CollectionUtils.isNotEmpty(assets)) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, + format("Stakeholder for provided title & domain combination already exists with name: %s", assets.get(0).getAttribute(NAME))); + } + } + + protected void ensureTitleAvailableForDomain(String domainQualifiedName, String stakeholderTitleGuid) throws AtlasBaseException { + + List> mustClauseList = new ArrayList(); + mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", STAKEHOLDER_TITLE_ENTITY_TYPE))); + mustClauseList.add(mapOf("term", mapOf("__state", "ACTIVE"))); + mustClauseList.add(mapOf("term", mapOf("__guid", stakeholderTitleGuid))); + + Map bool = mapOf("must", mustClauseList); + Map dsl = mapOf("query", mapOf("bool", bool)); + + List assets = indexSearchPaginated(dsl, Collections.singleton(ATTR_DOMAIN_QUALIFIED_NAMES), this.discovery); + + if (CollectionUtils.isNotEmpty(assets)) { + AtlasEntityHeader stakeholderTitleHeader = assets.get(0); + + List domainQualifiedNames = (List) stakeholderTitleHeader.getAttribute(ATTR_DOMAIN_QUALIFIED_NAMES); + + if (!domainQualifiedNames.contains(STAR)) { + Optional parentDomain = domainQualifiedNames.stream().filter(x -> domainQualifiedName.startsWith(x)).findFirst(); + + if (!parentDomain.isPresent()) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Provided StakeholderTitle is not applicable to the domain"); + } + } + } + } + + public static void verifyDuplicateStakeholderByName(String assetName, String domainQualifiedName, EntityDiscoveryService discovery) throws AtlasBaseException { + + List> mustClauseList = new ArrayList(); + mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", STAKEHOLDER_ENTITY_TYPE))); + mustClauseList.add(mapOf("term", mapOf("__state", "ACTIVE"))); + mustClauseList.add(mapOf("term", mapOf("name.keyword", assetName))); + mustClauseList.add(mapOf("term", mapOf(ATTR_DOMAIN_QUALIFIED_NAME, domainQualifiedName))); + + + Map bool = mapOf("must", mustClauseList); + Map dsl = mapOf("query", mapOf("bool", bool)); + + List assets = 
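// Illustrative sketch of the applicability rule enforced by ensureTitleAvailableForDomain above:
// a StakeholderTitle either carries a wildcard entry (applies to every domain) or lists domain
// qualified names, and the target domain must fall under one of them (prefix match covers
// sub-domains). The wildcard value is assumed to be "*" here; in the code it is the imported
// STAR constant.
import java.util.Arrays;
import java.util.List;

class TitleApplicabilitySketch {
    static boolean isTitleApplicable(List<String> titleDomainQualifiedNames, String domainQualifiedName) {
        if (titleDomainQualifiedNames.contains("*")) {
            return true;                                        // wildcard: valid for all domains
        }
        return titleDomainQualifiedNames.stream()
                .anyMatch(domainQualifiedName::startsWith);     // the domain or any of its sub-domains
    }

    public static void main(String[] args) {
        List<String> titleDomains = Arrays.asList("default/domain/abc/super");
        System.out.println(isTitleApplicable(titleDomains, "default/domain/abc/super/domain/xyz")); // true
        System.out.println(isTitleApplicable(titleDomains, "default/domain/other/super"));          // false
    }
}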
indexSearchPaginated(dsl, null, discovery); + + if (CollectionUtils.isNotEmpty(assets)) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, + format("Stakeholder with name %s already exists for current domain", assetName)); + } + } +} diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/AbstractContractPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/AbstractContractPreProcessor.java new file mode 100644 index 00000000000..0a4521e34b6 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/AbstractContractPreProcessor.java @@ -0,0 +1,96 @@ +package org.apache.atlas.repository.store.graph.v2.preprocessor.contract; + +import org.apache.atlas.RequestContext; +import org.apache.atlas.authorize.AtlasAuthorizationUtils; +import org.apache.atlas.authorize.AtlasEntityAccessRequest; +import org.apache.atlas.authorize.AtlasPrivilege; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.TypeCategory; +import org.apache.atlas.model.instance.AtlasEntity; +import org.apache.atlas.model.instance.AtlasEntityHeader; +import org.apache.atlas.repository.graphdb.AtlasGraph; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2; +import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; +import org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessor; +import org.apache.atlas.type.AtlasEntityType; +import org.apache.atlas.type.AtlasTypeRegistry; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashMap; +import java.util.Map; + +import static org.apache.atlas.AtlasErrorCode.INSTANCE_BY_UNIQUE_ATTRIBUTE_NOT_FOUND; +import static org.apache.atlas.AtlasErrorCode.TYPE_NAME_INVALID; +import static org.apache.atlas.repository.Constants.*; + +public abstract class AbstractContractPreProcessor implements PreProcessor { + private static final Logger LOG = LoggerFactory.getLogger(AbstractContractPreProcessor.class); + + public final AtlasTypeRegistry typeRegistry; + public final EntityGraphRetriever entityRetriever; + public final AtlasGraph graph; + + + AbstractContractPreProcessor(AtlasGraph graph, AtlasTypeRegistry typeRegistry, + EntityGraphRetriever entityRetriever) { + this.graph = graph; + this.typeRegistry = typeRegistry; + this.entityRetriever = entityRetriever; + } + + void authorizeContractCreateOrUpdate(AtlasEntity contractEntity, AtlasEntity.AtlasEntityWithExtInfo associatedAsset) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("authorizeContractUpdate"); + try { + AtlasEntityHeader entityHeader = new AtlasEntityHeader(associatedAsset.getEntity()); + + //First authorize entity update access + verifyAssetAccess(entityHeader, AtlasPrivilege.ENTITY_UPDATE, contractEntity, AtlasPrivilege.ENTITY_UPDATE); + + } finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + + + private void verifyAssetAccess(AtlasEntityHeader asset, AtlasPrivilege assetPrivilege, + AtlasEntity contract, AtlasPrivilege contractPrivilege) throws AtlasBaseException { + verifyAccess(asset, assetPrivilege); + verifyAccess(contract, contractPrivilege); + } + + private void verifyAccess(AtlasEntity entity, AtlasPrivilege privilege) throws AtlasBaseException { + verifyAccess(new 
AtlasEntityHeader(entity), privilege); + } + + private void verifyAccess(AtlasEntityHeader entityHeader, AtlasPrivilege privilege) throws AtlasBaseException { + String errorMessage = privilege.name() + " entity: " + entityHeader.getTypeName(); + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, privilege, entityHeader), errorMessage); + } + + AtlasEntity.AtlasEntityWithExtInfo getAssociatedAsset(String datasetQName, String typeName) throws AtlasBaseException { + + Map uniqAttributes = new HashMap<>(); + uniqAttributes.put(QUALIFIED_NAME, datasetQName); + + AtlasEntityType entityType = ensureEntityType(typeName); + + AtlasVertex entityVertex = AtlasGraphUtilsV2.getVertexByUniqueAttributes(graph, entityType, uniqAttributes); + + return entityRetriever.toAtlasEntityWithExtInfo(entityVertex); + } + + AtlasEntityType ensureEntityType(String typeName) throws AtlasBaseException { + AtlasEntityType ret = typeRegistry.getEntityTypeByName(typeName); + + if (ret == null) { + throw new AtlasBaseException(TYPE_NAME_INVALID, TypeCategory.ENTITY.name(), typeName); + } + + return ret; + } + + +} diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/ContractPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/ContractPreProcessor.java new file mode 100644 index 00000000000..98add96a39b --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/ContractPreProcessor.java @@ -0,0 +1,307 @@ +package org.apache.atlas.repository.store.graph.v2.preprocessor.contract; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; + +import org.apache.atlas.RequestContext; +import org.apache.atlas.discovery.EntityDiscoveryService; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.discovery.AtlasSearchResult; +import org.apache.atlas.model.discovery.IndexSearchParams; +import org.apache.atlas.model.instance.AtlasEntity; +import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; +import org.apache.atlas.model.instance.AtlasStruct; +import org.apache.atlas.model.instance.EntityMutations; +import org.apache.atlas.repository.graphdb.AtlasGraph; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.atlas.repository.store.graph.v2.*; +import org.apache.atlas.type.AtlasEntityType; +import org.apache.atlas.type.AtlasTypeRegistry; +import org.apache.commons.collections.CollectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import java.util.*; + +import static org.apache.atlas.AtlasErrorCode.*; +import static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; +import static org.apache.atlas.type.AtlasTypeUtil.getAtlasObjectId; + +public class ContractPreProcessor extends AbstractContractPreProcessor { + private static final Logger LOG = LoggerFactory.getLogger(ContractPreProcessor.class); + public static final String ATTR_ASSET_GUID = "dataContractAssetGuid"; + public static final String REL_ATTR_LATEST_CONTRACT = "dataContractLatest"; + public static final String REL_ATTR_GOVERNED_ASSET_CERTIFIED = "dataContractLatestCertified"; + public static final String REL_ATTR_PREVIOUS_VERSION = "dataContractPreviousVersion"; + public 
static final String ASSET_ATTR_HAS_CONTRACT = "hasContract"; + public static final String CONTRACT_QUALIFIED_NAME_SUFFIX = "contract"; + public static final String CONTRACT_ATTR_STATUS = "status"; + private static final Set contractAttributes = new HashSet<>(); + static { + contractAttributes.add(ATTR_CONTRACT); + contractAttributes.add(ATTR_CERTIFICATE_STATUS); + contractAttributes.add(ATTR_CONTRACT_VERSION); + } + private final boolean storeDifferentialAudits; + private final EntityDiscoveryService discovery; + + private final AtlasEntityComparator entityComparator; + + + public ContractPreProcessor(AtlasGraph graph, AtlasTypeRegistry typeRegistry, + EntityGraphRetriever entityRetriever, + boolean storeDifferentialAudits, EntityDiscoveryService discovery) { + + super(graph, typeRegistry, entityRetriever); + this.storeDifferentialAudits = storeDifferentialAudits; + this.discovery = discovery; + this.entityComparator = new AtlasEntityComparator(typeRegistry, entityRetriever, null, true, true); + + } + + @Override + public void processAttributes(AtlasStruct entityStruct, EntityMutationContext context, EntityMutations.EntityOperation operation) throws AtlasBaseException { + AtlasEntity entity = (AtlasEntity) entityStruct; + switch (operation) { + case CREATE: + processCreateContract(entity, context); + break; + case UPDATE: + // Updating an existing version of the contract + processUpdateContract(entity, context); + } + + } + + private void processUpdateContract(AtlasEntity entity, EntityMutationContext context) throws AtlasBaseException { + String contractString = (String) entity.getAttribute(ATTR_CONTRACT); + AtlasVertex vertex = context.getVertex(entity.getGuid()); + AtlasEntity existingContractEntity = entityRetriever.toAtlasEntity(vertex); + // No update to relationships allowed for the existing contract version + resetAllRelationshipAttributes(entity); + if (!isEqualContract(contractString, (String) existingContractEntity.getAttribute(ATTR_CONTRACT))) { + // Update the same asset(entity) + throw new AtlasBaseException(OPERATION_NOT_SUPPORTED, "Can't update a specific version of contract"); + } + } + private void processCreateContract(AtlasEntity entity, EntityMutationContext context) throws AtlasBaseException { + /* + Low-level Design + | Authorization + | Deserialization of the JSON + ---| Validation of spec + | Validation of contract + | Create Version + | Create Draft + ---| asset to contract sync + | Create Publish + ---| two-way sync of attribute + */ + + String contractQName = (String) entity.getAttribute(QUALIFIED_NAME); + validateAttribute(!contractQName.endsWith(String.format("/%s", CONTRACT_QUALIFIED_NAME_SUFFIX)), "Invalid qualifiedName for the contract."); + + String contractString = (String) entity.getAttribute(ATTR_CONTRACT); + DataContract contract = DataContract.deserialize(contractString); + String datasetQName = contractQName.substring(0, contractQName.lastIndexOf('/')); + contractQName = String.format("%s/%s/%s", datasetQName, contract.getType().name(), CONTRACT_QUALIFIED_NAME_SUFFIX); + AtlasEntityWithExtInfo associatedAsset = getAssociatedAsset(datasetQName, contract.getType().name()); + + authorizeContractCreateOrUpdate(entity, associatedAsset); + + boolean contractSync = syncContractCertificateStatus(entity, contract); + contractString = DataContract.serialize(contract); + entity.setAttribute(ATTR_CONTRACT, contractString); + + + AtlasEntity currentVersionEntity = getCurrentVersion(associatedAsset.getEntity().getGuid()); + Long newVersionNumber = 1L; + if 
(currentVersionEntity != null) {
+            // Contract already exists
+            Long currentVersionNumber = (Long) currentVersionEntity.getAttribute(ATTR_CONTRACT_VERSION);
+            List<String> attributes = getDiffAttributes(entity, currentVersionEntity);
+            if (attributes.isEmpty()) {
+                // No changes in the contract; not creating a new version
+                removeCreatingVertex(context, entity);
+                return;
+            } else if (isEqualContract(contractString, (String) currentVersionEntity.getAttribute(ATTR_CONTRACT))) {
+                resetAllRelationshipAttributes(entity);
+                // No change in the contract itself, only metadata changed
+                updateExistingVersion(context, entity, currentVersionEntity);
+                newVersionNumber = currentVersionNumber;
+            } else {
+                // Contract changed (metadata may or may not have changed); create a new version
+                newVersionNumber = currentVersionNumber + 1;
+
+                resetAllRelationshipAttributes(entity);
+                // Attach the previous version via relationship
+                entity.setRelationshipAttribute(REL_ATTR_PREVIOUS_VERSION, getAtlasObjectId(currentVersionEntity));
+                AtlasVertex vertex = AtlasGraphUtilsV2.findByGuid(currentVersionEntity.getGuid());
+                AtlasEntityType entityType = ensureEntityType(currentVersionEntity.getTypeName());
+                context.addUpdated(currentVersionEntity.getGuid(), currentVersionEntity, entityType, vertex);
+            }
+        }
+        entity.setAttribute(QUALIFIED_NAME, String.format("%s/V%s", contractQName, newVersionNumber));
+        entity.setAttribute(ATTR_CONTRACT_VERSION, newVersionNumber);
+        entity.setAttribute(ATTR_ASSET_GUID, associatedAsset.getEntity().getGuid());
+
+        datasetAttributeSync(context, associatedAsset.getEntity(), entity);
+    }
+
+    private List<String> getDiffAttributes(AtlasEntity entity, AtlasEntity latestExistingVersion) throws AtlasBaseException {
+        AtlasEntityComparator.AtlasEntityDiffResult diffResult = entityComparator.getDiffResult(entity, latestExistingVersion, false);
+        List<String> attributesSet = new ArrayList<>();
+
+        if (diffResult.hasDifference()) {
+            for (Map.Entry<String, Object> entry : diffResult.getDiffEntity().getAttributes().entrySet()) {
+                if (!entry.getKey().equals(QUALIFIED_NAME)) {
+                    attributesSet.add(entry.getKey());
+                }
+            }
+        }
+        return attributesSet;
+    }
+
+    private boolean isEqualContract(String firstNode, String secondNode) throws AtlasBaseException {
+        ObjectMapper mapper = new ObjectMapper();
+        try {
+            JsonNode actualObj1 = mapper.readTree(firstNode);
+            JsonNode actualObj2 = mapper.readTree(secondNode);
+            // Ignore status field change
+            ((ObjectNode) actualObj1).remove(CONTRACT_ATTR_STATUS);
+            ((ObjectNode) actualObj2).remove(CONTRACT_ATTR_STATUS);
+
+            return actualObj1.equals(actualObj2);
+        } catch (JsonProcessingException e) {
+            throw new AtlasBaseException(JSON_ERROR, e.getMessage());
+        }
+    }
+
+    private void updateExistingVersion(EntityMutationContext context, AtlasEntity entity, AtlasEntity currentVersionEntity) throws AtlasBaseException {
+        removeCreatingVertex(context, entity);
+        entity.setAttribute(QUALIFIED_NAME, currentVersionEntity.getAttribute(QUALIFIED_NAME));
+        entity.setGuid(currentVersionEntity.getGuid());
+        AtlasVertex vertex = AtlasGraphUtilsV2.findByGuid(entity.getGuid());
+        AtlasEntityType entityType = ensureEntityType(entity.getTypeName());
+
+        context.addUpdated(entity.getGuid(), entity, entityType, vertex);
+        recordEntityMutatedDetails(context, entity, vertex);
+    }
+
+    public AtlasEntity getCurrentVersion(String datasetGuid) throws AtlasBaseException {
+        IndexSearchParams indexSearchParams = new IndexSearchParams();
+        Map<String, Object> dsl = new HashMap<>();
+        int size = 1;
+
+        List<Map<String, Object>> mustClauseList = new ArrayList<>();
+        mustClauseList.add(mapOf("term",
mapOf("__typeName.keyword", CONTRACT_ENTITY_TYPE))); + mustClauseList.add(mapOf("term", mapOf(ATTR_ASSET_GUID, datasetGuid))); + + dsl.put("query", mapOf("bool", mapOf("must", mustClauseList))); + dsl.put("sort", Collections.singletonList(mapOf(ATTR_CONTRACT_VERSION, mapOf("order", "desc")))); + dsl.put("size", size); + + indexSearchParams.setDsl(dsl); + indexSearchParams.setAttributes(contractAttributes); + indexSearchParams.setSuppressLogs(true); + + AtlasSearchResult result = discovery.directIndexSearch(indexSearchParams); + if (result == null || CollectionUtils.isEmpty(result.getEntities())) { + return null; + } + return new AtlasEntity(result.getEntities().get(0)); + } + + private void removeCreatingVertex(EntityMutationContext context, AtlasEntity entity) { + context.getCreatedEntities().remove(entity); + graph.removeVertex(context.getVertex(entity.getGuid())); + } + + private void resetAllRelationshipAttributes(AtlasEntity entity) { + if (entity.getRemoveRelationshipAttributes() != null) { + entity.setRemoveRelationshipAttributes(null); + } + if (entity.getAppendRelationshipAttributes() != null) { + entity.setAppendRelationshipAttributes(null); + } + if (entity.getRelationshipAttributes() != null) { + entity.setRelationshipAttributes(null); + } + } + + private boolean syncContractCertificateStatus(AtlasEntity entity, DataContract contract) throws AtlasBaseException { + boolean contractSync = false; + // Sync certificateStatus + if (!Objects.equals(entity.getAttribute(ATTR_CERTIFICATE_STATUS), contract.getStatus().name())) { + /* + CertificateStatus | Status | Result + DRAFT VERIFIED cert -> VERIFIED > + VERIFIED DRAFT stat -> VERIFIED > + - DRAFT cert -> DRAFT + - VERIFIED cert -> VERIFIED > + DRAFT - stat -> DRAFT + VERIFIED - stat -> VERIFIED > + + */ + if (Objects.equals(entity.getAttribute(ATTR_CERTIFICATE_STATUS), DataContract.Status.VERIFIED.name())) { + contract.setStatus(String.valueOf(DataContract.Status.VERIFIED)); + contractSync = true; + } else if (Objects.equals(contract.getStatus(), DataContract.Status.VERIFIED)) { + entity.setAttribute(ATTR_CERTIFICATE_STATUS, DataContract.Status.VERIFIED.name()); + } else { + entity.setAttribute(ATTR_CERTIFICATE_STATUS, DataContract.Status.DRAFT); + contract.setStatus(String.valueOf(DataContract.Status.DRAFT)); + contractSync = true; + } + + } + return contractSync; + + } + + private void datasetAttributeSync(EntityMutationContext context, AtlasEntity associatedAsset, AtlasEntity contractAsset) throws AtlasBaseException { + // Creating new empty AtlasEntity to update with selective attributes only + AtlasEntity entity = new AtlasEntity(associatedAsset.getTypeName()); + entity.setGuid(associatedAsset.getGuid()); + entity.setAttribute(QUALIFIED_NAME, associatedAsset.getAttribute(QUALIFIED_NAME)); + if (associatedAsset.getAttribute(ASSET_ATTR_HAS_CONTRACT) == null || associatedAsset.getAttribute(ASSET_ATTR_HAS_CONTRACT).equals(false)) { + entity.setAttribute(ASSET_ATTR_HAS_CONTRACT, true); + } + + // Update relationship with contract + entity.setRelationshipAttribute(REL_ATTR_LATEST_CONTRACT, getAtlasObjectId(contractAsset)); + if (Objects.equals(contractAsset.getAttribute(ATTR_CERTIFICATE_STATUS), DataContract.Status.VERIFIED.name()) ) { + entity.setRelationshipAttribute(REL_ATTR_GOVERNED_ASSET_CERTIFIED, getAtlasObjectId(contractAsset)); + } + + AtlasVertex vertex = AtlasGraphUtilsV2.findByGuid(entity.getGuid()); + AtlasEntityType entityType = ensureEntityType(entity.getTypeName()); + context.addUpdated(entity.getGuid(), entity, 
entityType, vertex); + recordEntityMutatedDetails(context, entity, vertex); + } + + private void recordEntityMutatedDetails(EntityMutationContext context, AtlasEntity entity, AtlasVertex vertex) throws AtlasBaseException { + AtlasEntityComparator entityComparator = new AtlasEntityComparator(typeRegistry, entityRetriever, context.getGuidAssignments(), true, true); + AtlasEntityComparator.AtlasEntityDiffResult diffResult = entityComparator.getDiffResult(entity, vertex, !storeDifferentialAudits); + RequestContext reqContext = RequestContext.get(); + if (diffResult.hasDifference()) { + if (storeDifferentialAudits) { + diffResult.getDiffEntity().setGuid(entity.getGuid()); + reqContext.cacheDifferentialEntity(diffResult.getDiffEntity()); + } + } + } + + private static void validateAttribute(boolean isInvalid, String errorMessage) throws AtlasBaseException { + if (isInvalid) + throw new AtlasBaseException(BAD_REQUEST, errorMessage); + } +} diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/DataContract.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/DataContract.java new file mode 100644 index 00000000000..dc3cdb466b8 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/contract/DataContract.java @@ -0,0 +1,252 @@ +package org.apache.atlas.repository.store.graph.v2.preprocessor.contract; + +import java.lang.String; +import java.util.*; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import com.fasterxml.jackson.annotation.*; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.commons.lang.StringUtils; + +import javax.validation.*; +import javax.validation.constraints.NotNull; +import java.util.Set; + +import static org.apache.atlas.AtlasErrorCode.*; + + +@JsonIgnoreProperties(ignoreUnknown = true) +@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonPropertyOrder({"kind", "status", "template_version", "data_source", "dataset", "type", "description", "owners", + "tags", "certificate", "columns"}) +public class DataContract { + private static final String KIND_VALUE = "DataContract"; + private static final Pattern versionPattern = Pattern.compile("^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$"); + private static final ObjectMapper objectMapper = new ObjectMapper(); + static { + objectMapper.enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS); + } + + @Valid @NotNull + public String kind; + public Status status; + @JsonProperty(value = "template_version", defaultValue = "0.0.1") + public String templateVersion; + @Valid @NotNull + public String data_source; + @Valid @NotNull + public String dataset; + @Valid @NotNull + public DatasetType type; + public String description; + public List owners; + public List tags; + public String certificate; + @Valid + public List columns; + private final Map unknownFields = new HashMap<>(); + + public enum Status { + @JsonProperty("DRAFT") DRAFT, + @JsonProperty("VERIFIED") VERIFIED; + + public static Status from(String s) { + if(StringUtils.isEmpty(s)) { + return DRAFT; + } + switch (s.toLowerCase()) { + case "draft": + return 
DRAFT; + + case "verified": + return VERIFIED; + + default: + return DRAFT; + } + } + } + public enum DatasetType { + @JsonProperty("Table") Table, + @JsonProperty("View") View, + @JsonProperty("MaterialisedView") MaterialisedView; + + public static DatasetType from(String s) throws AtlasBaseException { + + switch (s.toLowerCase()) { + case "table": + return Table; + case "view": + return View; + case "materialisedview": + return MaterialisedView; + default: + throw new AtlasBaseException(String.format("dataset.type: %s value not supported yet.", s)); + } + } + } + + public Status getStatus() { + return status; + } + + public DatasetType getType() { + return type; + } + + @JsonAnyGetter + public Map getUnknownFields() { + return unknownFields; + } + + @JsonSetter("kind") + public void setKind(String kind) throws AtlasBaseException { + if (!KIND_VALUE.equals(kind)) { + throw new AtlasBaseException(AtlasErrorCode.INVALID_VALUE, "kind " + kind + " is inappropriate."); + } + this.kind = kind; + } + + @JsonSetter("status") + public void setStatus(String status) throws AtlasBaseException { + try { + this.status = Status.from(status); + } catch (IllegalArgumentException ex) { + throw new AtlasBaseException(AtlasErrorCode.INVALID_VALUE, "status " + status + " is inappropriate. Accepted values: " + Arrays.toString(Status.values())); + } + } + + public void setTemplateVersion(String templateVersion) throws AtlasBaseException { + if (!isSemVer(templateVersion)) { + throw new AtlasBaseException(AtlasErrorCode.INVALID_VALUE, "Invalid template_version syntax"); + } + this.templateVersion = templateVersion; + } + + @JsonSetter("data_source") + public void setDataSource(String data_source) { + this.data_source = data_source; + } + + public void setDataset(String dataset) { + this.dataset = dataset; + } + + public void setType(String type) throws AtlasBaseException { + try { + this.type = DatasetType.from(type); + } catch (IllegalArgumentException | AtlasBaseException ex) { + throw new AtlasBaseException(AtlasErrorCode.INVALID_VALUE, "type " + type + " is inappropriate. 
Accepted values: " + Arrays.toString(DatasetType.values())); + } + } + + public void setOwners(List owners) { + this.owners = owners; + } + + public void setTags(List tags) { + this.tags = tags; + } + + public void setColumns(List columns) { + this.columns = columns; + } + + @JsonAnySetter + public void setUnknownFields(String key, Object value) { + unknownFields.put(key, value); + } + + private boolean isSemVer(String version) { + Matcher matcher = versionPattern.matcher(version); + return matcher.matches(); + } + + @JsonIgnoreProperties(ignoreUnknown = true) + @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonPropertyOrder({"name"}) + public static final class BusinessTag { + @NotNull + public String name; + private final Map unknownFields = new HashMap<>(); + + @JsonAnySetter + public void setUnknownFields(String key, Object value) { + unknownFields.put(key, value); + } + @JsonAnyGetter + public Map getUnknownFields() { + return unknownFields; + } + + } + + @JsonIgnoreProperties(ignoreUnknown = true) + @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonPropertyOrder({"name", "description", "data_type"}) + public static final class Column { + @NotNull + public String name; + + public String description; + + public String data_type; + private final Map unknownFields = new HashMap<>(); + + @JsonAnySetter + public void setUnknownFields(String key, Object value) { + unknownFields.put(key, value); + } + @JsonAnyGetter + public Map getUnknownFields() { + return unknownFields; + } + } + + public static DataContract deserialize(String contractString) throws AtlasBaseException { + + if (StringUtils.isEmpty(contractString)) { + throw new AtlasBaseException(BAD_REQUEST, "Missing attribute: contract."); + } + + DataContract contract; + try { + contract = objectMapper.readValue(contractString, DataContract.class); + } catch (JsonProcessingException ex) { + throw new AtlasBaseException(ex.getOriginalMessage()); + } + contract.validate(); + return contract; + + } + + public void validate() throws AtlasBaseException { + Validator validator = Validation.buildDefaultValidatorFactory().getValidator(); + Set> violations = validator.validate(this); + if (!violations.isEmpty()) { + List errorMessageList = new ArrayList<>(); + for (ConstraintViolation violation : violations) { + errorMessageList.add(String.format("Field: %s -> %s", violation.getPropertyPath(), violation.getMessage())); + System.out.println(violation.getMessage()); + } + throw new AtlasBaseException(StringUtils.join(errorMessageList, "; ")); + } + + } + + public static String serialize(DataContract contract) throws AtlasBaseException { + + try { + return objectMapper.writeValueAsString(contract); + } catch (JsonProcessingException ex) { + throw new AtlasBaseException(JSON_ERROR, ex.getMessage()); + } + } + +} + diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/AbstractDomainPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/AbstractDomainPreProcessor.java new file mode 100644 index 00000000000..f62740bc67f --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/AbstractDomainPreProcessor.java @@ -0,0 +1,370 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh; + +import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.AtlasException; +import org.apache.atlas.RequestContext; +import org.apache.atlas.authorize.AtlasAuthorizationUtils; +import org.apache.atlas.authorize.AtlasEntityAccessRequest; +import org.apache.atlas.authorize.AtlasPrivilege; +import org.apache.atlas.discovery.EntityDiscoveryService; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.instance.AtlasEntity; +import org.apache.atlas.model.instance.AtlasEntityHeader; +import org.apache.atlas.model.instance.AtlasObjectId; +import org.apache.atlas.model.instance.EntityMutations; +import org.apache.atlas.repository.graph.GraphHelper; +import org.apache.atlas.repository.graphdb.AtlasGraph; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; +import org.apache.atlas.repository.store.graph.v2.EntityMutationContext; +import org.apache.atlas.repository.store.graph.v2.preprocessor.AuthPolicyPreProcessor; +import org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessor; +import org.apache.atlas.type.AtlasEntityType; +import org.apache.atlas.type.AtlasTypeRegistry; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections.MapUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +import static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.*; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.accesscontrol.StakeholderPreProcessor.ATTR_DOMAIN_QUALIFIED_NAME; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh.StakeholderTitlePreProcessor.ATTR_DOMAIN_QUALIFIED_NAMES; +import static org.apache.atlas.repository.util.AccessControlUtils.ATTR_POLICY_RESOURCES; +import static org.apache.atlas.repository.util.AccessControlUtils.REL_ATTR_ACCESS_CONTROL; +import static org.apache.atlas.repository.util.AtlasEntityUtils.getListAttribute; +import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; + +public abstract class AbstractDomainPreProcessor implements PreProcessor { + private static final Logger LOG = LoggerFactory.getLogger(AbstractDomainPreProcessor.class); + + + protected final AtlasTypeRegistry typeRegistry; + protected final EntityGraphRetriever entityRetriever; + protected EntityGraphRetriever entityRetrieverNoRelations; + private final PreProcessor preProcessor; + protected EntityDiscoveryService discovery; + + private static final Set POLICY_ATTRIBUTES_FOR_SEARCH = new HashSet<>(Arrays.asList(ATTR_POLICY_RESOURCES)); + private static final Set STAKEHOLDER_ATTRIBUTES_FOR_SEARCH = new 
HashSet<>(Arrays.asList(ATTR_DOMAIN_QUALIFIED_NAMES, ATTR_DOMAIN_QUALIFIED_NAME)); + + static final Set PARENT_ATTRIBUTES = new HashSet<>(Arrays.asList(SUPER_DOMAIN_QN_ATTR, PARENT_DOMAIN_QN_ATTR)); + + static final Map customAttributes = new HashMap<>(); + + static { + customAttributes.put(MIGRATION_CUSTOM_ATTRIBUTE, "true"); + } + + AbstractDomainPreProcessor(AtlasTypeRegistry typeRegistry, EntityGraphRetriever entityRetriever, AtlasGraph graph) { + this.entityRetriever = entityRetriever; + this.typeRegistry = typeRegistry; + this.preProcessor = new AuthPolicyPreProcessor(graph, typeRegistry, entityRetriever); + + try { + this.entityRetrieverNoRelations = new EntityGraphRetriever(graph, typeRegistry, true); + this.discovery = new EntityDiscoveryService(typeRegistry, graph, null, null, null, null); + } catch (AtlasException e) { + e.printStackTrace(); + } + } + + protected void isAuthorized(AtlasEntityHeader sourceDomain, AtlasEntityHeader targetDomain) throws AtlasBaseException { + + if(sourceDomain != null){ + // source -> CREATE + UPDATE + DELETE + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_CREATE, sourceDomain), + "create on source Domain: ", sourceDomain.getAttribute(NAME)); + + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_UPDATE, sourceDomain), + "update on source Domain: ", sourceDomain.getAttribute(NAME)); + } + + if(targetDomain != null){ + // target -> CREATE + UPDATE + DELETE + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_CREATE, targetDomain), + "create on target Domain: ", targetDomain.getAttribute(NAME)); + + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_UPDATE, targetDomain), + "update on target Domain: ", targetDomain.getAttribute(NAME)); + } + } + + protected void updatePolicies(Map updatedPolicyResources, EntityMutationContext context) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("updatePolicies"); + try { + AtlasEntityType entityType = typeRegistry.getEntityTypeByName(POLICY_ENTITY_TYPE); + + if (MapUtils.isEmpty(updatedPolicyResources)) { + return; + } + + List policies = getPolicies(updatedPolicyResources.keySet()); + LOG.info("Found {} policies to update", policies.size()); + + if (CollectionUtils.isNotEmpty(policies)) { + for (AtlasEntityHeader policy : policies) { + LOG.info("Updating Policy {}", policy.getGuid()); + AtlasVertex policyVertex = entityRetriever.getEntityVertex(policy.getGuid()); + + AtlasEntity policyEntity = entityRetriever.toAtlasEntity(policyVertex); + + if (policyEntity.hasRelationshipAttribute(REL_ATTR_ACCESS_CONTROL) && policyEntity.getRelationshipAttribute(REL_ATTR_ACCESS_CONTROL) != null) { + AtlasObjectId accessControlObjId = (AtlasObjectId) policyEntity.getRelationshipAttribute(REL_ATTR_ACCESS_CONTROL); + AtlasVertex accessControl = entityRetriever.getEntityVertex(accessControlObjId.getGuid()); + context.getDiscoveryContext().addResolvedGuid(GraphHelper.getGuid(accessControl), accessControl); + } + + List policyResources = (List) policyEntity.getAttribute(ATTR_POLICY_RESOURCES); + + List updatedPolicyResourcesList = new ArrayList<>(); + + for (String resource : policyResources) { + if (updatedPolicyResources.containsKey(resource)) { + updatedPolicyResourcesList.add(updatedPolicyResources.get(resource)); + } else { + 
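+                        // resource is not affected by this update; carry it over unchanged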
updatedPolicyResourcesList.add(resource); + } + } + Map updatedAttributes = new HashMap<>(); + updatedAttributes.put(ATTR_POLICY_RESOURCES, updatedPolicyResourcesList); + + //policyVertex.removeProperty(ATTR_POLICY_RESOURCES); + policyEntity.setAttribute(ATTR_POLICY_RESOURCES, updatedPolicyResourcesList); + + context.addUpdated(policyEntity.getGuid(), policyEntity, entityType, policyVertex); + recordUpdatedChildEntities(policyVertex, updatedAttributes); + this.preProcessor.processAttributes(policyEntity, context, EntityMutations.EntityOperation.UPDATE); + } + } + + } finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + + protected void updateStakeholderTitlesAndStakeholders(Map updatedDomainQualifiedNames, EntityMutationContext context) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("updateStakeholderTitlesAndStakeholders"); + try { + + if (MapUtils.isEmpty(updatedDomainQualifiedNames)) { + return; + } + + List assets = getStakeholderTitlesAndStakeholders(updatedDomainQualifiedNames.keySet()); + + if (CollectionUtils.isNotEmpty(assets)) { + for (AtlasEntityHeader asset : assets) { + AtlasVertex vertex = entityRetrieverNoRelations.getEntityVertex(asset.getGuid()); + AtlasEntity entity = entityRetrieverNoRelations.toAtlasEntity(vertex); + Map updatedAttributes = new HashMap<>(); + AtlasEntityType entityType = null; + + if (entity.getTypeName().equals(STAKEHOLDER_ENTITY_TYPE)) { + entityType = typeRegistry.getEntityTypeByName(STAKEHOLDER_ENTITY_TYPE); + + String currentDomainQualifiedName = (String) asset.getAttribute(ATTR_DOMAIN_QUALIFIED_NAME); + + entity.setAttribute(ATTR_DOMAIN_QUALIFIED_NAME, updatedDomainQualifiedNames.get(currentDomainQualifiedName)); + updatedAttributes.put(ATTR_DOMAIN_QUALIFIED_NAME, updatedDomainQualifiedNames.get(currentDomainQualifiedName)); + + } else if (entity.getTypeName().equals(STAKEHOLDER_TITLE_ENTITY_TYPE)) { + entityType = typeRegistry.getEntityTypeByName(STAKEHOLDER_TITLE_ENTITY_TYPE); + + List currentDomainQualifiedNames = getListAttribute(asset, ATTR_DOMAIN_QUALIFIED_NAMES); + + List newDomainQualifiedNames = new ArrayList<>(); + + for (String qualifiedName : currentDomainQualifiedNames) { + if (updatedDomainQualifiedNames.containsKey(qualifiedName)) { + newDomainQualifiedNames.add(updatedDomainQualifiedNames.get(qualifiedName)); + } else { + newDomainQualifiedNames.add(qualifiedName); + } + } + + entity.setAttribute(ATTR_DOMAIN_QUALIFIED_NAMES, newDomainQualifiedNames); + updatedAttributes.put(ATTR_DOMAIN_QUALIFIED_NAMES, newDomainQualifiedNames); + } + + context.addUpdated(entity.getGuid(), entity, entityType, vertex); + recordUpdatedChildEntities(vertex, updatedAttributes); + } + } + + } finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + + protected void exists(String assetType, String assetName, String parentDomainQualifiedName, String guid) throws AtlasBaseException { + boolean exists = false; + + List> mustClauseList = new ArrayList(); + mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", assetType))); + mustClauseList.add(mapOf("term", mapOf("__state", "ACTIVE"))); + mustClauseList.add(mapOf("term", mapOf("name.keyword", assetName))); + List> mustNotClauseList = new ArrayList(); + if(StringUtils.isNotEmpty(guid)){ + mustNotClauseList.add(mapOf("term", mapOf("__guid", guid))); + } + + Map bool = new HashMap<>(); + if (StringUtils.isNotEmpty(parentDomainQualifiedName)) { + mustClauseList.add(mapOf("term", 
mapOf("parentDomainQualifiedName", parentDomainQualifiedName))); + } else { + mustNotClauseList.add(mapOf("exists", mapOf("field", "parentDomainQualifiedName"))); + } + + bool.put("must", mustClauseList); + if(!mustNotClauseList.isEmpty()) { + bool.put("must_not", mustNotClauseList); + } + Map dsl = mapOf("query", mapOf("bool", bool)); + + List assets = indexSearchPaginated(dsl, null, this.discovery); + + if (CollectionUtils.isNotEmpty(assets)) { + for (AtlasEntityHeader asset : assets) { + String name = (String) asset.getAttribute(NAME); + if (assetName.equals(name)) { + exists = true; + break; + } + } + } + + if (exists) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, + String.format("%s with name %s already exists in the domain", assetType, assetName)); + } + } + + protected List getPolicies(Set resources) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("getPolicies"); + try { + List> mustClauseList = new ArrayList<>(); + mustClauseList.add(mapOf("term", mapOf("__typeName.keyword", POLICY_ENTITY_TYPE))); + mustClauseList.add(mapOf("term", mapOf("__state", "ACTIVE"))); + mustClauseList.add(mapOf("terms", mapOf("policyResources", resources))); + + Map bool = new HashMap<>(); + bool.put("must", mustClauseList); + + Map dsl = mapOf("query", mapOf("bool", bool)); + + return indexSearchPaginated(dsl, POLICY_ATTRIBUTES_FOR_SEARCH, discovery); + } finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + + protected List getStakeholderTitlesAndStakeholders(Set qualifiedNames) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("getStakeholderTitlesAndStakeholders"); + try { + List> mustClauseList = new ArrayList<>(); + mustClauseList.add(mapOf("terms", mapOf("__typeName.keyword", Arrays.asList(STAKEHOLDER_ENTITY_TYPE, STAKEHOLDER_TITLE_ENTITY_TYPE)))); + + List> shouldClauseList = new ArrayList<>(); + shouldClauseList.add(mapOf("terms", mapOf("stakeholderTitleDomainQualifiedNames", qualifiedNames))); + shouldClauseList.add(mapOf("terms", mapOf("stakeholderDomainQualifiedName", qualifiedNames))); + + mustClauseList.add(mapOf("bool", mapOf("should", shouldClauseList))); + + Map bool = new HashMap<>(); + bool.put("must", mustClauseList); + + Map dsl = mapOf("query", mapOf("bool", bool)); + + return indexSearchPaginated(dsl, STAKEHOLDER_ATTRIBUTES_FOR_SEARCH, discovery); + } finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + + /** + * Record the updated child entities, it will be used to send notification and store audit logs + * @param entityVertex Child entity vertex + * @param updatedAttributes Updated attributes while updating required attributes on updating collection + */ + protected void recordUpdatedChildEntities(AtlasVertex entityVertex, Map updatedAttributes) { + RequestContext requestContext = RequestContext.get(); + AtlasPerfMetrics.MetricRecorder metricRecorder = requestContext.startMetricRecord("recordUpdatedChildEntities"); + AtlasEntity entity = new AtlasEntity(); + entity = entityRetriever.mapSystemAttributes(entityVertex, entity); + entity.setAttributes(updatedAttributes); + requestContext.cacheDifferentialEntity(new AtlasEntity(entity)); + + AtlasEntityType entityType = typeRegistry.getEntityTypeByName(entity.getTypeName()); + + //Add the min info attributes to entity header to be sent as part of notification + if(entityType != null) { + AtlasEntity finalEntity = entity; + 
entityType.getMinInfoAttributes().values().stream().filter(attribute -> !updatedAttributes.containsKey(attribute.getName())).forEach(attribute -> { + Object attrValue = null; + try { + attrValue = entityRetriever.getVertexAttribute(entityVertex, attribute); + } catch (AtlasBaseException e) { + LOG.error("Error while getting vertex attribute", e); + } + if(attrValue != null) { + finalEntity.setAttribute(attribute.getName(), attrValue); + } + }); + requestContext.recordEntityUpdate(new AtlasEntityHeader(finalEntity)); + } + + requestContext.endMetricRecord(metricRecorder); + } + + protected AtlasEntityHeader getParent(Object parentObject, Set attributes) throws AtlasBaseException { + if (parentObject == null) { + return null; + } + + AtlasObjectId objectId; + if (parentObject instanceof Map) { + objectId = getAtlasObjectIdFromMapObject(parentObject); + } else { + objectId = (AtlasObjectId) parentObject; + } + + AtlasVertex parentVertex = entityRetriever.getEntityVertex(objectId); + return entityRetriever.toAtlasEntityHeader(parentVertex, attributes); + } + + public static AtlasObjectId getAtlasObjectIdFromMapObject(Object obj) { + Map parentMap = (Map) obj; + AtlasObjectId objectId = new AtlasObjectId(); + objectId.setTypeName((String) parentMap.get("typeName")); + + if (parentMap.containsKey("guid")) { + objectId.setGuid((String) parentMap.get("guid")); + } else { + objectId.setUniqueAttributes((Map) parentMap.get("uniqueAttributes")); + } + + return objectId; + } +} diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/DataDomainPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/DataDomainPreProcessor.java new file mode 100644 index 00000000000..c056b2ad68f --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/DataDomainPreProcessor.java @@ -0,0 +1,392 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh; + + +import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.RequestContext; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.instance.AtlasEntity; +import org.apache.atlas.model.instance.AtlasEntityHeader; +import org.apache.atlas.model.instance.AtlasObjectId; +import org.apache.atlas.model.instance.AtlasRelatedObjectId; +import org.apache.atlas.model.instance.AtlasStruct; +import org.apache.atlas.model.instance.EntityMutations; +import org.apache.atlas.repository.graph.GraphHelper; +import org.apache.atlas.repository.graphdb.AtlasGraph; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; +import org.apache.atlas.repository.store.graph.v2.EntityMutationContext; +import org.apache.atlas.type.AtlasTypeRegistry; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +import static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.graph.GraphHelper.getActiveChildrenVertices; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.*; + +public class DataDomainPreProcessor extends AbstractDomainPreProcessor { + private static final Logger LOG = LoggerFactory.getLogger(DataDomainPreProcessor.class); + + private EntityMutationContext context; + private Map updatedPolicyResources; + private EntityGraphRetriever retrieverNoRelation = null; + private Map updatedDomainQualifiedNames; + + public DataDomainPreProcessor(AtlasTypeRegistry typeRegistry, EntityGraphRetriever entityRetriever, + AtlasGraph graph) { + super(typeRegistry, entityRetriever, graph); + this.updatedPolicyResources = new HashMap<>(); + this.retrieverNoRelation = new EntityGraphRetriever(graph, typeRegistry, true); + this.updatedDomainQualifiedNames = new HashMap<>(); + } + + @Override + public void processAttributes(AtlasStruct entityStruct, EntityMutationContext context, + EntityMutations.EntityOperation operation) throws AtlasBaseException { + if (LOG.isDebugEnabled()) { + LOG.debug("DataDomainPreProcessor.processAttributes: pre processing {}, {}", + entityStruct.getAttribute(QUALIFIED_NAME), operation); + } + + this.context = context; + + AtlasEntity entity = (AtlasEntity) entityStruct; + + switch (operation) { + case CREATE: + processCreateDomain(entity); + break; + case UPDATE: + AtlasVertex vertex = context.getVertex(entity.getGuid()); + processUpdateDomain(entity, vertex); + break; + } + } + + private void processCreateDomain(AtlasEntity entity) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processCreateDomain"); + + validateStakeholderRelationship(entity); + + String domainName = (String) entity.getAttribute(NAME); + + String parentDomainQualifiedName = ""; + AtlasObjectId parentDomainObject = (AtlasObjectId) entity.getRelationshipAttribute(PARENT_DOMAIN_REL_TYPE); + AtlasVertex parentDomain = null; + + if(parentDomainObject != null ){ + parentDomain = retrieverNoRelation.getEntityVertex(parentDomainObject); + parentDomainQualifiedName = parentDomain.getProperty(QUALIFIED_NAME, String.class); + if(StringUtils.isNotEmpty(parentDomainQualifiedName)) { + entity.setAttribute(PARENT_DOMAIN_QN_ATTR, parentDomainQualifiedName); + String superDomainQualifiedName = 
parentDomain.getProperty(SUPER_DOMAIN_QN_ATTR, String.class); + if(StringUtils.isEmpty(parentDomain.getProperty(SUPER_DOMAIN_QN_ATTR, String.class))) { + superDomainQualifiedName = parentDomainQualifiedName; + } + entity.setAttribute(SUPER_DOMAIN_QN_ATTR, superDomainQualifiedName); + } + } else { + entity.removeAttribute(PARENT_DOMAIN_QN_ATTR); + entity.removeAttribute(SUPER_DOMAIN_QN_ATTR); + } + + entity.setAttribute(QUALIFIED_NAME, createQualifiedName(parentDomainQualifiedName)); + + + entity.setCustomAttributes(customAttributes); + + domainExists(domainName, parentDomainQualifiedName, null); + + RequestContext.get().endMetricRecord(metricRecorder); + } + + private void processUpdateDomain(AtlasEntity entity, AtlasVertex vertex) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processUpdateDomain"); + + // Validate Relationship + if(entity.hasRelationshipAttribute(SUB_DOMAIN_REL_TYPE) || entity.hasRelationshipAttribute(DATA_PRODUCT_REL_TYPE)){ + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Cannot update Domain's subDomains or dataProducts relations"); + } + + validateStakeholderRelationship(entity); + + String vertexQnName = vertex.getProperty(QUALIFIED_NAME, String.class); + + AtlasEntity storedDomain = entityRetriever.toAtlasEntity(vertex); + AtlasRelatedObjectId currentParentDomainObjectId = (AtlasRelatedObjectId) storedDomain.getRelationshipAttribute(PARENT_DOMAIN_REL_TYPE); + + String newSuperDomainQualifiedName = ""; + String newParentDomainQualifiedName = ""; + String currentParentDomainQualifiedName = ""; + + AtlasEntityHeader currentParentDomainHeader = null; + + if(currentParentDomainObjectId != null){ + currentParentDomainHeader = entityRetriever.toAtlasEntityHeader(currentParentDomainObjectId.getGuid()); + currentParentDomainQualifiedName = (String) currentParentDomainHeader.getAttribute(QUALIFIED_NAME); + } + + AtlasEntityHeader newParentDomainHeader = getParent(entity); + if (newParentDomainHeader != null) { + newParentDomainQualifiedName = (String) newParentDomainHeader.getAttribute(QUALIFIED_NAME); + + newSuperDomainQualifiedName = (String) newParentDomainHeader.getAttribute(SUPER_DOMAIN_QN_ATTR); + if(StringUtils.isEmpty(newSuperDomainQualifiedName)) { + newSuperDomainQualifiedName = newParentDomainQualifiedName; + } + } + + if (!newParentDomainQualifiedName.equals(currentParentDomainQualifiedName) && entity.hasRelationshipAttribute(PARENT_DOMAIN_REL_TYPE)) { + if(storedDomain.getRelationshipAttribute(PARENT_DOMAIN_REL_TYPE) == null && + StringUtils.isEmpty( (String) storedDomain.getAttribute(PARENT_DOMAIN_QN_ATTR))){ + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Cannot move Super Domain inside another domain"); + } + + //Auth check + isAuthorized(currentParentDomainHeader, newParentDomainHeader); + + processMoveSubDomainToAnotherDomain(entity, vertex, currentParentDomainQualifiedName, newParentDomainQualifiedName, vertexQnName, newSuperDomainQualifiedName); + + } else { + String domainCurrentName = vertex.getProperty(NAME, String.class); + String domainNewName = (String) entity.getAttribute(NAME); + + entity.removeAttribute(PARENT_DOMAIN_QN_ATTR); + entity.removeAttribute(SUPER_DOMAIN_QN_ATTR); + + if (!domainCurrentName.equals(domainNewName)) { + domainExists(domainNewName, currentParentDomainQualifiedName, storedDomain.getGuid()); + } + entity.setAttribute(QUALIFIED_NAME, vertexQnName); + } + + RequestContext.get().endMetricRecord(metricRecorder); + } + + private void 
processMoveSubDomainToAnotherDomain(AtlasEntity domain, + AtlasVertex domainVertex, + String sourceDomainQualifiedName, + String targetDomainQualifiedName, + String currentDomainQualifiedName, + String superDomainQualifiedName) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder recorder = RequestContext.get().startMetricRecord("processMoveSubDomainToAnotherDomain"); + + try { + String domainName = (String) domain.getAttribute(NAME); + String updatedQualifiedName = ""; + + LOG.info("Moving subdomain {} to Domain {}", domainName, targetDomainQualifiedName); + + domainExists(domainName, targetDomainQualifiedName, domain.getGuid()); + + if(targetDomainQualifiedName.isEmpty()){ + //Moving subDomain to make it Super Domain + targetDomainQualifiedName = "default"; + updatedQualifiedName = currentDomainQualifiedName.replace(sourceDomainQualifiedName, targetDomainQualifiedName); + updatedQualifiedName = updatedQualifiedName + "/super"; + domain.setAttribute(QUALIFIED_NAME, updatedQualifiedName); + domain.setAttribute(PARENT_DOMAIN_QN_ATTR, null); + domain.setAttribute(SUPER_DOMAIN_QN_ATTR, null); + superDomainQualifiedName = updatedQualifiedName ; + } + else{ + if(StringUtils.isEmpty(sourceDomainQualifiedName)){ + updatedQualifiedName = createQualifiedName(targetDomainQualifiedName); + }else { + updatedQualifiedName = currentDomainQualifiedName.replace(sourceDomainQualifiedName, targetDomainQualifiedName); + } + + domain.setAttribute(QUALIFIED_NAME, updatedQualifiedName); + domain.setAttribute(PARENT_DOMAIN_QN_ATTR, targetDomainQualifiedName); + domain.setAttribute(SUPER_DOMAIN_QN_ATTR, superDomainQualifiedName); + } + + String currentQualifiedName = domainVertex.getProperty(QUALIFIED_NAME, String.class); + this.updatedPolicyResources.put("entity:" + currentQualifiedName, "entity:" + updatedQualifiedName); + this.updatedDomainQualifiedNames.put(currentQualifiedName, updatedQualifiedName); + + moveChildren(domainVertex, superDomainQualifiedName, updatedQualifiedName, sourceDomainQualifiedName, targetDomainQualifiedName); + updatePolicies(this.updatedPolicyResources, this.context); + updateStakeholderTitlesAndStakeholders(this.updatedDomainQualifiedNames, this.context); + + LOG.info("Moved subDomain {} to Domain {}", domainName, targetDomainQualifiedName); + + } finally { + RequestContext.get().endMetricRecord(recorder); + } + } + + private void moveChildren(AtlasVertex domainVertex, + String superDomainQualifiedName, + String parentDomainQualifiedName, + String sourceDomainQualifiedName, + String targetDomainQualifiedName) throws AtlasBaseException { + // move products to target Domain + Iterator products = getActiveChildrenVertices(domainVertex, DATA_PRODUCT_EDGE_LABEL); + while (products.hasNext()) { + AtlasVertex productVertex = products.next(); + moveChildDataProductToAnotherDomain(productVertex, superDomainQualifiedName, parentDomainQualifiedName, sourceDomainQualifiedName, targetDomainQualifiedName); + } + // Get all children domains of current domain + Iterator childDomains = getActiveChildrenVertices(domainVertex, DOMAIN_PARENT_EDGE_LABEL); + while (childDomains.hasNext()) { + AtlasVertex childVertex = childDomains.next(); + moveChildrenToAnotherDomain(childVertex, superDomainQualifiedName, parentDomainQualifiedName, sourceDomainQualifiedName, targetDomainQualifiedName); + } + } + + private void moveChildrenToAnotherDomain(AtlasVertex childDomainVertex, + String superDomainQualifiedName, + String parentDomainQualifiedName, + String sourceDomainQualifiedName, + String 
targetDomainQualifiedName) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder recorder = RequestContext.get().startMetricRecord("moveChildrenToAnotherDomain"); + + + try { + LOG.info("Moving child domain {} to Domain {}", childDomainVertex.getProperty(NAME, String.class), targetDomainQualifiedName); + Map updatedAttributes = new HashMap<>(); + + String currentDomainQualifiedName = childDomainVertex.getProperty(QUALIFIED_NAME, String.class); + String updatedDomainQualifiedName = parentDomainQualifiedName + getOwnQualifiedNameForChild(currentDomainQualifiedName); + + // Change domain qualifiedName + childDomainVertex.setProperty(QUALIFIED_NAME, updatedDomainQualifiedName); + updatedAttributes.put(QUALIFIED_NAME, updatedDomainQualifiedName); + + //change superDomainQN, parentDomainQN + childDomainVertex.setProperty(SUPER_DOMAIN_QN_ATTR, superDomainQualifiedName); + childDomainVertex.setProperty(PARENT_DOMAIN_QN_ATTR, parentDomainQualifiedName); + + //Store domainPolicies and resources to be updated + String currentResource = "entity:"+ currentDomainQualifiedName; + String updatedResource = "entity:"+ updatedDomainQualifiedName; + this.updatedPolicyResources.put(currentResource, updatedResource); + this.updatedDomainQualifiedNames.put(currentDomainQualifiedName, updatedDomainQualifiedName); + + //update system properties + GraphHelper.setModifiedByAsString(childDomainVertex, RequestContext.get().getUser()); + GraphHelper.setModifiedTime(childDomainVertex, System.currentTimeMillis()); + + // move products to target Domain + Iterator products = getActiveChildrenVertices(childDomainVertex, DATA_PRODUCT_EDGE_LABEL); + + while (products.hasNext()) { + AtlasVertex productVertex = products.next(); + moveChildDataProductToAnotherDomain(productVertex, superDomainQualifiedName, updatedDomainQualifiedName, sourceDomainQualifiedName, targetDomainQualifiedName); + } + + // Get all children domains of current domain + Iterator childDomains = getActiveChildrenVertices(childDomainVertex, DOMAIN_PARENT_EDGE_LABEL); + + while (childDomains.hasNext()) { + AtlasVertex childVertex = childDomains.next(); + moveChildrenToAnotherDomain(childVertex, superDomainQualifiedName, updatedDomainQualifiedName, sourceDomainQualifiedName, targetDomainQualifiedName); + } + + recordUpdatedChildEntities(childDomainVertex, updatedAttributes); + + LOG.info("Moved child domain {} to Domain {}", childDomainVertex.getProperty(NAME, String.class), targetDomainQualifiedName); + } finally { + RequestContext.get().endMetricRecord(recorder); + } + } + + private void moveChildDataProductToAnotherDomain(AtlasVertex productVertex, + String superDomainQualifiedName, + String parentDomainQualifiedName, + String sourceDomainQualifiedName, + String targetDomainQualifiedName) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder recorder = RequestContext.get().startMetricRecord("moveChildDataProductToAnotherDomain"); + + try { + String productName = productVertex.getProperty(NAME, String.class); + LOG.info("Moving dataProduct {} to Domain {}", productName, targetDomainQualifiedName); + Map updatedAttributes = new HashMap<>(); + + String currentQualifiedName = productVertex.getProperty(QUALIFIED_NAME, String.class); + String updatedQualifiedName = parentDomainQualifiedName + getOwnQualifiedNameForChild(currentQualifiedName); + + productVertex.setProperty(QUALIFIED_NAME, updatedQualifiedName); + updatedAttributes.put(QUALIFIED_NAME, updatedQualifiedName); + + productVertex.setProperty(PARENT_DOMAIN_QN_ATTR, 
parentDomainQualifiedName); + productVertex.setProperty(SUPER_DOMAIN_QN_ATTR, superDomainQualifiedName); + + //Store domainPolicies and resources to be updated + String currentResource = "entity:"+ currentQualifiedName; + String updatedResource = "entity:"+ updatedQualifiedName; + this.updatedPolicyResources.put(currentResource, updatedResource); + + //update system properties + GraphHelper.setModifiedByAsString(productVertex, RequestContext.get().getUser()); + GraphHelper.setModifiedTime(productVertex, System.currentTimeMillis()); + + recordUpdatedChildEntities(productVertex, updatedAttributes); + + LOG.info("Moved dataProduct {} to Domain {}", productName, targetDomainQualifiedName); + } finally { + RequestContext.get().endMetricRecord(recorder); + } + } + + private AtlasEntityHeader getParent(AtlasEntity domainEntity) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("DataDomainPreProcessor.getParent"); + + AtlasObjectId objectId = (AtlasObjectId) domainEntity.getRelationshipAttribute(PARENT_DOMAIN_REL_TYPE); + + RequestContext.get().endMetricRecord(metricRecorder); + return getParent(objectId, PARENT_ATTRIBUTES); + } + + private void domainExists(String domainName, String parentDomainQualifiedName,String guid) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("domainExists"); + try { + exists(DATA_DOMAIN_ENTITY_TYPE, domainName, parentDomainQualifiedName, guid); + + } finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + + private static String createQualifiedName(String parentDomainQualifiedName) { + if (StringUtils.isNotEmpty(parentDomainQualifiedName)) { + return parentDomainQualifiedName + "/domain/" + getUUID(); + } else{ + return "default/domain/" + getUUID() + "/super"; + } + } + + private String getOwnQualifiedNameForChild(String childQualifiedName) { + String[] splitted = childQualifiedName.split("/"); + return String.format("/%s/%s", splitted[splitted.length -2], splitted[splitted.length -1]); + } + + private void validateStakeholderRelationship(AtlasEntity entity) throws AtlasBaseException { + if(entity.hasRelationshipAttribute(STAKEHOLDER_REL_TYPE)){ + throw new AtlasBaseException(AtlasErrorCode.OPERATION_NOT_SUPPORTED, "Managing Stakeholders while creating/updating a domain"); + } + } +} + + diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/DataProductPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/DataProductPreProcessor.java new file mode 100644 index 00000000000..0851baa95ce --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/DataProductPreProcessor.java @@ -0,0 +1,446 @@ +package org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh; + +import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.DeleteType; +import org.apache.atlas.RequestContext; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.instance.*; +import org.apache.atlas.repository.graphdb.AtlasGraph; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.atlas.repository.store.graph.AtlasEntityStore; +import org.apache.atlas.repository.store.graph.v2.AtlasEntityStream; +import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; +import 
org.apache.atlas.repository.store.graph.v2.EntityMutationContext; +import org.apache.atlas.repository.store.graph.v2.EntityStream; +import org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils; +import org.apache.atlas.repository.util.AtlasEntityUtils; +import org.apache.atlas.type.AtlasEntityType; +import org.apache.atlas.type.AtlasTypeRegistry; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.collections.MapUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +import static org.apache.atlas.AtlasErrorCode.OPERATION_NOT_SUPPORTED; +import static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.*; +import static org.apache.atlas.repository.util.AccessControlUtils.*; + +public class DataProductPreProcessor extends AbstractDomainPreProcessor { + private static final Logger LOG = LoggerFactory.getLogger(DataProductPreProcessor.class); + private static final String PRIVATE = "Private"; + private static final String PROTECTED = "Protected"; + private static final String PUBLIC = "Public"; + private static final String DATA_PRODUCT = "dataProduct"; + + + + private EntityMutationContext context; + private AtlasEntityStore entityStore; + private Map updatedPolicyResources; + private EntityGraphRetriever retrieverNoRelation = null; + + public DataProductPreProcessor(AtlasTypeRegistry typeRegistry, EntityGraphRetriever entityRetriever, + AtlasGraph graph, AtlasEntityStore entityStore) { + super(typeRegistry, entityRetriever, graph); + this.updatedPolicyResources = new HashMap<>(); + this.entityStore = entityStore; + this.retrieverNoRelation = new EntityGraphRetriever(graph, typeRegistry, true); + } + + @Override + public void processAttributes(AtlasStruct entityStruct, EntityMutationContext context, + EntityMutations.EntityOperation operation) throws AtlasBaseException { + if (LOG.isDebugEnabled()) { + LOG.debug("DataProductPreProcessor.processAttributes: pre processing {}, {}", + entityStruct.getAttribute(QUALIFIED_NAME), operation); + } + this.context = context; + + AtlasEntity entity = (AtlasEntity) entityStruct; + + AtlasVertex vertex = context.getVertex(entity.getGuid()); + + switch (operation) { + case CREATE: + processCreateProduct(entity, vertex); + break; + case UPDATE: + processUpdateProduct(entity, vertex); + break; + } + } + + private void processCreateProduct(AtlasEntity entity,AtlasVertex vertex) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processCreateProduct"); + AtlasObjectId parentDomainObject = (AtlasObjectId) entity.getRelationshipAttribute(DATA_DOMAIN_REL_TYPE); + String productName = (String) entity.getAttribute(NAME); + String parentDomainQualifiedName = ""; + + entity.removeAttribute(OUTPUT_PORT_GUIDS_ATTR); + entity.removeAttribute(INPUT_PORT_GUIDS_ATTR); + + if (parentDomainObject == null) { + throw new AtlasBaseException(OPERATION_NOT_SUPPORTED, "Cannot create a Product without a Domain Relationship"); + } else { + AtlasVertex parentDomain = retrieverNoRelation.getEntityVertex(parentDomainObject); + parentDomainQualifiedName = parentDomain.getProperty(QUALIFIED_NAME, String.class); + + + entity.setAttribute(PARENT_DOMAIN_QN_ATTR, parentDomainQualifiedName); + + String superDomainQualifiedName = 
parentDomain.getProperty(SUPER_DOMAIN_QN_ATTR, String.class); + if(StringUtils.isEmpty(superDomainQualifiedName)) { + superDomainQualifiedName = parentDomainQualifiedName; + } + entity.setAttribute(SUPER_DOMAIN_QN_ATTR, superDomainQualifiedName); + } + + entity.setAttribute(QUALIFIED_NAME, createQualifiedName(parentDomainQualifiedName)); + + productExists(productName, parentDomainQualifiedName, null); + + createDaapVisibilityPolicy(entity, vertex); + + RequestContext.get().endMetricRecord(metricRecorder); + } + + private void processUpdateProduct(AtlasEntity entity, AtlasVertex vertex) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processUpdateProduct"); + + entity.removeAttribute(OUTPUT_PORT_GUIDS_ATTR); + entity.removeAttribute(INPUT_PORT_GUIDS_ATTR); + + if(entity.hasRelationshipAttribute(DATA_DOMAIN_REL_TYPE) && entity.getRelationshipAttribute(DATA_DOMAIN_REL_TYPE) == null){ + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "DataProduct can only be moved to another Domain."); + } + + String vertexQnName = vertex.getProperty(QUALIFIED_NAME, String.class); + + AtlasEntity storedProduct = entityRetriever.toAtlasEntity(vertex); + AtlasRelatedObjectId currentParentDomainObjectId = (AtlasRelatedObjectId) storedProduct.getRelationshipAttribute(DATA_DOMAIN_REL_TYPE); + + String newParentDomainQualifiedName = null; + String currentParentDomainQualifiedName = null; + AtlasEntityHeader currentParentDomainHeader = null; + + if(currentParentDomainObjectId != null) { + currentParentDomainHeader = entityRetriever.toAtlasEntityHeader(currentParentDomainObjectId.getGuid()); + currentParentDomainQualifiedName = (String) currentParentDomainHeader.getAttribute(QUALIFIED_NAME); + } + + AtlasEntityHeader newParentDomainHeader = getParent(entity); + if (newParentDomainHeader != null) { + newParentDomainQualifiedName = (String) newParentDomainHeader.getAttribute(QUALIFIED_NAME); + } + + boolean isDaapVisibilityChanged = isDaapVisibilityChanged(storedProduct, entity); + + if (newParentDomainQualifiedName != null && !newParentDomainQualifiedName.equals(currentParentDomainQualifiedName)) { + + if(isDaapVisibilityChanged){ + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Moving the product to another domain along with the change in Daap visibility is not allowed"); + } + + //Auth check + isAuthorized(currentParentDomainHeader, newParentDomainHeader); + + String newSuperDomainQualifiedName = (String) newParentDomainHeader.getAttribute(SUPER_DOMAIN_QN_ATTR); + if(StringUtils.isEmpty(newSuperDomainQualifiedName)){ + newSuperDomainQualifiedName = newParentDomainQualifiedName; + } + + processMoveDataProductToAnotherDomain(entity, currentParentDomainQualifiedName, newParentDomainQualifiedName, vertexQnName, newSuperDomainQualifiedName); + + updatePolicies(this.updatedPolicyResources, this.context); + + } else { + entity.removeAttribute(PARENT_DOMAIN_QN_ATTR); + entity.removeAttribute(SUPER_DOMAIN_QN_ATTR); + String productCurrentName = vertex.getProperty(NAME, String.class); + String productNewName = (String) entity.getAttribute(NAME); + + if (!productCurrentName.equals(productNewName)) { + productExists(productNewName, currentParentDomainQualifiedName, storedProduct.getGuid()); + } + entity.setAttribute(QUALIFIED_NAME, vertexQnName); + } + + if (isDaapVisibilityChanged) { + updateDaapVisibilityPolicy(entity, storedProduct); + } + else{ + // if isDaapVisibilityChanged is false, then do not update any daap visibility attributes 
in product entity as well + entity.removeAttribute(DAAP_VISIBILITY_USERS_ATTR); + entity.removeAttribute(DAAP_VISIBILITY_GROUPS_ATTR); + } + RequestContext.get().endMetricRecord(metricRecorder); + } + + private void processMoveDataProductToAnotherDomain(AtlasEntity product, + String sourceDomainQualifiedName, + String targetDomainQualifiedName, + String currentDataProductQualifiedName, + String superDomainQualifiedName) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder recorder = RequestContext.get().startMetricRecord("processMoveDataProductToAnotherDomain"); + + try { + String productName = (String) product.getAttribute(NAME); + + LOG.info("Moving dataProduct {} to Domain {}", productName, targetDomainQualifiedName); + + productExists(productName, targetDomainQualifiedName, product.getGuid()); + + String updatedQualifiedName; + if(StringUtils.isEmpty(sourceDomainQualifiedName)){ + updatedQualifiedName = createQualifiedName(targetDomainQualifiedName); + } else { + updatedQualifiedName = currentDataProductQualifiedName.replace(sourceDomainQualifiedName, targetDomainQualifiedName); + } + + product.setAttribute(QUALIFIED_NAME, updatedQualifiedName); + product.setAttribute(PARENT_DOMAIN_QN_ATTR, targetDomainQualifiedName); + product.setAttribute(SUPER_DOMAIN_QN_ATTR, superDomainQualifiedName); + + //Store domainPolicies and resources to be updated + String currentResource = "entity:"+ currentDataProductQualifiedName; + String updatedResource = "entity:"+ updatedQualifiedName; + this.updatedPolicyResources.put(currentResource, updatedResource); + + LOG.info("Moved dataProduct {} to Domain {}", productName, targetDomainQualifiedName); + + } finally { + RequestContext.get().endMetricRecord(recorder); + } + } + + private AtlasEntityHeader getParent(AtlasEntity productEntity) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("DataProductPreProcessor.getParent"); + + Object relationshipAttribute = productEntity.getRelationshipAttribute(DATA_DOMAIN_REL_TYPE); + + RequestContext.get().endMetricRecord(metricRecorder); + return getParent(relationshipAttribute, PARENT_ATTRIBUTES); + } + + private void productExists(String productName, String parentDomainQualifiedName, String guid) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("productExists"); + + try { + exists(DATA_PRODUCT_ENTITY_TYPE, productName, parentDomainQualifiedName, guid); + + } finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + + private static String createQualifiedName(String parentDomainQualifiedName) throws AtlasBaseException { + if (StringUtils.isEmpty(parentDomainQualifiedName)) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Parent Domain Qualified Name cannot be empty or null"); + } + return parentDomainQualifiedName + "/product/" + PreProcessorUtils.getUUID(); + + } + + private AtlasEntity getPolicyEntity(AtlasEntity entity, String productGuid ) { + AtlasEntity policy = new AtlasEntity(); + policy.setTypeName(POLICY_ENTITY_TYPE); + policy.setAttribute(NAME, entity.getAttribute(NAME)); + policy.setAttribute(QUALIFIED_NAME, productGuid + "/read-policy"); + policy.setAttribute(ATTR_POLICY_ACTIONS, Arrays.asList("entity-read")); + policy.setAttribute(ATTR_POLICY_CATEGORY, MESH_POLICY_CATEGORY); + policy.setAttribute(ATTR_POLICY_TYPE, POLICY_TYPE_ALLOW); + policy.setAttribute(ATTR_POLICY_RESOURCES, Arrays.asList("entity:" + 
entity.getAttribute(QUALIFIED_NAME))); + policy.setAttribute(ATTR_POLICY_RESOURCES_CATEGORY, POLICY_RESOURCE_CATEGORY_PERSONA_ENTITY); + policy.setAttribute(ATTR_POLICY_SERVICE_NAME, "atlas"); + policy.setAttribute(ATTR_POLICY_SUB_CATEGORY, DATA_PRODUCT); // create new constant attr + + return policy; + } + + private void createDaapVisibilityPolicy(AtlasEntity entity,AtlasVertex vertex) throws AtlasBaseException { + String productGuid = vertex.getProperty("__guid", String.class); + String vis = AtlasEntityUtils.getStringAttribute(entity,DAAP_VISIBILITY_ATTR); + + if (vis != null && !vis.equals(PRIVATE)){ + AtlasEntity policy = getPolicyEntity(entity, productGuid); + + switch (vis) { + case PROTECTED: + setProtectedPolicyAttributes(policy, entity); + break; + case PUBLIC: + setPublicPolicyAttributes(policy); + break; + } + createPolicy(policy); + } + } + + private void updateDaapVisibilityPolicy(AtlasEntity newEntity, AtlasEntity currentEntity) throws AtlasBaseException{ + String newProductDaapVisibility = AtlasEntityUtils.getStringAttribute(newEntity,DAAP_VISIBILITY_ATTR);// check case if attribute is not sent from FE + AtlasObjectId atlasObjectId = new AtlasObjectId(); + atlasObjectId.setTypeName(POLICY_ENTITY_TYPE); + atlasObjectId.setUniqueAttributes(AtlasEntityUtils.mapOf(QUALIFIED_NAME,currentEntity.getGuid()+"/read-policy")); + AtlasVertex policyVertex = null; + try { + policyVertex = entityRetriever.getEntityVertex(atlasObjectId); + } + catch(AtlasBaseException exp){ + if(!exp.getAtlasErrorCode().equals(AtlasErrorCode.INSTANCE_BY_UNIQUE_ATTRIBUTE_NOT_FOUND)){ + throw exp; + } + } + + AtlasEntity policy; + if (policyVertex == null) { + policy = getPolicyEntity(newEntity, newEntity.getGuid()); + } else { + policy = entityRetriever.toAtlasEntity(policyVertex); + } + + Map updatedAttributes = new HashMap<>(); + + if (newProductDaapVisibility.equals(PRIVATE)) { + updatedAttributes = setPrivatePolicyAttributes(policy); + } + else if (newProductDaapVisibility.equals(PROTECTED)) { + updatedAttributes = setProtectedPolicyAttributes(policy, + newEntity + ); + } + else if (newProductDaapVisibility.equals(PUBLIC)) { + updatedAttributes = setPublicPolicyAttributes(policy); + } + + if (policyVertex == null) { + createPolicy(policy); + } else { + updatePolicy(policy, policyVertex, updatedAttributes); + } + } + + private void createPolicy(AtlasEntity policy) throws AtlasBaseException{ + try { + RequestContext.get().setSkipAuthorizationCheck(true); + AtlasEntity.AtlasEntitiesWithExtInfo policiesExtInfo = new AtlasEntity.AtlasEntitiesWithExtInfo(); + policiesExtInfo.addEntity(policy); + EntityStream entityStream = new AtlasEntityStream(policiesExtInfo); + entityStore.createOrUpdate(entityStream, false); // adding new policy + } finally { + RequestContext.get().setSkipAuthorizationCheck(false); + } + } + + private void updatePolicy(AtlasEntity policy, AtlasVertex policyVertex,Map updatedAttributes) { + AtlasEntityType entityType = typeRegistry.getEntityTypeByName(POLICY_ENTITY_TYPE); + context.addUpdated(policy.getGuid(), policy, entityType, policyVertex); + recordUpdatedChildEntities(policyVertex, updatedAttributes); + } + + private Map setPrivatePolicyAttributes(AtlasEntity policy) { + Map updatedAttributes = new HashMap<>(); + policy.setAttribute(ATTR_POLICY_USERS, Arrays.asList()); + policy.setAttribute(ATTR_POLICY_GROUPS, Arrays.asList()); + policy.setAttribute(ATTR_POLICY_IS_ENABLED, false); + + updatedAttributes.put(ATTR_POLICY_USERS, Arrays.asList()); + 
updatedAttributes.put(ATTR_POLICY_GROUPS, Arrays.asList()); + updatedAttributes.put(ATTR_POLICY_IS_ENABLED, false); + + return updatedAttributes; + } + + private Map setProtectedPolicyAttributes(AtlasEntity policy, AtlasEntity entity) { + List users = AtlasEntityUtils.getListAttribute(entity, DAAP_VISIBILITY_USERS_ATTR); + List groups = AtlasEntityUtils.getListAttribute(entity, DAAP_VISIBILITY_GROUPS_ATTR); + + policy.setAttribute(ATTR_POLICY_USERS, users); + policy.setAttribute(ATTR_POLICY_GROUPS, groups); + policy.setAttribute(ATTR_POLICY_IS_ENABLED, true); + + Map updatedAttributes = new HashMap<>(); + updatedAttributes.put(ATTR_POLICY_USERS, users); + updatedAttributes.put(ATTR_POLICY_GROUPS, groups); + updatedAttributes.put(ATTR_POLICY_IS_ENABLED, true); + return updatedAttributes; + } + + private Map setPublicPolicyAttributes(AtlasEntity policy) { + Map updatedAttributes = new HashMap<>(); + policy.setAttribute(ATTR_POLICY_USERS, Arrays.asList()); + policy.setAttribute(ATTR_POLICY_GROUPS, Arrays.asList("public")); + policy.setAttribute(ATTR_POLICY_IS_ENABLED, true); + + updatedAttributes.put(ATTR_POLICY_USERS, Arrays.asList()); + updatedAttributes.put(ATTR_POLICY_GROUPS, Arrays.asList("public")); + updatedAttributes.put(ATTR_POLICY_IS_ENABLED, true); + return updatedAttributes; + } + + private Boolean isDaapVisibilityChanged(AtlasEntity storedProduct, AtlasEntity newProduct){ + + boolean isDaapVisibilityChanged; + // check for daapVisibility change + String currentProductDaapVisibility = AtlasEntityUtils.getStringAttribute(storedProduct, DAAP_VISIBILITY_ATTR); + String newProductDaapVisibility = AtlasEntityUtils.getStringAttribute(newProduct, DAAP_VISIBILITY_ATTR); // check case if attribute is not sent from FE + + if(newProductDaapVisibility == null){ + return false; + } + + isDaapVisibilityChanged = (!newProductDaapVisibility.equals(currentProductDaapVisibility)); + if(isDaapVisibilityChanged){ + return true; + } + + // check if new daap visibility and old daap visibility is protected then check if any user, groups added changed + if (newProductDaapVisibility.equals(PROTECTED) && currentProductDaapVisibility.equals(PROTECTED)){ + + List storedUsers = AtlasEntityUtils.getListAttribute(storedProduct, DAAP_VISIBILITY_USERS_ATTR); + List storedGroups = AtlasEntityUtils.getListAttribute(storedProduct, DAAP_VISIBILITY_GROUPS_ATTR); + List newUsers = AtlasEntityUtils.getListAttribute(newProduct, DAAP_VISIBILITY_USERS_ATTR); + List newGroups = AtlasEntityUtils.getListAttribute(newProduct, DAAP_VISIBILITY_GROUPS_ATTR); + + isDaapVisibilityChanged = compareLists(storedUsers, newUsers) || compareLists(storedGroups, newGroups); + } + + return isDaapVisibilityChanged; + } + + public static boolean compareLists(List list1, List list2) { + return !CollectionUtils.disjunction(list1, list2).isEmpty(); + } + + @Override + public void processDelete(AtlasVertex vertex) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processProductDelete"); + + try{ + if(RequestContext.get().getDeleteType() != DeleteType.SOFT){ + String productGuid = vertex.getProperty("__guid", String.class); + AtlasObjectId atlasObjectId = new AtlasObjectId(); + atlasObjectId.setTypeName(POLICY_ENTITY_TYPE); + atlasObjectId.setUniqueAttributes(AtlasEntityUtils.mapOf(QUALIFIED_NAME, productGuid+"/read-policy")); + AtlasVertex policyVertex; + try { + policyVertex = entityRetriever.getEntityVertex(atlasObjectId); + 
entityStore.deleteById(policyVertex.getProperty("__guid", String.class)); + } + catch(AtlasBaseException exp){ + if(!exp.getAtlasErrorCode().equals(AtlasErrorCode.INSTANCE_BY_UNIQUE_ATTRIBUTE_NOT_FOUND)){ + throw exp; + } + } + } + } + finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + + } +} diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/StakeholderTitlePreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/StakeholderTitlePreProcessor.java new file mode 100644 index 00000000000..97685d1ed62 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/datamesh/StakeholderTitlePreProcessor.java @@ -0,0 +1,233 @@ +package org.apache.atlas.repository.store.graph.v2.preprocessor.datamesh; + +import org.apache.atlas.AtlasErrorCode; +import org.apache.atlas.AtlasException; +import org.apache.atlas.RequestContext; +import org.apache.atlas.authorize.AtlasAuthorizationUtils; +import org.apache.atlas.authorize.AtlasEntityAccessRequest; +import org.apache.atlas.authorize.AtlasPrivilege; +import org.apache.atlas.discovery.EntityDiscoveryService; +import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.instance.AtlasEntity; +import org.apache.atlas.model.instance.AtlasEntityHeader; +import org.apache.atlas.model.instance.AtlasRelatedObjectId; +import org.apache.atlas.model.instance.AtlasRelationship; +import org.apache.atlas.model.instance.AtlasStruct; +import org.apache.atlas.model.instance.EntityMutations; +import org.apache.atlas.repository.graphdb.AtlasGraph; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; +import org.apache.atlas.repository.store.graph.v2.EntityMutationContext; +import org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessor; +import org.apache.atlas.type.AtlasTypeRegistry; +import org.apache.atlas.utils.AtlasPerfMetrics; +import org.apache.commons.collections.CollectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; +import java.util.Optional; + +import static java.lang.String.format; +import static org.apache.atlas.AtlasErrorCode.BAD_REQUEST; +import static org.apache.atlas.AtlasErrorCode.OPERATION_NOT_SUPPORTED; +import static org.apache.atlas.repository.Constants.DATA_DOMAIN_ENTITY_TYPE; +import static org.apache.atlas.repository.Constants.NAME; +import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; +import static org.apache.atlas.repository.Constants.STAKEHOLDER_TITLE_ENTITY_TYPE; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.getUUID; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.verifyDuplicateAssetByName; +import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; + +public class StakeholderTitlePreProcessor implements PreProcessor { + + private static final Logger LOG = LoggerFactory.getLogger(StakeholderTitlePreProcessor.class); + + public static final String PATTERN_QUALIFIED_NAME_ALL_DOMAINS = "stakeholderTitle/domain/default/%s"; + public static final String PATTERN_QUALIFIED_NAME_DOMAIN = "stakeholderTitle/domain/%s"; + + + public static final String STAR = "*/super"; + public static final String ATTR_DOMAIN_QUALIFIED_NAMES = "stakeholderTitleDomainQualifiedNames"; + + public static final String REL_ATTR_STAKEHOLDERS = 
"stakeholders"; + + private final AtlasTypeRegistry typeRegistry; + private final EntityGraphRetriever entityRetriever; + protected EntityDiscoveryService discovery; + + public StakeholderTitlePreProcessor(AtlasGraph graph, + AtlasTypeRegistry typeRegistry, + EntityGraphRetriever entityRetriever) { + this.typeRegistry = typeRegistry; + this.entityRetriever = entityRetriever; + + try { + this.discovery = new EntityDiscoveryService(typeRegistry, graph, null, null, null, null); + } catch (AtlasException e) { + e.printStackTrace(); + } + } + + @Override + public void processAttributes(AtlasStruct entityStruct, EntityMutationContext context, + EntityMutations.EntityOperation operation) throws AtlasBaseException { + if (LOG.isDebugEnabled()) { + LOG.debug("StakeholderTitlePreProcessor.processAttributes: pre processing {}, {}", entityStruct.getAttribute(QUALIFIED_NAME), operation); + } + + AtlasEntity entity = (AtlasEntity) entityStruct; + + switch (operation) { + case CREATE: + processCreateStakeholderTitle(entity); + break; + case UPDATE: + processUpdateStakeholderTitle(context, entity); + break; + } + } + + private void processCreateStakeholderTitle(AtlasEntity entity) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processCreateStakeholderTitle"); + + try { + validateRelations(entity); + + if (RequestContext.get().isSkipAuthorizationCheck()) { + // To create bootstrap titles with provided qualifiedName + return; + } + + String name = (String) entity.getAttribute(NAME); + verifyDuplicateAssetByName(STAKEHOLDER_TITLE_ENTITY_TYPE, name, discovery, + format("Stakeholder title with name %s already exists", name)); + + List domainQualifiedNames = null; + if (entity.hasAttribute(ATTR_DOMAIN_QUALIFIED_NAMES)) { + Object qNamesAsObject = entity.getAttribute(ATTR_DOMAIN_QUALIFIED_NAMES); + if (qNamesAsObject != null) { + domainQualifiedNames = (List) qNamesAsObject; + } + } + + if (CollectionUtils.isEmpty(domainQualifiedNames)) { + throw new AtlasBaseException(BAD_REQUEST, "Please provide attribute " + ATTR_DOMAIN_QUALIFIED_NAMES); + } + + if (domainQualifiedNames.contains(STAR)) { + if (domainQualifiedNames.size() > 1) { + + domainQualifiedNames.clear(); + domainQualifiedNames.add(STAR); + entity.setAttribute(ATTR_DOMAIN_QUALIFIED_NAMES, domainQualifiedNames); + } + + String qualifiedName = format(PATTERN_QUALIFIED_NAME_ALL_DOMAINS, getUUID()); + entity.setAttribute(QUALIFIED_NAME, qualifiedName); + + } else { + entity.setAttribute(QUALIFIED_NAME, format(PATTERN_QUALIFIED_NAME_DOMAIN, getUUID())); + } + + authorizeDomainAccess(domainQualifiedNames); + + } finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + + private void processUpdateStakeholderTitle(EntityMutationContext context, AtlasEntity entity) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processUpdateStakeholderTitle"); + + try { + if (RequestContext.get().isSkipAuthorizationCheck()) { + // To create bootstrap titles with provided aualifiedName + return; + } + + validateRelations(entity); + + AtlasVertex vertex = context.getVertex(entity.getGuid()); + + String currentName = vertex.getProperty(NAME, String.class); + String newName = (String) entity.getAttribute(NAME); + if (!currentName.equals(newName)) { + verifyDuplicateAssetByName(STAKEHOLDER_TITLE_ENTITY_TYPE, newName, discovery, + format("StakeholderTitle with name %s already exists", newName)); + } + + List 
domainQualifiedNames = null; + if (entity.hasAttribute(ATTR_DOMAIN_QUALIFIED_NAMES)) { + Object qNamesAsObject = entity.getAttribute(ATTR_DOMAIN_QUALIFIED_NAMES); + if (qNamesAsObject != null) { + domainQualifiedNames = (List) qNamesAsObject; + } + } + + if (CollectionUtils.isEmpty(domainQualifiedNames)) { + domainQualifiedNames = vertex.getMultiValuedProperty(ATTR_DOMAIN_QUALIFIED_NAMES, String.class); + } + + authorizeDomainAccess(domainQualifiedNames); + + String vertexQName = vertex.getProperty(QUALIFIED_NAME, String.class); + entity.setAttribute(QUALIFIED_NAME, vertexQName); + + } finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + + @Override + public void processDelete(AtlasVertex vertex) throws AtlasBaseException { + AtlasPerfMetrics.MetricRecorder metricRecorder = RequestContext.get().startMetricRecord("processDeleteStakeholderTitle"); + + try { + AtlasEntity titleEntity = entityRetriever.toAtlasEntity(vertex); + + List stakeholders = null; + Object stakeholdersAsObject = titleEntity.getRelationshipAttribute(REL_ATTR_STAKEHOLDERS); + if (stakeholdersAsObject != null) { + stakeholders = (List) stakeholdersAsObject; + } + + if (CollectionUtils.isNotEmpty(stakeholders)) { + Optional activeStakeholder = stakeholders.stream().filter(x -> x.getRelationshipStatus() == AtlasRelationship.Status.ACTIVE).findFirst(); + if (activeStakeholder.isPresent()) { + throw new AtlasBaseException(OPERATION_NOT_SUPPORTED, "Can not delete StakeholderTitle as it has reference to Active Stakeholder"); + } + + List domainQualifiedNames = vertex.getMultiValuedProperty(ATTR_DOMAIN_QUALIFIED_NAMES, String.class); + + authorizeDomainAccess(domainQualifiedNames); + } + } finally { + RequestContext.get().endMetricRecord(metricRecorder); + } + } + + private void authorizeDomainAccess(List domainQualifiedNames) throws AtlasBaseException { + for (String domainQualifiedName: domainQualifiedNames) { + String domainQualifiedNameToAuth; + + if (domainQualifiedNames.contains(STAR)) { + domainQualifiedNameToAuth = "*/super"; + } else { + domainQualifiedNameToAuth = domainQualifiedName; + } + + AtlasEntityHeader domainHeaderToAuth = new AtlasEntityHeader(DATA_DOMAIN_ENTITY_TYPE, mapOf(QUALIFIED_NAME, domainQualifiedNameToAuth)); + + AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_UPDATE, new AtlasEntityHeader(domainHeaderToAuth)), + "mutate StakeholderTitle for domain ", domainQualifiedName); + } + } + + private void validateRelations(AtlasEntity entity) throws AtlasBaseException { + if (entity.hasRelationshipAttribute(REL_ATTR_STAKEHOLDERS)) { + throw new AtlasBaseException(OPERATION_NOT_SUPPORTED, "Managing Stakeholders while creating/updating StakeholderTitle"); + } + } +} + diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/AbstractGlossaryPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/AbstractGlossaryPreProcessor.java index 91950f783c5..08c604489c1 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/AbstractGlossaryPreProcessor.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/AbstractGlossaryPreProcessor.java @@ -58,6 +58,7 @@ import static org.apache.atlas.repository.Constants.ELASTICSEARCH_PAGINATION_SIZE; import static org.apache.atlas.repository.Constants.NAME; import static 
org.apache.atlas.repository.Constants.STATE_PROPERTY_KEY; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.indexSearchPaginated; import static org.apache.atlas.repository.util.AtlasEntityUtils.mapOf; import static org.apache.atlas.type.Constants.MEANINGS_PROPERTY_KEY; import static org.apache.atlas.type.Constants.MEANINGS_TEXT_PROPERTY_KEY; @@ -103,7 +104,7 @@ public void termExists(String termName, String glossaryQName) throws AtlasBaseEx Map dsl = mapOf("query", mapOf("bool", mapOf("must", mustClauseList))); - List terms = indexSearchPaginated(dsl); + List terms = indexSearchPaginated(dsl, null, this.discovery); if (CollectionUtils.isNotEmpty(terms)) { ret = terms.stream().map(term -> (String) term.getAttribute(NAME)).anyMatch(name -> termName.equals(name)); @@ -137,38 +138,6 @@ public boolean checkEntityTermAssociation(String termQName) throws AtlasBaseExce return entityHeader != null; } - public List indexSearchPaginated(Map dsl) throws AtlasBaseException { - IndexSearchParams searchParams = new IndexSearchParams(); - List ret = new ArrayList<>(); - - List sortList = new ArrayList<>(0); - sortList.add(mapOf("__timestamp", mapOf("order", "asc"))); - sortList.add(mapOf("__guid", mapOf("order", "asc"))); - dsl.put("sort", sortList); - - int from = 0; - int size = 100; - boolean hasMore = true; - do { - dsl.put("from", from); - dsl.put("size", size); - searchParams.setDsl(dsl); - - List headers = discovery.directIndexSearch(searchParams).getEntities(); - - if (CollectionUtils.isNotEmpty(headers)) { - ret.addAll(headers); - } else { - hasMore = false; - } - - from += size; - - } while (hasMore); - - return ret; - } - public void updateMeaningsAttributesInEntitiesOnTermUpdate(String currentTermName, String updatedTermName, String termQName, String updatedTermQName, String termGuid) throws AtlasBaseException { diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/CategoryPreProcessor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/CategoryPreProcessor.java index eb39ff3b1d2..88f72d2f16d 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/CategoryPreProcessor.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/preprocessor/glossary/CategoryPreProcessor.java @@ -356,7 +356,7 @@ private void categoryExists(String categoryName, String glossaryQualifiedName) t Map dsl = mapOf("query", mapOf("bool", bool)); - List categories = indexSearchPaginated(dsl); + List categories = indexSearchPaginated(dsl, null, this.discovery); if (CollectionUtils.isNotEmpty(categories)) { for (AtlasEntityHeader category : categories) { diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationPropagateTaskFactory.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationPropagateTaskFactory.java index ca32e234087..65ac992b17d 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationPropagateTaskFactory.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationPropagateTaskFactory.java @@ -54,6 +54,8 @@ public class ClassificationPropagateTaskFactory implements TaskFactory { public static final String CLASSIFICATION_PROPAGATION_RELATIONSHIP_UPDATE = "CLASSIFICATION_PROPAGATION_RELATIONSHIP_UPDATE"; + public static final String 
CLEANUP_CLASSIFICATION_PROPAGATION = "CLEANUP_CLASSIFICATION_PROPAGATION"; + public static final List supportedTypes = new ArrayList() {{ @@ -63,6 +65,7 @@ public class ClassificationPropagateTaskFactory implements TaskFactory { add(CLASSIFICATION_ONLY_PROPAGATION_DELETE_ON_HARD_DELETE); add(CLASSIFICATION_REFRESH_PROPAGATION); add(CLASSIFICATION_PROPAGATION_RELATIONSHIP_UPDATE); + add(CLEANUP_CLASSIFICATION_PROPAGATION); }}; @@ -102,6 +105,10 @@ public org.apache.atlas.tasks.AbstractTask create(AtlasTask task) { case CLASSIFICATION_PROPAGATION_RELATIONSHIP_UPDATE: return new ClassificationPropagationTasks.UpdateRelationship(task, graph, entityGraphMapper, deleteDelegate, relationshipStore); + case CLEANUP_CLASSIFICATION_PROPAGATION: + return new ClassificationPropagationTasks.CleanUpClassificationPropagation(task, graph, entityGraphMapper, deleteDelegate, relationshipStore); + + default: LOG.warn("Type: {} - {} not found!. The task will be ignored.", taskType, taskGuid); return null; diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationPropagationTasks.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationPropagationTasks.java index 69abc3aafc4..d1191d3aac4 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationPropagationTasks.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationPropagationTasks.java @@ -121,4 +121,17 @@ protected void run(Map parameters) throws AtlasBaseException { entityGraphMapper.updateTagPropagations(relationshipEdgeId, relationship); } } + + public static class CleanUpClassificationPropagation extends ClassificationTask { + public CleanUpClassificationPropagation(AtlasTask task, AtlasGraph graph, EntityGraphMapper entityGraphMapper, DeleteHandlerDelegate deleteDelegate, AtlasRelationshipStore relationshipStore) { + super(task, graph, entityGraphMapper, deleteDelegate, relationshipStore); + } + + @Override + protected void run(Map parameters) throws AtlasBaseException { + String classificationName = (String) parameters.get(PARAM_CLASSIFICATION_NAME); + + entityGraphMapper.cleanUpClassificationPropagation(classificationName); + } + } } diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationTask.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationTask.java index 76112dd6852..f1796ad5bd6 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationTask.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/tasks/ClassificationTask.java @@ -50,6 +50,8 @@ public abstract class ClassificationTask extends AbstractTask { public static final String PARAM_RELATIONSHIP_GUID = "relationshipGuid"; public static final String PARAM_RELATIONSHIP_OBJECT = "relationshipObject"; public static final String PARAM_RELATIONSHIP_EDGE_ID = "relationshipEdgeId"; + + public static final String PARAM_CLASSIFICATION_NAME = "classificationName"; public static final String PARAM_REFERENCED_VERTEX_ID = "referencedVertexId"; public static final String PARAM_IS_TERM_ENTITY_EDGE = "isTermEntityEdge"; public static final String PARAM_PREVIOUS_CLASSIFICATION_RESTRICT_PROPAGATE_THROUGH_LINEAGE = "previousRestrictPropagationThroughLineage"; diff --git a/repository/src/main/java/org/apache/atlas/repository/util/AccessControlUtils.java 
b/repository/src/main/java/org/apache/atlas/repository/util/AccessControlUtils.java index d4686a9562a..c2c04b8d4ed 100644 --- a/repository/src/main/java/org/apache/atlas/repository/util/AccessControlUtils.java +++ b/repository/src/main/java/org/apache/atlas/repository/util/AccessControlUtils.java @@ -18,7 +18,6 @@ package org.apache.atlas.repository.util; import org.apache.atlas.exception.AtlasBaseException; -import org.apache.atlas.featureflag.FeatureFlagStore; import org.apache.atlas.model.discovery.IndexSearchParams; import org.apache.atlas.model.instance.AtlasEntity; import org.apache.atlas.model.instance.AtlasEntityHeader; @@ -28,7 +27,6 @@ import org.apache.atlas.repository.graphdb.AtlasIndexQuery; import org.apache.atlas.repository.graphdb.AtlasVertex; import org.apache.atlas.repository.graphdb.DirectIndexQueryResult; -import org.apache.atlas.repository.store.graph.AtlasEntityStore; import org.apache.atlas.repository.store.graph.v2.EntityGraphRetriever; import org.apache.atlas.util.NanoIdUtils; import org.apache.commons.collections.CollectionUtils; @@ -43,17 +41,8 @@ import java.util.stream.Collectors; import static org.apache.atlas.AtlasErrorCode.ACCESS_CONTROL_ALREADY_EXISTS; -import static org.apache.atlas.AtlasErrorCode.DISABLED_OPERATION; import static org.apache.atlas.AtlasErrorCode.OPERATION_NOT_SUPPORTED; -import static org.apache.atlas.repository.Constants.ATTR_ADMIN_GROUPS; -import static org.apache.atlas.repository.Constants.ATTR_ADMIN_ROLES; -import static org.apache.atlas.repository.Constants.ATTR_ADMIN_USERS; -import static org.apache.atlas.repository.Constants.ATTR_TENANT_ID; -import static org.apache.atlas.repository.Constants.CONNECTION_ENTITY_TYPE; -import static org.apache.atlas.repository.Constants.DEFAULT_TENANT_ID; -import static org.apache.atlas.repository.Constants.NAME; -import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; -import static org.apache.atlas.repository.Constants.VERTEX_INDEX_NAME; +import static org.apache.atlas.repository.Constants.*; import static org.apache.atlas.repository.util.AtlasEntityUtils.getListAttribute; import static org.apache.atlas.repository.util.AtlasEntityUtils.getQualifiedName; import static org.apache.atlas.repository.util.AtlasEntityUtils.getStringAttribute; @@ -260,19 +249,18 @@ public static AtlasEntity extractConnectionFromResource(EntityGraphRetriever ent } public static String getPersonaRoleName(AtlasEntity persona) { - String qualifiedName = getStringAttribute(persona, QUALIFIED_NAME); - - String[] parts = qualifiedName.split("/"); - - return "persona_" + parts[parts.length - 1]; + return "persona_" + getESAliasName(persona); } public static String getESAliasName(AtlasEntity entity) { String qualifiedName = getStringAttribute(entity, QUALIFIED_NAME); + return getESAliasName(qualifiedName); + } + public static String getESAliasName(String qualifiedName) { String[] parts = qualifiedName.split("/"); - return parts[parts.length - 1]; + return parts[1]; } public static List getPolicies(AtlasEntity.AtlasEntityWithExtInfo accessControl) { @@ -345,7 +333,7 @@ public static String getTenantId(AtlasStruct entity) { public static void validateNoPoliciesAttached(AtlasEntity entity) throws AtlasBaseException { List policies = (List) entity.getRelationshipAttribute(REL_ATTR_POLICIES); if (CollectionUtils.isNotEmpty(policies)) { - throw new AtlasBaseException(OPERATION_NOT_SUPPORTED, "Can not attach a policy while creating/updating Persona/Purpose"); + throw new AtlasBaseException(OPERATION_NOT_SUPPORTED, "Can not 
attach a policy while creating/updating Persona/Purpose/Stakeholder"); } } @@ -379,7 +367,8 @@ public static void validateUniquenessByTags(AtlasGraph graph, List tags, private static boolean hasMatchingVertex(AtlasGraph graph, List newTags, IndexSearchParams indexSearchParams) throws AtlasBaseException { - AtlasIndexQuery indexQuery = graph.elasticsearchQuery(VERTEX_INDEX_NAME); + String vertexIndexName = getESIndex(); + AtlasIndexQuery indexQuery = graph.elasticsearchQuery(vertexIndexName); DirectIndexQueryResult indexQueryResult = indexQuery.vertices(indexSearchParams); Iterator iterator = indexQueryResult.getIterator(); diff --git a/repository/src/main/java/org/apache/atlas/tasks/AtlasTaskService.java b/repository/src/main/java/org/apache/atlas/tasks/AtlasTaskService.java index ba0fe1a4da7..d8269633d43 100644 --- a/repository/src/main/java/org/apache/atlas/tasks/AtlasTaskService.java +++ b/repository/src/main/java/org/apache/atlas/tasks/AtlasTaskService.java @@ -148,7 +148,7 @@ public List createAtlasTasks(List tasks) throws AtlasBaseE if (!supportedTypes.contains(taskType)) { throw new AtlasBaseException(AtlasErrorCode.TASK_TYPE_NOT_SUPPORTED, task.getType()); } - if (isClassificationTaskType(taskType)) { + if (isClassificationTaskType(taskType) && !taskType.equals(ClassificationPropagateTaskFactory.CLEANUP_CLASSIFICATION_PROPAGATION)) { String classificationName = task.getClassificationName(); String entityGuid = task.getEntityGuid(); String classificationId = StringUtils.isEmpty(task.getClassificationId()) ? resolveAndReturnClassificationId(classificationName, entityGuid) : task.getClassificationId(); diff --git a/server-api/src/main/java/org/apache/atlas/RequestContext.java b/server-api/src/main/java/org/apache/atlas/RequestContext.java index 1c7ccababb6..565832b7bd5 100644 --- a/server-api/src/main/java/org/apache/atlas/RequestContext.java +++ b/server-api/src/main/java/org/apache/atlas/RequestContext.java @@ -88,6 +88,8 @@ public class RequestContext { private boolean allowDeletedRelationsIndexsearch = false; private boolean includeMeanings = true; private boolean includeClassifications = true; + + private boolean includeClassificationNames = false; private String currentTypePatchAction = ""; private AtlasTask currentTask; private String traceId; @@ -175,7 +177,7 @@ public void clearCache() { } if (CollectionUtils.isNotEmpty(applicationMetrics)) { if (Objects.nonNull(this.metricsRegistry)){ - this.metricsRegistry.collectIndexsearch(traceId, this.requestUri, applicationMetrics); + this.metricsRegistry.collectApplicationMetrics(traceId, this.requestUri, applicationMetrics); } applicationMetrics.clear(); } @@ -719,6 +721,14 @@ public boolean isCacheEnabled() { return this.cacheEnabled; } + public boolean isIncludeClassificationNames() { + return includeClassificationNames; + } + + public void setIncludeClassificationNames(boolean includeClassificationNames) { + this.includeClassificationNames = includeClassificationNames; + } + public class EntityGuidPair { private final Object entity; private final String guid; diff --git a/webapp/src/main/java/org/apache/atlas/web/filters/ActiveServerFilter.java b/webapp/src/main/java/org/apache/atlas/web/filters/ActiveServerFilter.java index cedf9c201a2..2721efa6f25 100644 --- a/webapp/src/main/java/org/apache/atlas/web/filters/ActiveServerFilter.java +++ b/webapp/src/main/java/org/apache/atlas/web/filters/ActiveServerFilter.java @@ -21,6 +21,7 @@ import org.apache.atlas.AtlasConfiguration; import org.apache.atlas.AtlasErrorCode; import 
org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.service.FeatureFlagStore; import org.apache.atlas.type.AtlasType; import org.apache.atlas.web.service.ActiveInstanceState; import org.apache.atlas.web.service.ServiceState; @@ -56,9 +57,9 @@ public class ActiveServerFilter implements Filter { private static final Logger LOG = LoggerFactory.getLogger(ActiveServerFilter.class); private static final String MIGRATION_STATUS_STATIC_PAGE = "migration-status.html"; - private static final String[] WHITELISTED_APIS_SIGNATURE = {"search", "lineage", "auditSearch", "accessors" - , "evaluator"}; + , "evaluator", "featureFlag"}; + private static final String DISABLE_WRITE_FLAG = "disable_writes"; private final ActiveInstanceState activeInstanceState; private ServiceState serviceState; @@ -88,13 +89,15 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo FilterChain filterChain) throws IOException, ServletException { // If maintenance mode is enabled, return a 503 if (AtlasConfiguration.ATLAS_MAINTENANCE_MODE.getBoolean()) { - // Block all the POST, PUT, DELETE operations - HttpServletRequest request = (HttpServletRequest) servletRequest; - HttpServletResponse response = (HttpServletResponse) servletResponse; - if (isBlockedMethod(request.getMethod()) && !isWhitelistedAPI(request.getRequestURI())) { - LOG.error("Maintenance mode enabled. Blocking request: {}", request.getRequestURI()); - sendMaintenanceModeResponse(response); - return; // Stop further processing + if (FeatureFlagStore.evaluate(DISABLE_WRITE_FLAG, "true")) { + // Block all the POST, PUT, DELETE operations + HttpServletRequest request = (HttpServletRequest) servletRequest; + HttpServletResponse response = (HttpServletResponse) servletResponse; + if (isBlockedMethod(request.getMethod()) && !isWhitelistedAPI(request.getRequestURI())) { + LOG.error("Maintenance mode enabled. 
Blocking request: {}", request.getRequestURI()); + sendMaintenanceModeResponse(response); + return; // Stop further processing + } } } diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java index 3afd2b451d3..71d773a8791 100755 --- a/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java +++ b/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java @@ -56,6 +56,7 @@ import org.apache.atlas.repository.impexp.ZipSink; import org.apache.atlas.repository.patches.AtlasPatchManager; import org.apache.atlas.repository.store.graph.AtlasEntityStore; +import org.apache.atlas.service.FeatureFlagStore; import org.apache.atlas.service.metrics.MetricsRegistry; import org.apache.atlas.services.MetricsService; import org.apache.atlas.tasks.TaskManagement; @@ -930,6 +931,21 @@ public Map getDebugMetrics() { return debugMetricsRESTSink.getMetrics(); } + @POST + @Path("featureFlag") + @Produces(MediaType.APPLICATION_JSON) + public void setFeatureFlag(@QueryParam("key") String key, @QueryParam("value") String value) throws AtlasBaseException { + AtlasAuthorizationUtils.verifyAccess(new AtlasAdminAccessRequest(AtlasPrivilege.ADMIN_FEATURE_FLAG_CUD), "featureFlag"); + FeatureFlagStore.setFlag(key, value); + } + + @DELETE + @Path("featureFlag/{flag}") + @Produces(MediaType.APPLICATION_JSON) + public void deleteFeatureFlag(@PathParam("flag") String key) throws AtlasBaseException { + AtlasAuthorizationUtils.verifyAccess(new AtlasAdminAccessRequest(AtlasPrivilege.ADMIN_FEATURE_FLAG_CUD), "featureFlag"); + FeatureFlagStore.deleteFlag(key); + } private String getEditableEntityTypes(Configuration config) { String ret = DEFAULT_EDITABLE_ENTITY_TYPES; diff --git a/webapp/src/main/java/org/apache/atlas/web/rest/DiscoveryREST.java b/webapp/src/main/java/org/apache/atlas/web/rest/DiscoveryREST.java index 7717baa49ac..590e3cb0bfa 100644 --- a/webapp/src/main/java/org/apache/atlas/web/rest/DiscoveryREST.java +++ b/webapp/src/main/java/org/apache/atlas/web/rest/DiscoveryREST.java @@ -94,6 +94,7 @@ public class DiscoveryREST { private static final String INDEXSEARCH_TAG_NAME = "indexsearch"; private static final Set TRACKING_UTM_TAGS = new HashSet<>(Arrays.asList("ui_main_list", "ui_popup_searchbar")); + private static final String UTM_TAG_FROM_PRODUCT = "project_webapp"; @Inject public DiscoveryREST(AtlasTypeRegistry typeRegistry, AtlasDiscoveryService discoveryService, @@ -393,6 +394,7 @@ public AtlasSearchResult indexSearch(@Context HttpServletRequest servletRequest, RequestContext.get().setIncludeMeanings(!parameters.isExcludeMeanings()); RequestContext.get().setIncludeClassifications(!parameters.isExcludeClassifications()); + RequestContext.get().setIncludeClassificationNames(parameters.isIncludeClassificationNames()); try { if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "DiscoveryREST.indexSearch(" + parameters + ")"); @@ -435,12 +437,16 @@ public AtlasSearchResult indexSearch(@Context HttpServletRequest servletRequest, if(CollectionUtils.isNotEmpty(parameters.getUtmTags())) { AtlasPerfMetrics.Metric indexsearchMetric = new AtlasPerfMetrics.Metric(INDEXSEARCH_TAG_NAME); indexsearchMetric.addTag("utmTag", "other"); + indexsearchMetric.addTag("source", "other"); for (String utmTag : parameters.getUtmTags()) { if (TRACKING_UTM_TAGS.contains(utmTag)) { indexsearchMetric.addTag("utmTag", utmTag); break; } } + if 
(parameters.getUtmTags().contains(UTM_TAG_FROM_PRODUCT)) { + indexsearchMetric.addTag("source", UTM_TAG_FROM_PRODUCT); + } indexsearchMetric.addTag("name", INDEXSEARCH_TAG_NAME); indexsearchMetric.setTotalTimeMSecs(System.currentTimeMillis() - startTime); RequestContext.get().addApplicationMetrics(indexsearchMetric); diff --git a/webapp/src/main/java/org/apache/atlas/web/rest/EntityREST.java b/webapp/src/main/java/org/apache/atlas/web/rest/EntityREST.java index ee6846fd8ff..c5c95bb98f5 100644 --- a/webapp/src/main/java/org/apache/atlas/web/rest/EntityREST.java +++ b/webapp/src/main/java/org/apache/atlas/web/rest/EntityREST.java @@ -97,6 +97,8 @@ public class EntityREST { private static final int TWO_MILLION = HUNDRED_THOUSAND * 10 * 2; private static final Set ATTRS_WITH_TWO_MILLION_LIMIT = new HashSet() {{ add("rawQueryText"); + add("variablesSchemaBase64"); + add("visualBuilderSchemaBase64"); }}; @@ -1274,6 +1276,25 @@ public void setClassifications(AtlasEntityHeaders entityHeaders) throws AtlasBas } } + @POST + @Path("repairClassificationsMappings/{guid}") + @Produces(Servlets.JSON_MEDIA_TYPE) + @Consumes(Servlets.JSON_MEDIA_TYPE) + @Timed + public void repairClassifications(@PathParam("guid") String guid) throws AtlasBaseException { + AtlasPerfTracer perf = null; + + try { + if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { + perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntityREST.repairClassifications()"); + } + + entitiesStore.repairClassificationMappings(guid); + } finally { + AtlasPerfTracer.log(perf); + } + } + @POST @Path("/guid/{guid}/businessmetadata") @Produces(Servlets.JSON_MEDIA_TYPE) @@ -1911,4 +1932,29 @@ public void repairIndexByTypeName(@PathParam("typename") String typename, @Query AtlasPerfTracer.log(perf); } } + + @POST + @Path("/repair/accesscontrolAlias/{guid}") + @Timed + public void repairAccessControlAlias(@PathParam("guid") String guid) throws AtlasBaseException { + Servlets.validateQueryParamLength("guid", guid); + + AtlasPerfTracer perf = null; + + + try { + if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { + perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntityREST.repairAccessControlAlias"); + } + + entitiesStore.repairAccesscontrolAlias(guid); + + LOG.info("Repaired access control alias for entity with guid {}", guid); + + } finally { + AtlasPerfTracer.log(perf); + } + + + } } diff --git a/webapp/src/main/java/org/apache/atlas/web/rest/MigrationREST.java b/webapp/src/main/java/org/apache/atlas/web/rest/MigrationREST.java index b491bb88ced..551d0f4aa2d 100644 --- a/webapp/src/main/java/org/apache/atlas/web/rest/MigrationREST.java +++ b/webapp/src/main/java/org/apache/atlas/web/rest/MigrationREST.java @@ -3,6 +3,7 @@ import org.apache.atlas.AtlasErrorCode; import org.apache.atlas.RequestContext; import org.apache.atlas.annotation.Timed; +import org.apache.atlas.discovery.EntityDiscoveryService; import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.model.discovery.IndexSearchParams; import org.apache.atlas.model.instance.AtlasEntity; @@ -10,10 +11,12 @@ import org.apache.atlas.repository.graph.GraphHelper; import org.apache.atlas.repository.graphdb.*; import org.apache.atlas.repository.store.graph.AtlasEntityStore; -import org.apache.atlas.repository.store.graph.v2.AtlasEntityStream; -import org.apache.atlas.repository.store.graph.v2.EntityStream; +import org.apache.atlas.repository.store.graph.v2.*; +import org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils; import 
org.apache.atlas.repository.store.users.KeycloakStore; +import org.apache.atlas.service.redis.RedisService; import org.apache.atlas.transformer.PreProcessorPoliciesTransformer; +import org.apache.atlas.type.AtlasTypeRegistry; import org.apache.atlas.utils.AtlasPerfTracer; import org.apache.atlas.v1.model.instance.Id; import org.apache.atlas.web.util.Servlets; @@ -35,6 +38,7 @@ import static org.apache.atlas.auth.client.keycloak.AtlasKeycloakClient.getKeycloakClient; import static org.apache.atlas.repository.Constants.*; +import static org.apache.atlas.repository.store.graph.v2.preprocessor.PreProcessorUtils.*; @Path("migration") @Singleton @@ -54,12 +58,115 @@ public class MigrationREST { private KeycloakStore keycloakStore; private AtlasGraph graph; + private final EntityGraphRetriever entityRetriever; + private final RedisService redisService; + protected final AtlasTypeRegistry typeRegistry; + private final EntityDiscoveryService discovery; + + private final TransactionInterceptHelper transactionInterceptHelper; + @Inject - public MigrationREST(AtlasEntityStore entityStore, AtlasGraph graph) { + public MigrationREST(AtlasEntityStore entityStore, AtlasGraph graph, RedisService redisService, EntityDiscoveryService discovery, + EntityGraphRetriever entityRetriever, AtlasTypeRegistry typeRegistry, TransactionInterceptHelper transactionInterceptHelper) { this.entityStore = entityStore; this.graph = graph; this.transformer = new PreProcessorPoliciesTransformer(); keycloakStore = new KeycloakStore(); + this.redisService = redisService; + this.discovery = discovery; + this.entityRetriever = entityRetriever; + this.typeRegistry = typeRegistry; + this.transactionInterceptHelper = transactionInterceptHelper; + } + + @POST + @Path("submit") + @Timed + public Boolean submit (@QueryParam("migrationType") String migrationType, @QueryParam("forceMigration") boolean forceMigration) throws Exception { + AtlasPerfTracer perf = null; + MigrationService migrationService; + + try { + if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { + perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "MigrationREST.submit(" + migrationType + ")"); + } + + migrationType = MIGRATION_TYPE_PREFIX + migrationType; + + isMigrationInProgress(migrationType); + + switch (migrationType) { + case DATA_MESH_QN: + migrationService = new DataMeshQNMigrationService(entityStore, discovery, entityRetriever, typeRegistry, transactionInterceptHelper, redisService, forceMigration); + break; + + default: + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, "Type of migration is not valid: " + migrationType); + } + + Thread migrationThread = new Thread(migrationService); + migrationThread.start(); + + } catch (Exception e) { + LOG.error("Error while submitting migration", e); + return Boolean.FALSE; + } finally { + AtlasPerfTracer.log(perf); + } + return Boolean.TRUE; + } + + private void isMigrationInProgress(String migrationType) throws AtlasBaseException { + String status = redisService.getValue(migrationType); + if (PreProcessorUtils.MigrationStatus.IN_PROGRESS.name().equals(status)) { + throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, + String.format("Migration for %s is already in progress", migrationType)); + } + } + + @GET + @Path("status") + @Timed + public String getMigrationStatus(@QueryParam("migrationType") String migrationType) throws Exception { + AtlasPerfTracer perf = null; + + try { + if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { + perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, 
"MigrationREST.getMigrationStatus(" + migrationType + ")"); + } + + String value = redisService.getValue(MIGRATION_TYPE_PREFIX + migrationType); + + return Objects.nonNull(value) ? value : "No Migration Found with this key"; + } catch (Exception e) { + LOG.error("Error while fetching status for migration", e); + throw e; + } finally { + AtlasPerfTracer.log(perf); + } + } + + @POST + @Path("dataproduct/inputs-outputs") + @Timed + public Boolean migrateProductInternalAttr (@QueryParam("guid") String guid) throws Exception { + AtlasPerfTracer perf = null; + + try { + if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { + perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "MigrationREST.migrateProductInternalAttr(" + guid + ")"); + } + + DataProductInputsOutputsMigrationService migrationService = new DataProductInputsOutputsMigrationService(entityRetriever, guid, transactionInterceptHelper); + migrationService.migrateProduct(); + + } catch (Exception e) { + LOG.error("Error while migration inputs/outputs for Dataproduct: {}", guid, e); + throw e; + } finally { + AtlasPerfTracer.log(perf); + } + return Boolean.TRUE; } @POST