-
Notifications
You must be signed in to change notification settings - Fork 244
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Update the validation for loading to check the required fields as well (
#3807) To ensure field-level permissions and successful dataset load, there are several validations performed on the `mapping.yml` file. These validations check whether the fields and SObjects have the required permissions, and whether namespaces need to be injected, as well as handling case insensitivity for the `mapping.yml`. This functionality is already implemented in the function `validate_and_inject_mapping`. However, there was a missing corner case where the function did not capture errors for required fields missing in the mapping.yml. This functionality has now been added, and the function is used for the preflight check. --------- Co-authored-by: James Estevez <[email protected]>
- Loading branch information
1 parent
b7eda89
commit 287c119
Showing
11 changed files
with
286 additions
and
15 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -115,15 +115,15 @@ def test_run__person_accounts_disabled(self, query_op_mock): | |
sobject="Account", | ||
api_options={}, | ||
context=task, | ||
query="SELECT Id FROM Account", | ||
query="SELECT Id, Name FROM Account", | ||
) | ||
mock_query_contacts = MockBulkQueryOperation( | ||
sobject="Contact", | ||
api_options={}, | ||
context=task, | ||
query="SELECT Id, FirstName, LastName, Email, AccountId FROM Contact", | ||
) | ||
mock_query_households.results = [["1"]] | ||
mock_query_households.results = [["1", "None"]] | ||
mock_query_contacts.results = [ | ||
["2", "First", "Last", "[email protected]", "1"] | ||
] | ||
|
@@ -170,15 +170,15 @@ def test_run__person_accounts_enabled(self, query_op_mock): | |
sobject="Account", | ||
api_options={}, | ||
context=task, | ||
query="SELECT Id, IsPersonAccount FROM Account", | ||
query="SELECT Id, Name, IsPersonAccount FROM Account", | ||
) | ||
mock_query_contacts = MockBulkQueryOperation( | ||
sobject="Contact", | ||
api_options={}, | ||
context=task, | ||
query="SELECT Id, FirstName, LastName, Email, IsPersonAccount, AccountId FROM Contact", | ||
) | ||
mock_query_households.results = [["1", "false"]] | ||
mock_query_households.results = [["1", "None", "false"]] | ||
mock_query_contacts.results = [ | ||
["2", "First", "Last", "[email protected]", "true", "1"] | ||
] | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -122,9 +122,8 @@ def test_run(self, dml_mock): | |
|
||
mock_describe_calls() | ||
task() | ||
|
||
assert step.records == [ | ||
["TestHousehold", "1"], | ||
["TestHousehold", "TestHousehold", "1"], | ||
["Test", "User", "[email protected]", "001000000000000"], | ||
["Error", "User", "[email protected]", "001000000000000"], | ||
] | ||
|
@@ -387,9 +386,8 @@ def test_run__sql(self, dml_mock): | |
] | ||
mock_describe_calls() | ||
task() | ||
|
||
assert step.records == [ | ||
["TestHousehold", "1"], | ||
[None, "TestHousehold", "1"], | ||
["Test☃", "User", "[email protected]", "001000000000000"], | ||
["Error", "User", "[email protected]", "001000000000000"], | ||
] | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,49 @@ | ||
from cumulusci.core.datasets import Dataset | ||
from cumulusci.core.exceptions import BulkDataException | ||
from cumulusci.tasks.bulkdata.mapping_parser import ( | ||
parse_from_yaml, | ||
validate_and_inject_mapping, | ||
) | ||
from cumulusci.tasks.bulkdata.step import DataOperationType | ||
from cumulusci.tasks.salesforce import BaseSalesforceApiTask | ||
|
||
|
||
class LoadDataSetCheck(BaseSalesforceApiTask):
    """Preflight check that validates a dataset's mapping.yml against the org.

    Parses the dataset's mapping file and runs ``validate_and_inject_mapping``
    (with namespace injection enabled and ``drop_missing=False``) so that any
    missing permissions or required fields surface before an actual load.
    Sets ``self.return_values`` to True on success, False on failure.
    """

    task_docs = """
    A preflight check to ensure a dataset can be loaded successfully
    """
    task_options = {
        "dataset": {
            "description": "Dataset on which preflight checks need to be performed",
            "required": False,
        },
    }

    def _init_options(self, kwargs):
        # Cooperative super() so every _init_options in the MRO runs.
        # The previous super(BaseSalesforceApiTask, self) call skipped
        # BaseSalesforceApiTask's own initialization hook.
        super()._init_options(kwargs)
        # Fall back to the conventionally-named "default" dataset.
        self.options.setdefault("dataset", "default")

    def _run_task(self):
        # Resolve the mapping file path for the requested dataset; no schema
        # is needed because we only validate the mapping, not the data itself.
        mapping_file_path = Dataset(
            self.options["dataset"],
            self.project_config,
            self.sf,
            self.org_config,
            schema=None,
        ).mapping_file
        self.mapping = parse_from_yaml(mapping_file_path)
        try:
            # Validate permissions / required fields / namespace injection for
            # an INSERT operation; drop_missing=False makes problems fatal
            # instead of silently pruning unmapped entries.
            validate_and_inject_mapping(
                mapping=self.mapping,
                sf=self.sf,
                namespace=self.project_config.project__package__namespace,
                data_operation=DataOperationType.INSERT,
                inject_namespaces=True,
                drop_missing=False,
            )
            self.return_values = True
        except BulkDataException as e:
            # Preflight checks report failure rather than raising, so log the
            # validation error and return False.
            self.logger.error(e)
            self.return_values = False
        return self.return_values
Oops, something went wrong.