post-review of role-assignments
* code formatting, redundant import
* extracted method create_batch_processor
* implemented dry-run for subcommand list
* implemented fail-on-first-error for subcommand add
* redundant arg and check in get_aliases
jo-pol committed Feb 19, 2024
1 parent d69b714 commit 7044877
Showing 3 changed files with 32 additions and 36 deletions.
src/datastation/common/common_batch_processing.py (12 changes: 2 additions & 10 deletions)
@@ -74,23 +74,15 @@ def get_pids(pid_or_pids_file, search_api=None, query="*", subtree="root", objec
         return get_provided_items_iterator(pid_or_pids_file, "pid")
 
 
-def get_aliases(alias_or_aliases_file, dry_run=False):
+def get_aliases(alias_or_aliases_file):
     """
     Args:
         alias_or_aliases_file: The dataverse alias, or a file with a list of aliases.
-        dry_run: Do not perform the action, but show what would be done.
-                 Only applicable if pid_or_pids_file is None.
     Returns: an iterator with aliases
     """
-    if alias_or_aliases_file is None:
-        # The tree of all (published) dataverses could be retrieved and aliases could recursively be extracted
-        # from the tree, but this is not implemented yet.
-        logging.warning(f"No aliases provided, nothing to do.")
-        return None
-    else:
-        return get_provided_items_iterator(alias_or_aliases_file, "alias")
+    return get_provided_items_iterator(alias_or_aliases_file, "alias")
 
 
 class DatasetBatchProcessor(CommonBatchProcessor):
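For context, a minimal usage sketch of the simplified helper. This is not part of the commit; it assumes get_provided_items_iterator yields the single value when given a plain alias and one alias per line when given a file path, and aliases.txt is a hypothetical file name.

    from datastation.common.common_batch_processing import get_aliases

    # Single alias: the iterator is assumed to yield just "root".
    for alias in get_aliases("root"):
        print(alias)

    # File input: assumed to yield one alias per line of the file.
    for alias in get_aliases("aliases.txt"):
        print(alias)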
src/datastation/dataverse/roles.py (10 changes: 5 additions & 5 deletions)
@@ -1,7 +1,7 @@
+import rich
 from datetime import datetime
 
 from datastation.common.common_batch_processing import DataverseBatchProcessorWithReport, get_aliases
-import rich
+
 from datastation.dataverse.dataverse_api import DataverseApi
 from datastation.dataverse.dataverse_client import DataverseClient
-
@@ -13,7 +13,7 @@ def __init__(self, dataverse_client: DataverseClient, dry_run: bool = False):
         self.dry_run = dry_run
 
     def list_role_assignments(self, alias):
-        r = self.dataverse_client.dataverse(alias).get_role_assignments()
+        r = self.dataverse_client.dataverse(alias).get_role_assignments(self.dry_run)
         if r is not None:
             rich.print_json(data=r)
 
@@ -32,7 +32,8 @@ def add_role_assignment(self, role_assignment, dataverse_api: DataverseApi, csv_
             {'alias': dataverse_api.get_alias(), 'Modified': datetime.now(), 'Assignee': assignee, 'Role': role,
              'Change': action})
 
-    def in_current_assignments(self, assignee, role, dataverse_api: DataverseApi):
+    @staticmethod
+    def in_current_assignments(assignee, role, dataverse_api: DataverseApi):
         current_assignments = dataverse_api.get_role_assignments()
         found = False
         for current_assignment in current_assignments:
@@ -42,7 +43,6 @@ def in_current_assignments(self, assignee, role, dataverse_api: DataverseApi):
                 break
         return found
 
-
     def remove_role_assignment(self, role_assignment, dataverse_api: DataverseApi, csv_report):
        assignee = role_assignment.split('=')[0]
        role = role_assignment.split('=')[1]
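A minimal sketch of how the dry-run flag now reaches the API call. The helper function name is illustrative only, and it is assumed here that get_role_assignments logs or prints the request instead of sending it when dry_run is set; the actual DataverseApi behaviour is not shown in this diff.

    from datastation.dataverse.dataverse_client import DataverseClient
    from datastation.dataverse.roles import DataverseRole

    def list_root_assignments(client: DataverseClient):
        # dry_run is forwarded from the constructor into get_role_assignments();
        # listing is read-only, so the flag only affects whether the request is made.
        DataverseRole(client, dry_run=True).list_role_assignments("root")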
src/datastation/dv_dataverse_role_assignment.py (46 changes: 25 additions & 21 deletions)
@@ -8,36 +8,40 @@
 
 
 def list_role_assignments(args, dataverse_client: DataverseClient):
-    role_assignment = DataverseRole(dataverse_client)
-    role_assignment.list_role_assignments(args.alias)
+    DataverseRole(dataverse_client, args.dry_run).list_role_assignments(args.alias)
 
 
 def add_role_assignments(args, dataverse_client: DataverseClient):
     role_assignment = DataverseRole(dataverse_client, args.dry_run)
     aliases = get_aliases(args.alias_or_alias_file)
-    batch_processor = DataverseBatchProcessorWithReport(wait=args.wait, fail_on_first_error=args.fail_fast,
-                                                        report_file=args.report_file,
-                                                        headers=['alias', 'Modified', 'Assignee', 'Role', 'Change'])
-    batch_processor.process_aliases(aliases,
-                                    lambda alias,
-                                    csv_report: role_assignment.add_role_assignment(args.role_assignment,
-                                                                                    dataverse_api=
-                                                                                    dataverse_client.dataverse(
-                                                                                        alias),
-                                                                                    csv_report=csv_report))
+    create_batch_processor(args).process_aliases(
+        aliases,
+        lambda alias,
+        csv_report: role_assignment.add_role_assignment(args.role_assignment,
+                                                        dataverse_api=dataverse_client.dataverse(alias),
+                                                        csv_report=csv_report)
+    )
 
 
 def remove_role_assignments(args, dataverse_client: DataverseClient):
     role_assignment = DataverseRole(dataverse_client, args.dry_run)
     aliases = get_aliases(args.alias_or_alias_file)
-    batch_processor = DataverseBatchProcessorWithReport(wait=args.wait, report_file=args.report_file,
-                                                        headers=['alias', 'Modified', 'Assignee', 'Role', 'Change'])
-    batch_processor.process_aliases(aliases,
-                                    lambda alias,
-                                    csv_report: role_assignment.remove_role_assignment(args.role_assignment,
-                                                                                       dataverse_api=
-                                                                                       dataverse_client.dataverse(
-                                                                                           alias),
-                                                                                       csv_report=csv_report))
+    create_batch_processor(args).process_aliases(
+        aliases,
+        lambda alias,
+        csv_report: role_assignment.remove_role_assignment(args.role_assignment,
+                                                           dataverse_api=dataverse_client.dataverse(alias),
+                                                           csv_report=csv_report)
+    )
 
 
+def create_batch_processor(args):
+    return DataverseBatchProcessorWithReport(
+        wait=args.wait,
+        fail_on_first_error=args.fail_fast,
+        report_file=args.report_file,
+        headers=['alias', 'Modified', 'Assignee', 'Role', 'Change']
+    )
+
+
 def main():
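The fail-on-first-error behaviour itself lives in the batch processor, not in this file. Below is a rough, self-contained sketch of the kind of loop that honours such a flag; it is not taken from CommonBatchProcessor, whose actual implementation may differ.

    import logging
    import time

    def process_items(items, callback, wait=0.1, fail_on_first_error=True):
        for item in items:
            try:
                callback(item)
            except Exception as e:
                logging.exception(f"Processing of {item} failed: {e}")
                if fail_on_first_error:
                    break  # stop the whole batch after the first failure
            time.sleep(wait)  # throttle successive requests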
