Skip to content

Commit

Permalink
tentative update of parsers with context
Browse files Browse the repository at this point in the history
  • Loading branch information
cmaumet committed Sep 4, 2023
1 parent 2364ee4 commit f5b12cb
Show file tree
Hide file tree
Showing 3 changed files with 111 additions and 111 deletions.
96 changes: 48 additions & 48 deletions bids_prov/afni/afni_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -193,45 +193,45 @@ def build_records(commands_bloc: list, agent_id: str, verbose: bool = False):
label = f"{os.path.split(a_name)[1]}"

activity = {
"@id": f"urn:{get_id()}",
"label": label_mapping(label, "afni/afni_labels.json"),
"associatedWith": "urn:" + agent_id,
"command": cmd,
"parameters": param_dic,
"used": list(),
"Id": f"urn:{get_id()}",
"Label": label_mapping(label, "afni/afni_labels.json"),
"AssociatedWith": "urn:" + agent_id,
"Command": cmd,
"Parameters": param_dic,
"Used": list(),
}

for input_path in inputs:
input_id = f"urn:{get_id()}" # def format_id
existing_input = next(
(entity for entity in records["prov:Entity"] if entity["prov:atLocation"] == input_path), None)
(entity for entity in records["Entities"] if entity["AtLocation"] == input_path), None)

if existing_input is None:
new_label = os.path.split(input_path)[1]
new_label_rename = clean_label_suffix(new_label)
ent = {
"@id": input_id,
"label": new_label_rename,
"prov:atLocation": input_path,
"Id": input_id,
"Label": new_label_rename,
"AtLocation": input_path,
}
records["prov:Entity"].append(ent)
activity["used"].append(input_id)
records["Entities"].append(ent)
activity["Used"].append(input_id)
else:
activity["used"].append(existing_input["@id"])
activity["Used"].append(existing_input["Id"])

for output_path in outputs:
records["prov:Entity"].append(
{
"@id": f"urn:{get_id()}",
"label": os.path.split(output_path)[1],
"prov:atLocation": output_path,
"generatedBy": activity["@id"],
"Id": f"urn:{get_id()}",
"Label": os.path.split(output_path)[1],
"AtLocation": output_path,
"GeneratedBy": activity["Id"],
# "derivedFrom": input_id,
}
)
bloc_act.append((bloc, activity["@id"]))
bloc_act.append((bloc, activity["Id"]))

records["prov:Activity"].append(activity)
records["Activities"].append(activity)
if verbose:
print('-------------------------')

Expand Down Expand Up @@ -328,8 +328,8 @@ def get_activities_by_ids(graph, ids):
list of activities
"""
activities = []
for activity in graph["records"]["prov:Activity"]:
if activity["@id"] in ids:
for activity in graph["Records"]["Activities"]:
if activity["Id"] in ids:
activities.append(activity)
return activities

Expand All @@ -356,15 +356,15 @@ def fusion_activities(activities, label):
command = ""

for activity in activities:
used_entities.extend(activity["used"])
command += activity["command"] + "; "
used_entities.extend(activity["Used"])
command += activity["Command"] + "; "

return {
"@id": f"urn:{get_id()}",
"label": label,
"associatedWith": activities[0]["associatedWith"],
"command": command,
"used": used_entities,
"Id": f"urn:{get_id()}",
"Label": label,
"AssociatedWith": activities[0]["associatedWith"],
"Command": command,
"Used": used_entities,
}


Expand All @@ -389,34 +389,34 @@ def get_extern_entities_from_activities(graph, activities, id_fusion_activity):
List extern entities
"""
if len(activities) > 0:
activities_ids = [act["@id"] for act in activities]
activities_ids = [act["Id"] for act in activities]
used_ents_ids = []
for act in activities:
used_ents_ids.extend(act["used"])
used_ents_ids.extend(act["Used"])
used_ents_ids = set(used_ents_ids)

used_ents = []
generated_entities = []
for ent in graph["records"]["prov:Entity"]:
if ent["@id"] in used_ents_ids:
if "generatedBy" in ent:
if ent["generatedBy"] not in activities_ids:
for ent in graph["Records"]["Entities"]:
if ent["Id"] in used_ents_ids:
if "GeneratedBy" in ent:
if ent["GeneratedBy"] not in activities_ids:
used_ents.append(ent)
else:
used_ents.append(ent)

if "generatedBy" in ent:
if ent["generatedBy"] in activities_ids:
if ent["@id"] not in used_ents_ids:
if "GeneratedBy" in ent:
if ent["GeneratedBy"] in activities_ids:
if ent["Id"] not in used_ents_ids:
generated_entities.append(ent)

# for ent in used_ents:
# if "generatedBy" in ent:
# ent["generatedBy"] = id_fusion_activity

for ent in generated_entities:
if "generatedBy" in ent:
ent["generatedBy"] = id_fusion_activity
if "GeneratedBy" in ent:
ent["GeneratedBy"] = id_fusion_activity

return used_ents + generated_entities

Expand Down Expand Up @@ -454,8 +454,8 @@ def afni_to_bids_prov(filename: str, context_url=CONTEXT_URL, output_file=None,
graph, agent_id = get_default_graph(label="AFNI", context_url=context_url, soft_ver=soft_ver)
records, bloc_act = build_records(commands_bloc, agent_id, verbose=verbose)

graph["records"].update(records)
compute_sha_256_entity(graph["records"]["prov:Entity"])
graph["Records"].update(records)
compute_sha_256_entity(graph["Records"]["Entities"])

if with_blocs:
bl_name = list(OrderedDict.fromkeys(bl for (bl, id) in bloc_act))
Expand All @@ -470,18 +470,18 @@ def afni_to_bids_prov(filename: str, context_url=CONTEXT_URL, output_file=None,
activities = get_activities_by_ids(graph_bloc, bloc["act_ids"])
fus_activities = fusion_activities(activities, bloc["bloc_name"])
ext_entities = get_extern_entities_from_activities(
graph_bloc, activities, fus_activities["@id"])
graph_bloc, activities, fus_activities["Id"])
for ent in ext_entities:
if ent["@id"] not in entities_blocs:
if ent["Id"] not in entities_blocs:
entities_blocs.append(ent)

for ent_used in fus_activities["used"]:
if ent_used not in [id_["@id"] for id_ in ext_entities]:
fus_activities["used"].remove(ent_used)
for ent_used in fus_activities["Used"]:
if ent_used not in [id_["Id"] for id_ in ext_entities]:
fus_activities["Used"].remove(ent_used)
activities_blocs.append(fus_activities)

graph_bloc["records"]["prov:Activity"] = activities_blocs
graph_bloc["records"]["prov:Entity"] = entities_blocs
graph_bloc["Records"]["Activities"] = activities_blocs
graph_bloc["Records"]["Entities"] = entities_blocs

return writing_jsonld(graph_bloc, indent, output_file)

Expand Down
52 changes: 26 additions & 26 deletions bids_prov/fsl/fsl_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -332,17 +332,17 @@ def get_entities(cmd_s, parameters):

# print("\n\n cmd_s change", cmd_s)

if "used" in parameters:
if "Used" in parameters:
add_argument_list, inputs_kwarg = _get_kwarg(
parameters["used"])
parameters["Used"])

for kwarg in add_argument_list:
arg = kwarg.pop("arg")
parser.add_argument(arg, **kwarg)

if "generatedBy" in parameters:
if "GeneratedBy" in parameters:
add_argument_list, outputs_kwarg = _get_kwarg(
parameters["generatedBy"])
parameters["GeneratedBy"])
for kwarg in add_argument_list:
arg = kwarg.pop("arg")
parser.add_argument(arg, **kwarg)
Expand Down Expand Up @@ -382,11 +382,11 @@ def get_entities(cmd_s, parameters):
params = _get_entities_from_kwarg(params, opts, parameters_value)
params = _get_entities_from_kwarg(params, opts, parameters_no_value)

if "used" in parameters:
inputs.extend(_get_arg(parameters["used"], arg_rest))
if "Used" in parameters:
inputs.extend(_get_arg(parameters["Used"], arg_rest))

if "generatedBy" in parameters:
outputs.extend(_get_arg(parameters["generatedBy"], arg_rest))
if "GeneratedBy" in parameters:
outputs.extend(_get_arg(parameters["GeneratedBy"], arg_rest))

# print("\n\n inputs", inputs)
# print("\n\n outputs", outputs)
Expand Down Expand Up @@ -475,46 +475,46 @@ def build_records(groups: Mapping[str, List[str]], agent_id: str):
label = f"{os.path.split(a_name)[1]}"

a = {
"@id": f"urn:{get_id()}",
"label": label_mapping(label, "fsl/fsl_labels.json"),
"associatedWith": "urn:" + agent_id,
"command": cmd,
"Id": f"urn:{get_id()}",
"Label": label_mapping(label, "fsl/fsl_labels.json"),
"AssociatedWith": "urn:" + agent_id,
"Command": cmd,
# "attributes": [
# {k: v if len(v) > 1 else v[0]} for k, v in attributes.items()
# ],
"used": list(),
"Used": list(),
}

for input_path in inputs:
# input_name = input_path.replace("/", "_") # TODO
input_id = f"urn:{get_id()}" # def format_id

existing_input = next(
(entity for entity in records["prov:Entity"] if entity["prov:atLocation"] == input_path), None)
(entity for entity in records["Entities"] if entity["AtLocation"] == input_path), None)
if existing_input is None:
e = {
"@id": input_id,
"label": os.path.split(input_path)[1],
"prov:atLocation": input_path,
"Id": input_id,
"Label": os.path.split(input_path)[1],
"AtLocation": input_path,
}
records["prov:Entity"].append(e)
a["used"].append(input_id)
records["Entities"].append(e)
a["Used"].append(input_id)
else:
a["used"].append(existing_input["@id"])
a["Used"].append(existing_input["Id"])

for output_path in outputs:
# output_name = output_path.replace("/", "_") # TODO
records["prov:Entity"].append(
records["Entities"].append(
{
"@id": f"urn:{get_id()}",
"label": os.path.split(output_path)[1],
"prov:atLocation": output_path,
"generatedBy": a["@id"],
"Id": f"urn:{get_id()}",
"Label": os.path.split(output_path)[1],
"AtLocation": output_path,
"GeneratedBy": a["Id"],
# "derivedFrom": input_id,
}
)

records["prov:Activity"].append(a)
records["Activities"].append(a)
return dict(records)


Expand Down
Loading

0 comments on commit f5b12cb

Please sign in to comment.