Merge branch 'main' into fix-collection
tsalo authored Sep 25, 2023
2 parents 7c9603d + da6d866 commit 8ffe6cb
Showing 3 changed files with 70 additions and 37 deletions.
@@ -138,16 +138,15 @@
#}
{% include "anatomical_registration_plots.html.jinja" %}

{# Carpet/line plot for pre- and post-regression, concatenate across runs. #}
{% include "concatenated_task_static_plots.html.jinja" %}

{#
Task static plots. One section per run of each task.
1. Task in T1
2. T1 in Task
3. BOLD mean(?) image on the left
4. BOLD reference image on the left
3/4. Pre and post regression carpet/line plots on right.
"Functional Data" section, with BOLD figures.
1. Concatenated resting-state carpet plots.
2. One section per run of each task.
1. Task in T1
2. T1 in Task
3. BOLD mean(?) image on the left
4. BOLD reference image on the left
3/4. Pre and post regression carpet/line plots on right.
#}
{% include "task_static_plots.html.jinja" %}

@@ -2,8 +2,9 @@
Start the tasks section and put in the column headings for the task-specific data.
Inputs:
- task_files[]{"task"}
- task_files[]{"run"}
- concatenated_rest_files{"preproc_carpet"}
- concatenated_rest_files{"postproc_carpet"}
- task_files[]{"key"}
- task_files[]{"registration_files"}
- task_files[]{"registration_titles"}
- task_files[]{"bold"}
@@ -22,11 +23,14 @@
<div class="w3-container">
<div class="w3-row-padding">
<div class="w3-center"><h2>Functional Data</h2></div>

{# Carpet/line plot for pre- and post-regression, concatenate across runs. #}
{% include "concatenated_task_static_plots.html.jinja" %}

<div>
{% for run_dict in task_files %}

{% set task = run_dict["task"] %}
{% set run = run_dict["run"] %}
{% set key = run_dict["key"] %}
{% set registration_files = run_dict["registration_files"] %}
{% set registration_titles = run_dict["registration_titles"] %}
{% set bold = run_dict["bold"] %}
@@ -38,7 +42,7 @@
Add the task name for the next few rows.
#}
<div class="w3-row"></div>
<div class="w3-left label2">task-{{ task }} run-{{ run }}:</div>
<div class="w3-left label2">{{ key }}:</div>
<div class="w3-row"></div>

{# Full rows for registration files #}
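
To make the template inputs listed above concrete, here is a hypothetical sketch (in Python) of the context that collect_inputs in xcp_d/interfaces/execsummary.py assembles for these templates. Only the keys documented in the template header are shown, and the figure paths are placeholders rather than real outputs.

# Hypothetical template context; the paths are illustrative placeholders.
concatenated_rest_files = {
    "preproc_carpet": "figures/sub-01_task-rest_desc-preprocESQC_bold.svg",
    "postproc_carpet": "figures/sub-01_task-rest_desc-postprocESQC_bold.svg",
}
task_files = [
    {
        "key": "task-nback run-1",  # string used for the subsection header
        "registration_files": [
            "figures/sub-01_task-nback_run-1_desc-TaskInT1_bold.svg",
            "figures/sub-01_task-nback_run-1_desc-T1InTask_bold.svg",
        ],
        "registration_titles": ["Task in T1", "T1 in Task"],
        "bold": "figures/sub-01_task-nback_run-1_bold.svg",
    },
]
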
76 changes: 53 additions & 23 deletions xcp_d/interfaces/execsummary.py
@@ -4,6 +4,8 @@
import re
from pathlib import Path

import numpy as np
import pandas as pd
from bids.layout import BIDSLayout, Query
from bs4 import BeautifulSoup
from jinja2 import Environment, FileSystemLoader, Markup
@@ -144,6 +146,24 @@ def collect_inputs(self):

self.structural_files_ = structural_files

# Collect figures for concatenated resting-state data (if any)
concatenated_rest_files = {}

query = {
"subject": self.subject_id,
"task": "rest",
"run": Query.NONE,
"desc": "preprocESQC",
"suffix": "bold",
"extension": ".svg",
}
concatenated_rest_files["preproc_carpet"] = self._get_bids_file(query)

query["desc"] = "postprocESQC"
concatenated_rest_files["postproc_carpet"] = self._get_bids_file(query)

self.concatenated_rest_files_ = concatenated_rest_files
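# Illustrative aside (an assumption, not part of this module): if _get_bids_file
# wraps pybids, a query dict like the one above could be resolved with
#     found = self.layout.get(return_type="file", **query)
#     carpet_file = found[0] if found else None
# where self.layout is a BIDSLayout (imported at the top of this file).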

# Determine the unique entity-sets for the task data.
postproc_files = self.layout.get(
subject=self.subject_id,
@@ -166,34 +186,42 @@ def collect_inputs(self):
task_entity_sets = []
for entity_set in unique_entity_sets:
for entity in ORDERING:
entity_set[entity] = entity_set.get(entity, Query.NONE)
entity_set[entity] = entity_set.get(entity, np.nan)

task_entity_sets.append(entity_set)

concatenated_rest_files = {}
# Now sort the entity sets by each entity
task_entity_sets = pd.DataFrame(task_entity_sets)
task_entity_sets = task_entity_sets.sort_values(by=task_entity_sets.columns.tolist())
task_entity_sets = task_entity_sets.fillna(Query.NONE)

query = {
"subject": self.subject_id,
"task": "rest",
"run": Query.NONE,
"desc": "preprocESQC",
"suffix": "bold",
"extension": ".svg",
}
concatenated_rest_files["preproc_carpet"] = self._get_bids_file(query)

query["desc"] = "postcarpetplot"
concatenated_rest_files["postproc_carpet"] = self._get_bids_file(query)
# Extract entities with variability
# This lets us name the sections based on multiple entities (not just task and run)
nunique = task_entity_sets.nunique()
nunique.loc["task"] = 2 # ensure we keep task
nunique.loc["run"] = 2 # ensure we keep run
cols_to_drop = nunique[nunique == 1].index
task_entity_namer = task_entity_sets.drop(cols_to_drop, axis=1)

self.concatenated_rest_files_ = concatenated_rest_files
# Convert back to dictionary
task_entity_sets = task_entity_sets.to_dict(orient="records")
task_entity_namer = task_entity_namer.to_dict(orient="records")

task_files = []

for task_entity_set in task_entity_sets:
task_file_figures = task_entity_set.copy()
task_file_figures[
"key"
] = f"task-{task_entity_set['task']}_run-{task_entity_set.get('run', 0)}"
for i_set, task_entity_set in enumerate(task_entity_sets):
task_file_figures = {}

# Convert any floats in the name to ints
temp_dict = {}
for k, v in task_entity_namer[i_set].items():
try:
temp_dict[k] = int(v)
except (ValueError, TypeError):
temp_dict[k] = v

# String used for subsection headers
task_file_figures["key"] = " ".join([f"{k}-{v}" for k, v in temp_dict.items()])

query = {
"subject": self.subject_id,
@@ -222,10 +250,12 @@ def collect_inputs(self):

task_file_figures["registration_files"].append(found_file)

task_files.append(task_file_figures)
# If there is no mean BOLD figure, then the "run" was made by the concatenation workflow.
# Skip the concatenated resting-state scan, since it has its own section.
if query["task"] == "rest" and not task_file_figures["bold"]:
continue

# Sort the files by the desired key
task_files = sorted(task_files, key=lambda d: d["key"])
task_files.append(task_file_figures)

self.task_files_ = task_files
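
Taken together, the new collection logic fills missing entities with NaN so the entity sets can be sorted as a pandas DataFrame, drops entities that do not vary across runs, and builds one header string per run. Below is a self-contained sketch of that approach under stated assumptions: the entity sets are made up, ORDERING is reduced to a three-entity list, and a plain string stands in for bids.layout.Query.NONE.

# Standalone sketch of the entity-set sorting and naming technique used above.
import numpy as np
import pandas as pd

ORDERING = ["task", "acquisition", "run"]  # assumed subset of xcp_d's ORDERING
NONE = "none"  # placeholder for Query.NONE

unique_entity_sets = [
    {"task": "rest", "run": 2},
    {"task": "nback", "run": 1},
    {"task": "rest", "run": 1},
    {"task": "rest"},  # e.g., a concatenated "run" with no run entity
]

# Fill in missing entities with NaN so pandas can sort them consistently.
task_entity_sets = []
for entity_set in unique_entity_sets:
    for entity in ORDERING:
        entity_set[entity] = entity_set.get(entity, np.nan)
    task_entity_sets.append(entity_set)

# Sort by every entity, then restore the missing-value sentinel.
df = pd.DataFrame(task_entity_sets)
df = df.sort_values(by=df.columns.tolist())
df = df.fillna(NONE)

# Keep only entities that vary across runs (always keeping task and run),
# so the section headers mention just the informative entities.
nunique = df.nunique()
nunique.loc["task"] = 2
nunique.loc["run"] = 2
namer = df.drop(nunique[nunique == 1].index, axis=1)

# Build one header string per entity set, converting floats back to ints.
for record in namer.to_dict(orient="records"):
    clean = {}
    for k, v in record.items():
        try:
            clean[k] = int(v)
        except (ValueError, TypeError):
            clean[k] = v
    print(" ".join(f"{k}-{v}" for k, v in clean.items()))
# Prints, one per line: task-nback run-1, task-rest run-1, task-rest run-2, task-rest run-none

In the real interface, the last of those entries (resting state with no run entity) corresponds to the concatenated data; as the diff above shows, it is skipped because it has no mean BOLD figure and already has its own report section.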
