group fc
@@ -9,6 +9,13 @@
- Setting SHORT_CHANNEL to False will now grey out SHORT_CHANNEL_REGRESSION, as it is impossible to regress what does not exist. Sets SHORT_CHANNEL_REGRESSION to False under the hood when it is greyed out, regardless of what is displayed. Fixes [Issue 47](https://git.research.dezeeuw.ca/tyler/flares/issues/47) (see the sketch after this list)
- Projects can now be saved if files have different parent folders. Fixes [Issue 48](https://git.research.dezeeuw.ca/tyler/flares/issues/48)
- It is no longer possible to attempt a save before any data has been processed. A popup will now display if a save is attempted with nothing to save
+- Fixed a bug where LONG_CHANNEL_THRESH was not being applied in the processing steps
+- Added a new option in the Analysis window for Group Functional Connectivity. Implements [Issue 50](https://git.research.dezeeuw.ca/tyler/flares/issues/50)
+- Group Functional Connectivity is still in development and its results should currently be taken with a grain of salt
+- A warning disclosing this is displayed when entering the Group Functional Connectivity Viewer
+- Fixed a bug when updating optode positions that would prevent .txt files from being selected. Fixes [Issue 54](https://git.research.dezeeuw.ca/tyler/flares/issues/54)
+- Fixed a bug where the secondary download server would never get contacted if the primary failed
+- Automatic downloads will now ignore prerelease versions. Fixes [Issue 52](https://git.research.dezeeuw.ca/tyler/flares/issues/52)
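The SHORT_CHANNEL / SHORT_CHANNEL_REGRESSION behaviour in the first entry above can be pictured with a minimal, hypothetical sketch; the widget and option names are illustrative and are not the FLARES settings code:

# Hypothetical sketch of the greying-out behaviour; not the actual FLARES settings code.
from PyQt5.QtWidgets import QApplication, QCheckBox, QVBoxLayout, QWidget

app = QApplication([])
settings = {"SHORT_CHANNEL": True, "SHORT_CHANNEL_REGRESSION": True}

panel = QWidget()
layout = QVBoxLayout(panel)
short_channel = QCheckBox("SHORT_CHANNEL")
regression = QCheckBox("SHORT_CHANNEL_REGRESSION")
short_channel.setChecked(True)
regression.setChecked(True)
layout.addWidget(short_channel)
layout.addWidget(regression)

def on_short_channel_toggled(enabled):
    settings["SHORT_CHANNEL"] = enabled
    regression.setEnabled(enabled)  # grey out the dependent option
    # Regardless of what the greyed-out box displays, store False under the hood
    settings["SHORT_CHANNEL_REGRESSION"] = regression.isChecked() if enabled else False

short_channel.toggled.connect(on_short_channel_toggled)
panel.show()
app.exec_()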
# Version 1.2.0
@@ -134,7 +141,7 @@
- Added a group option when clicking on a participant's file
- If no group is specified, the participant will be added to the "Default" group
- Added option to update the optode positions in a snirf file from the Options menu (F6)
-- Fixed [Issue 3](https://git.research.dezeeuw.ca/tyler/flares/issues/3), [Issue 4](https://git.research.dezeeuw.ca/tyler/flares/issues/4), [Issue 17](https://git.research.dezeeuw.ca/tyler/flares/issues/17), [Issue 21](https://git.research.dezeeuw.ca/tyler/flares/issues/21), [Issue 22](https://git.research.dezeeuw.ca/tyler/flares/issues/22)
+- Fixed [Issue 3](https://git.research.dezeeuw.ca/tyler/flares/issues/3), [Issue 5](https://git.research.dezeeuw.ca/tyler/flares/issues/5), [Issue 17](https://git.research.dezeeuw.ca/tyler/flares/issues/17), [Issue 21](https://git.research.dezeeuw.ca/tyler/flares/issues/21), [Issue 22](https://git.research.dezeeuw.ca/tyler/flares/issues/22)

# Version 1.0.1
flares.py
@@ -3403,7 +3403,7 @@ def process_participant(file_path, progress_callback=None):
        fig_individual["short"] = fig_short_chans
    else:
        short_chans = None

-   get_long_channels(raw, min_dist=SHORT_CHANNEL_THRESH, max_dist=LONG_CHANNEL_THRESH)  # Don't update the existing raw
+   raw = get_long_channels(raw, min_dist=0, max_dist=LONG_CHANNEL_THRESH)  # keep both short channels and all channels up to the threshold length

    if progress_callback: progress_callback(4)
    logger.info("Step 4 Completed.")
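The bug behind this change (and the LONG_CHANNEL_THRESH changelog entry above) is that mne_nirs' `get_long_channels` returns a filtered copy of the Raw object rather than modifying it in place, so the old call silently discarded its result. A minimal sketch, assuming an fNIRS `raw` object is already loaded and using an illustrative threshold value:

# Sketch only: assumes an fNIRS `raw` object is already loaded in memory.
from mne_nirs.channels import get_long_channels

LONG_CHANNEL_THRESH = 0.045  # metres; illustrative value, not the FLARES default

get_long_channels(raw, min_dist=0, max_dist=LONG_CHANNEL_THRESH)        # returned copy discarded: raw is unchanged
raw = get_long_channels(raw, min_dist=0, max_dist=LONG_CHANNEL_THRESH)  # copy kept: threshold actually applied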
@@ -3892,3 +3892,191 @@ def functional_connectivity_betas(raw_hbo, n_lines, vmin, event_name=None):
        vmax=1.0,
        colormap='hot'  # Use 'hot' to make positive connections pop
    )


def get_single_subject_beta_corr(raw_hbo, event_name=None, config=None):
    """Processes one participant and returns their correlation matrix."""
    raw_hbo = raw_hbo.copy().pick(picks="hbo")
    ann = raw_hbo.annotations

    # Rename annotations so each trial gets its own regressor (trial-level GLM)
    new_desc = [f"{desc}__trial_{i:03d}" for i, desc in enumerate(ann.description)]
    ann.description = np.array(new_desc)

    if config is None:
        design_matrix = make_first_level_design_matrix(
            raw=raw_hbo, hrf_model='fir',
            fir_delays=np.arange(0, 12, 1),
            drift_model='cosine', drift_order=1
        )
    else:
        if config.get("SHORT_CHANNEL_REGRESSION"):
            short_chans = get_short_channels(raw_hbo, max_dist=config.get("SHORT_CHANNEL_THRESH"))
            design_matrix = make_first_level_design_matrix(
                raw=raw_hbo,
                stim_dur=config.get("STIM_DUR"),
                hrf_model=config.get("HRF_MODEL"),
                drift_model=config.get("DRIFT_MODEL"),
                high_pass=config.get("HIGH_PASS"),
                drift_order=config.get("DRIFT_ORDER"),
                fir_delays=config.get("FIR_DELAYS"),
                add_regs=short_chans.get_data().T,
                add_reg_names=short_chans.ch_names,
                min_onset=config.get("MIN_ONSET"),
                oversampling=config.get("OVERSAMPLING")
            )
        else:
            design_matrix = make_first_level_design_matrix(
                raw=raw_hbo,
                stim_dur=config.get("STIM_DUR"),
                hrf_model=config.get("HRF_MODEL"),
                drift_model=config.get("DRIFT_MODEL"),
                high_pass=config.get("HIGH_PASS"),
                drift_order=config.get("DRIFT_ORDER"),
                fir_delays=config.get("FIR_DELAYS"),
                min_onset=config.get("MIN_ONSET"),
                oversampling=config.get("OVERSAMPLING")
            )

    glm_results = run_glm(raw_hbo, design_matrix)
    betas = np.array(glm_results.theta())
    reg_names = list(design_matrix.columns)
    n_channels = betas.shape[0]

    # Filter trials by event name
    trial_tags = sorted({
        col.split("_delay")[0] for col in reg_names
        if "__trial_" in col and (event_name is None or col.startswith(event_name + "__"))
    })

    if not trial_tags:
        return None, None

    # Build the beta series (one beta per trial per channel)
    beta_series = np.zeros((n_channels, len(trial_tags)))
    for t, tag in enumerate(trial_tags):
        idx = [i for i, col in enumerate(reg_names) if col.startswith(f"{tag}_delay")]
        beta_series[:, t] = np.mean(betas[:, idx], axis=1).flatten()
        # beta_series[:, t] = np.max(betas[:, idx], axis=1).flatten()  # TODO: decide between mean and max

    # Z-score and GSR (Global Signal Regression)
    beta_series = zscore(beta_series, axis=1)
    global_signal = np.mean(beta_series, axis=0)
    for i in range(n_channels):
        slope, _ = np.polyfit(global_signal, beta_series[i, :], 1)
        beta_series[i, :] -= slope * global_signal

    # Correlation matrix between channels
    corr_matrix = np.corrcoef(beta_series)
    return corr_matrix, raw_hbo.ch_names


def run_group_functional_connectivity(haemo_dict, config_dict, selected_paths, event_name, n_lines, vmin):
    """Aggregates multiple participants and triggers the plot."""
    all_z_matrices = []
    common_names = None

    for path in selected_paths:
        raw = haemo_dict.get(path)
        config = config_dict.get(path)
        if raw is None:
            continue

        corr, names = get_single_subject_beta_corr(raw, event_name, config)

        if corr is not None:
            # Fisher Z-transform so correlations can be averaged and tested across participants
            z_mat = np.arctanh(np.clip(corr, -0.99, 0.99))
            all_z_matrices.append(z_mat)
            common_names = names

    if not all_z_matrices:
        return

    from scipy.stats import ttest_1samp

    # 1. Convert list to 3D array: (participants, channels, channels)
    group_z_data = np.array(all_z_matrices)
    n_channels = group_z_data.shape[1]

    # 2. One-sample t-test across the participant dimension (axis 0):
    #    does the mean Z-score differ from 0?
    #    scipy can emit a "Precision loss ... catastrophic cancellation" RuntimeWarning
    #    when values are nearly identical across participants, so check the variance first.
    variance_matrix = np.var(group_z_data, axis=0)

    # Find where variance is exactly 0 (or very close to it)
    zero_var_indices = np.where(variance_matrix < 1e-15)
    coords = list(zip(zero_var_indices[0], zero_var_indices[1]))

    diag_count = 0
    non_diag_pairs = []
    for r, c in coords:
        if r == c:
            diag_count += 1
        else:
            non_diag_pairs.append((r, c))

    logger.info(f"Variance check: {len(coords)} zero-variance pairs, {diag_count}/{n_channels} on the diagonal.")
    if non_diag_pairs:
        logger.warning(f"{len(non_diag_pairs)} non-diagonal pairs have zero variance: {non_diag_pairs[:10]}")

    t_stats, p_values = ttest_1samp(group_z_data, popmean=0, axis=0)

    # 3. Multiple comparisons correction (FDR) over the upper triangle only (unique connections)
    triu_indices = np.triu_indices(n_channels, k=1)
    flat_p = p_values[triu_indices]
    reject, corrected_p = multipletests(flat_p, method='fdr_bh', alpha=0.05)[:2]

    # 4. Create the final "significant" matrix: average Z, back-transformed to r
    avg_r = np.tanh(np.mean(group_z_data, axis=0))
    sig_avg_r = np.zeros_like(avg_r)

    # Only keep connections that are significant AND above the vmin (r) threshold
    for idx, is_sig in enumerate(reject):
        row, col = triu_indices[0][idx], triu_indices[1][idx]
        r_val = avg_r[row, col]
        if is_sig and abs(r_val) >= vmin:
            sig_avg_r[row, col] = sig_avg_r[col, row] = r_val

    # 5. Plot the significant results
    plot_connectivity_circle(
        sig_avg_r, common_names, n_lines=n_lines,
        title=f"Group Connectivity: {event_name if event_name else 'All Events'}",
        vmin=vmin, vmax=1.0, colormap='hot'
    )
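For reference, the group statistics above can be exercised in isolation. The sketch below uses synthetic matrices (not FLARES output) to show the same Fisher-z, one-sample t-test, and Benjamini-Hochberg FDR steps:

# Self-contained illustration (synthetic data) of the group statistics used above.
import numpy as np
from scipy.stats import ttest_1samp
from statsmodels.stats.multitest import multipletests

rng = np.random.default_rng(0)
n_participants, n_channels = 12, 8

# Fake per-participant correlation values; only the upper triangle is tested below.
corrs = np.clip(rng.normal(0.0, 0.2, (n_participants, n_channels, n_channels)), -0.99, 0.99)
corrs[:, 0, 1] = 0.7 + rng.normal(0.0, 0.05, n_participants)  # one genuinely connected pair

group_z = np.arctanh(corrs)                        # Fisher z-transform
t_stats, p_values = ttest_1samp(group_z, popmean=0, axis=0)

triu = np.triu_indices(n_channels, k=1)            # unique connections only
reject, p_corr = multipletests(p_values[triu], method='fdr_bh', alpha=0.05)[:2]

avg_r = np.tanh(group_z.mean(axis=0))              # average z, back-transformed to r
significant = [(int(r), int(c), float(np.round(avg_r[r, c], 2)))
               for (r, c), keep in zip(zip(*triu), reject) if keep]
print(significant)                                 # the (0, 1) pair with r ~ 0.7 should survive correction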
main.py
@@ -426,14 +426,18 @@ class UpdateCheckThread(QThread):
        for url in urls:
            try:
-               response = requests.get(API_URL, timeout=5)
+               response = requests.get(url, timeout=5)
                response.raise_for_status()
                releases = response.json()

                if not releases:
-                   return None, None
+                   continue

-               latest = releases[0]
+               latest = next((r for r in releases if not r.get("prerelease") and not r.get("draft")), None)
+
+               if not latest:
+                   continue

                tag = latest["tag_name"].lstrip("v")

                for asset in latest.get("assets", []):
@@ -2775,6 +2779,182 @@ class ParticipantFunctionalConnectivityWidget(FlaresBaseWidget):
class GroupFunctionalConnectivityWidget(FlaresBaseWidget):
    def __init__(self, haemo_dict, group, config_dict):
        super().__init__("GroupFunctionalConnectivityWidget")
        self.setWindowTitle("FLARES Group Viewer")
        self.haemo_dict = haemo_dict
        self.group = group
        self.config_dict = config_dict
        self.show_all_events = True
        self._updating_checkstates = False

        QMessageBox.warning(self, "Warning - FLARES", "Functional Connectivity is still in development and the results should currently be taken with a grain of salt. "
                            "By clicking OK, you accept that the images generated may not be factual.")

        # Create mappings: file_path -> participant label and dropdown display text
        self.participant_map = {}              # file_path -> "Participant 1"
        self.participant_dropdown_items = []   # "Participant 1 (filename)"

        for i, file_path in enumerate(self.haemo_dict.keys(), start=1):
            short_label = f"Participant {i}"
            display_label = f"{short_label} ({os.path.basename(file_path)})"
            self.participant_map[file_path] = short_label
            self.participant_dropdown_items.append(display_label)

        self.layout = QVBoxLayout(self)
        self.top_bar = QHBoxLayout()
        self.layout.addLayout(self.top_bar)

        self.group_to_paths = {}
        for file_path, group_name in self.group.items():
            self.group_to_paths.setdefault(group_name, []).append(file_path)

        self.group_names = sorted(self.group_to_paths.keys())

        self.group_dropdown = QComboBox()
        self.group_dropdown.addItem("<None Selected>")
        self.group_dropdown.addItems(self.group_names)
        self.group_dropdown.setCurrentIndex(0)
        self.group_dropdown.currentIndexChanged.connect(self.update_participant_list_for_group)

        self.participant_dropdown = self._create_multiselect_dropdown(self.participant_dropdown_items)
        self.participant_dropdown.currentIndexChanged.connect(self.update_participant_dropdown_label)
        self.participant_dropdown.setEnabled(False)

        self.event_dropdown = QComboBox()
        self.event_dropdown.addItem("<None Selected>")

        self.index_texts = [
            "0 (Betas)",
            # "1 (Significance)",
            # "2 (Brain Activity Visualization)",
            # "3 (fourth image)",
        ]

        self.image_index_dropdown = self._create_multiselect_dropdown(self.index_texts)
        self.image_index_dropdown.currentIndexChanged.connect(self.update_image_index_dropdown_label)

        self.submit_button = QPushButton("Submit")
        self.submit_button.clicked.connect(self.show_brain_images)

        self.top_bar.addWidget(QLabel("Group:"))
        self.top_bar.addWidget(self.group_dropdown)
        self.top_bar.addWidget(QLabel("Participants:"))
        self.top_bar.addWidget(self.participant_dropdown)
        self.top_bar.addWidget(QLabel("Event:"))
        self.top_bar.addWidget(self.event_dropdown)
        self.top_bar.addWidget(QLabel("Image Indexes:"))
        self.top_bar.addWidget(self.image_index_dropdown)
        self.top_bar.addWidget(self.submit_button)

        self.scroll = QScrollArea()
        self.scroll.setWidgetResizable(True)
        self.scroll_content = QWidget()
        self.grid_layout = QGridLayout(self.scroll_content)
        self.scroll.setWidget(self.scroll_content)
        self.layout.addWidget(self.scroll)

        self.thumb_size = QSize(280, 180)
        self.showMaximized()

    def show_brain_images(self):
        import flares

        selected_event = self.event_dropdown.currentText()
        if selected_event == "<None Selected>":
            selected_event = None

        selected_display_names = self._get_checked_items(self.participant_dropdown)
        selected_file_paths = []
        for display_name in selected_display_names:
            for fp, short_label in self.participant_map.items():
                expected_display = f"{short_label} ({os.path.basename(fp)})"
                if display_name == expected_display:
                    selected_file_paths.append(fp)
                    break

        if selected_event:
            valid_paths = []
            for fp in selected_file_paths:
                raw = self.haemo_dict.get(fp)
                # Check if this participant actually has the event in their annotations
                if raw is not None and hasattr(raw, "annotations"):
                    if selected_event in raw.annotations.description:
                        valid_paths.append(fp)
            selected_file_paths = valid_paths

        selected_indexes = [
            int(s.split(" ")[0]) for s in self._get_checked_items(self.image_index_dropdown)
        ]

        if not selected_file_paths:
            print("No participants selected.")
            return

        # Only keep indexes that need parameters
        parameterized_indexes = {
            0: [
                {
                    "key": "n_lines",
                    "label": "<Description>",
                    "default": "20",
                    "type": int,
                },
                {
                    "key": "vmin",
                    "label": "<Description>",
                    "default": "0.9",
                    "type": float,
                },
            ],
        }

        # Inject full_text from index_texts
        for idx, params_list in parameterized_indexes.items():
            full_text = self.index_texts[idx] if idx < len(self.index_texts) else f"{idx} (No label found)"
            for param_info in params_list:
                param_info["full_text"] = full_text

        indexes_needing_params = {idx: parameterized_indexes[idx] for idx in selected_indexes if idx in parameterized_indexes}

        param_values = {}
        if indexes_needing_params:
            dialog = ParameterInputDialog(indexes_needing_params, parent=self)
            if dialog.exec_() == QDialog.Accepted:
                param_values = dialog.get_values()
                if param_values is None:
                    return
            else:
                return

        for idx in selected_indexes:
            if idx == 0:
                params = param_values.get(idx, {})
                n_lines = params.get("n_lines", None)
                vmin = params.get("vmin", None)

                if n_lines is None or vmin is None:
                    print(f"Missing parameters for index {idx}, skipping.")
                    continue
                flares.run_group_functional_connectivity(self.haemo_dict, self.config_dict, selected_file_paths, selected_event, n_lines, vmin)
            elif idx == 1:
                pass
            elif idx == 2:
                pass
            elif idx == 3:
                pass
            else:
                print(f"No method defined for index {idx}")
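The widget above relies on `_create_multiselect_dropdown` and `_get_checked_items`, which are assumed to come from `FlaresBaseWidget` and are not part of this diff. A minimal, hypothetical sketch of how such helpers are commonly built with a checkable `QComboBox` model (not the FLARES implementation, and requiring a running `QApplication`) might look like this:

# Hypothetical helpers; FlaresBaseWidget's real implementation is not shown in this diff.
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QStandardItem, QStandardItemModel
from PyQt5.QtWidgets import QComboBox


def create_multiselect_dropdown(items):
    """Build a QComboBox whose entries carry checkboxes."""
    combo = QComboBox()
    model = QStandardItemModel(combo)
    for text in items:
        item = QStandardItem(text)
        item.setFlags(Qt.ItemIsEnabled | Qt.ItemIsUserCheckable)
        item.setData(Qt.Unchecked, Qt.CheckStateRole)
        model.appendRow(item)
    combo.setModel(model)
    return combo


def get_checked_items(combo):
    """Return the texts of all checked entries."""
    model = combo.model()
    return [model.item(i).text()
            for i in range(model.rowCount())
            if model.item(i).checkState() == Qt.Checked]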
class MultiProgressDialog(QDialog):
    def __init__(self, parent=None):
        super().__init__(parent)
@@ -4005,6 +4185,9 @@ class ViewerLauncherWidget(QWidget):
        btn7 = QPushButton("Open Functional Connectivity Viewer [BETA]")
        btn7.clicked.connect(lambda: self.open_participant_functional_connectivity_viewer(haemo_dict, epochs_dict))

+       btn8 = QPushButton("Open Group Functional Connectivity Viewer [BETA]")
+       btn8.clicked.connect(lambda: self.open_group_functional_connectivity_viewer(haemo_dict, group_dict, config_dict))
+
        btn4 = QPushButton("Open Inter-Group Viewer")
        btn4.clicked.connect(lambda: self.open_group_viewer(haemo_dict, cha_dict, df_ind, design_matrix, contrast_results_dict, group_dict))
@@ -4019,6 +4202,7 @@ class ViewerLauncherWidget(QWidget):
        layout.addWidget(btn2)
        layout.addWidget(btn3)
        layout.addWidget(btn7)
+       layout.addWidget(btn8)
        layout.addWidget(btn4)
        layout.addWidget(btn5)
        layout.addWidget(btn6)
@@ -4039,6 +4223,10 @@ class ViewerLauncherWidget(QWidget):
        self.participant_brain_viewer = ParticipantFunctionalConnectivityWidget(haemo_dict, epochs_dict)
        self.participant_brain_viewer.show()

+   def open_group_functional_connectivity_viewer(self, haemo_dict, group, config_dict):
+       self.participant_brain_viewer = GroupFunctionalConnectivityWidget(haemo_dict, group, config_dict)
+       self.participant_brain_viewer.show()
+
    def open_group_viewer(self, haemo_dict, cha_dict, df_ind, design_matrix, contrast_results_dict, group):
        self.participant_brain_viewer = GroupViewerWidget(haemo_dict, cha_dict, df_ind, design_matrix, contrast_results_dict, group)
        self.participant_brain_viewer.show()