Compare commits
3 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| c7d044beed | |||
| 76df19f332 | |||
| 22695a2281 |
22
changelog.md
22
changelog.md
@@ -1,3 +1,23 @@
|
||||
# Version 1.2.1
|
||||
|
||||
- Added a requirements.txt file to ensure compatibility
|
||||
- Added new options 'Missing Events Bypass' and 'Analysis Clearing Bypass' to the Preferences Menu
|
||||
- Missing Events Bypass allows comparing events in the Group Viewers even if not all participants in the group have the event present. Fixes [Issue 28](https://git.research.dezeeuw.ca/tyler/flares/issues/28)
|
||||
- Clicking Process after an analysis has been performed will now clear the existing analysis by default with a popup warning that the analysis will be cleared
|
||||
- Analysis Clearing Bypass will prevent the popup and will not clear the existing analysis data. Fixes [Issue 41](https://git.research.dezeeuw.ca/tyler/flares/issues/41)
|
||||
- Clicking 'Clear' should now actually properly clear all data. Hopefully fixes [Issue 9](https://git.research.dezeeuw.ca/tyler/flares/issues/9) for good
|
||||
- Setting SHORT_CHANNEL to False will now grey out SHORT_CHANNEL_REGRESSION, as it is impossible to regress what does not exist. Sets SHORT_CHANNEL_REGRESSION to False under the hood when it is greyed out regardless of what is displayed. Fixes [Issue 47](https://git.research.dezeeuw.ca/tyler/flares/issues/47)
|
||||
- Projects can now be saved if files have different parent folders. Fixes [Issue 48](https://git.research.dezeeuw.ca/tyler/flares/issues/48)
|
||||
- It is no longer possible to attempt a save before any data has been processed. A popup will now display if a save is attempted with nothing to save
|
||||
- Fixed a bug where LONG_CHANNEL_THRESH was not being applied in the processing steps
|
||||
- Added a new option in the Analysis window for Group Functional Connectivity. Implements [Issue 50](https://git.research.dezeeuw.ca/tyler/flares/issues/50)
|
||||
- Group Functional Connectivity is still in development and the results should currently be taken with a grain of salt
|
||||
- A warning is displayed when entering the Group Functional Connectivity Viewer disclosing this
|
||||
- Fixed a bug when updating optode positions that would prevent .txt files from being selected. Fixes [Issue 54](https://git.research.dezeeuw.ca/tyler/flares/issues/54)
|
||||
- Fixed a bug where the secondary download server would never get contacted if the primary failed
|
||||
- Automatic downloads will now ignore prerelease versions. Fixes [Issue 52](https://git.research.dezeeuw.ca/tyler/flares/issues/52)
|
||||
|
||||
|
||||
# Version 1.2.0
|
||||
|
||||
- This is a save-breaking release due to a new save file format. Please update your project files to ensure compatibility. Fixes [Issue 30](https://git.research.dezeeuw.ca/tyler/flares/issues/30)
|
||||
@@ -121,7 +141,7 @@
|
||||
- Added a group option when clicking on a participant's file
|
||||
- If no group is specified, the participant will be added to the "Default" group
|
||||
- Added option to update the optode positions in a snirf file from the Options menu (F6)
|
||||
- Fixed [Issue 3](https://git.research.dezeeuw.ca/tyler/flares/issues/3), [Issue 4](https://git.research.dezeeuw.ca/tyler/flares/issues/4), [Issue 17](https://git.research.dezeeuw.ca/tyler/flares/issues/17), [Issue 21](https://git.research.dezeeuw.ca/tyler/flares/issues/21), [Issue 22](https://git.research.dezeeuw.ca/tyler/flares/issues/22)
|
||||
- Fixed [Issue 3](https://git.research.dezeeuw.ca/tyler/flares/issues/3), [Issue 5](https://git.research.dezeeuw.ca/tyler/flares/issues/5), [Issue 17](https://git.research.dezeeuw.ca/tyler/flares/issues/17), [Issue 21](https://git.research.dezeeuw.ca/tyler/flares/issues/21), [Issue 22](https://git.research.dezeeuw.ca/tyler/flares/issues/22)
|
||||
|
||||
|
||||
# Version 1.0.1
|
||||
|
||||
190
flares.py
190
flares.py
@@ -3403,7 +3403,7 @@ def process_participant(file_path, progress_callback=None):
|
||||
fig_individual["short"] = fig_short_chans
|
||||
else:
|
||||
short_chans = None
|
||||
get_long_channels(raw, min_dist=SHORT_CHANNEL_THRESH, max_dist=LONG_CHANNEL_THRESH) # Don't update the existing raw
|
||||
raw = get_long_channels(raw, min_dist=0, max_dist=LONG_CHANNEL_THRESH) # keep both short channels and all channels up to the threshold length
|
||||
if progress_callback: progress_callback(4)
|
||||
logger.info("Step 4 Completed.")
|
||||
|
||||
@@ -3892,3 +3892,191 @@ def functional_connectivity_betas(raw_hbo, n_lines, vmin, event_name=None):
|
||||
vmax=1.0,
|
||||
colormap='hot' # Use 'hot' to make positive connections pop
|
||||
)
|
||||
|
||||
|
||||
|
||||
|
||||
def get_single_subject_beta_corr(raw_hbo, event_name=None, config=None):
    """Compute one participant's beta-series correlation matrix.

    Fits a trial-level GLM (each annotation is renamed to a unique
    ``<event>__trial_NNN`` tag so it gets its own regressors), averages the
    FIR-delay betas per trial into a beta series, z-scores it, removes the
    global signal by per-channel regression, and correlates channels.

    Parameters
    ----------
    raw_hbo : mne.io.Raw
        Haemoglobin data; only "hbo" channels are used (a copy is taken, the
        input is not modified).
    event_name : str or None
        If given, only trials whose tag starts with ``event_name + "__"`` are
        included; otherwise all trials are used.
    config : dict or None
        Per-participant processing options (STIM_DUR, HRF_MODEL, ...). When
        None, hard-coded FIR defaults are used.

    Returns
    -------
    (corr_matrix, ch_names) : (numpy.ndarray, list[str])
        Channel-by-channel Pearson correlation matrix and channel names, or
        ``(None, None)`` if no trials match ``event_name``.
    """
    raw_hbo = raw_hbo.copy().pick(picks="hbo")
    ann = raw_hbo.annotations

    # Rename annotations so the GLM produces one set of regressors per trial
    # (beta-series design) instead of one per condition.
    new_desc = [f"{desc}__trial_{i:03d}" for i, desc in enumerate(ann.description)]
    ann.description = np.array(new_desc)

    if config is None:
        # No per-participant config available: fall back to FIR defaults.
        design_matrix = make_first_level_design_matrix(
            raw=raw_hbo, hrf_model='fir',
            fir_delays=np.arange(0, 12, 1),
            drift_model='cosine', drift_order=1
        )
    else:
        # Build the design-matrix kwargs once; the short-channel branch only
        # adds nuisance regressors on top of the shared settings.
        dm_kwargs = dict(
            raw=raw_hbo,
            stim_dur=config.get("STIM_DUR"),
            hrf_model=config.get("HRF_MODEL"),
            drift_model=config.get("DRIFT_MODEL"),
            high_pass=config.get("HIGH_PASS"),
            drift_order=config.get("DRIFT_ORDER"),
            fir_delays=config.get("FIR_DELAYS"),
            min_onset=config.get("MIN_ONSET"),
            oversampling=config.get("OVERSAMPLING"),
        )
        if config.get("SHORT_CHANNEL_REGRESSION"):
            # Regress out the short-channel (superficial) signal.
            short_chans = get_short_channels(
                raw_hbo, max_dist=config.get("SHORT_CHANNEL_THRESH")
            )
            dm_kwargs["add_regs"] = short_chans.get_data().T
            dm_kwargs["add_reg_names"] = short_chans.ch_names
        design_matrix = make_first_level_design_matrix(**dm_kwargs)

    glm_results = run_glm(raw_hbo, design_matrix)
    betas = np.array(glm_results.theta())
    reg_names = list(design_matrix.columns)
    n_channels = betas.shape[0]

    # Collect the unique trial tags, optionally filtered by event name.
    trial_tags = sorted({
        col.split("_delay")[0] for col in reg_names
        if "__trial_" in col and (event_name is None or col.startswith(event_name + "__"))
    })

    if not trial_tags:
        return None, None

    # Build the beta series: one value per (channel, trial), averaging the
    # trial's FIR-delay regressors.
    beta_series = np.zeros((n_channels, len(trial_tags)))
    for t, tag in enumerate(trial_tags):
        idx = [i for i, col in enumerate(reg_names) if col.startswith(f"{tag}_delay")]
        beta_series[:, t] = np.mean(betas[:, idx], axis=1).flatten()
        #beta_series[:, t] = np.max(betas[:, idx], axis=1).flatten() #TODO: Figure out which one to use

    # Z-score and GSR (Global Signal Regression): remove the mean-across-
    # channels signal from each channel via a per-channel linear fit.
    beta_series = zscore(beta_series, axis=1)
    global_signal = np.mean(beta_series, axis=0)
    for i in range(n_channels):
        slope, _ = np.polyfit(global_signal, beta_series[i, :], 1)
        beta_series[i, :] -= (slope * global_signal)

    # Channel-by-channel Pearson correlation of the cleaned beta series.
    corr_matrix = np.corrcoef(beta_series)
    return corr_matrix, raw_hbo.ch_names
|
||||
|
||||
|
||||
def run_group_functional_connectivity(haemo_dict, config_dict, selected_paths, event_name, n_lines, vmin):
    """Aggregate per-participant connectivity and plot the group result.

    For each selected participant, computes a beta-series correlation matrix,
    Fisher z-transforms it, tests each connection against zero across
    participants (one-sample t-test, FDR-corrected), averages back to r, and
    plots only the significant connections above ``vmin``.

    Parameters
    ----------
    haemo_dict : dict
        Maps file path -> processed haemoglobin Raw object.
    config_dict : dict
        Maps file path -> per-participant config dict (may be missing).
    selected_paths : iterable of str
        Paths of the participants to include.
    event_name : str or None
        Event to analyse; None means all events.
    n_lines : int
        Maximum number of connections passed to the circle plot.
    vmin : float
        Minimum |r| for a connection to be shown.

    Returns
    -------
    None. Side effect: opens a connectivity-circle plot (or returns early
    with no plot if no participant produced a usable matrix).
    """
    from scipy.stats import ttest_1samp

    all_z_matrices = []
    common_names = None

    for path in selected_paths:
        raw = haemo_dict.get(path)
        config = config_dict.get(path)
        if raw is None:
            continue

        corr, names = get_single_subject_beta_corr(raw, event_name, config)

        if corr is not None:
            # Fisher z-transform so matrices can be averaged/tested; clip to
            # keep arctanh finite on the (r == +/-1) diagonal.
            z_mat = np.arctanh(np.clip(corr, -0.99, 0.99))
            all_z_matrices.append(z_mat)
            common_names = names

    # Nothing usable (e.g. no participant had the requested event).
    if not all_z_matrices:
        return

    # Stack into (participants, channels, channels).
    group_z_data = np.array(all_z_matrices)

    # Test whether each connection's mean z differs from 0 across
    # participants. NOTE(review): with a single participant (or identical
    # matrices) the variance is zero and scipy warns about precision loss;
    # the resulting p-values are NaN and nothing will survive correction.
    _, p_values = ttest_1samp(group_z_data, popmean=0, axis=0)

    # Multiple-comparisons correction (FDR) over the unique connections
    # (upper triangle only, excluding the diagonal).
    n_channels = p_values.shape[0]
    triu_indices = np.triu_indices(n_channels, k=1)
    flat_p = p_values[triu_indices]

    reject, _ = multipletests(flat_p, method='fdr_bh', alpha=0.05)[:2]

    # Average z across participants, convert back to r.
    avg_r = np.tanh(np.mean(group_z_data, axis=0))
    sig_avg_r = np.zeros_like(avg_r)

    # Keep only connections that are significant AND above the |r| threshold,
    # mirroring into both triangles for the symmetric plot.
    for idx, is_sig in enumerate(reject):
        row, col = triu_indices[0][idx], triu_indices[1][idx]
        r_val = avg_r[row, col]

        if is_sig and abs(r_val) >= vmin:
            sig_avg_r[row, col] = sig_avg_r[col, row] = r_val

    # Plot the significant group-level connections.
    plot_connectivity_circle(
        sig_avg_r, common_names, n_lines=n_lines,
        title=f"Group Connectivity: {event_name if event_name else 'All Events'}",
        vmin=vmin, vmax=1.0, colormap='hot'
    )
|
||||
BIN
requirements.txt
Normal file
BIN
requirements.txt
Normal file
Binary file not shown.
Reference in New Issue
Block a user