Compare commits: 7007478c3b...v1.2.0 (5 commits)
| Author | SHA1 | Date |
|---|---|---|
| | f1dd9bd184 | |
| | dd2ac058af | |
| | 98c749477c | |
| | 92973da658 | |
| | f82978e2e8 | |
.gitignore (vendored): 5 changes
```diff
@@ -175,4 +175,7 @@ cython_debug/
 .pypirc
 
 /individual_images
 *.xlsx
+*.csv
+*.snirf
+*.json
```
changelog.md: 16 changes
```diff
@@ -1,7 +1,9 @@
 # Version 1.2.0
 
 - This is a save-breaking release due to a new save file format. Please update your project files to ensure compatibility. Fixes [Issue 30](https://git.research.dezeeuw.ca/tyler/flares/issues/30)
 - Added new parameters to the right side of the screen
 - These parameters include SHOW_OPTODE_NAMES, SECONDS_TO_STRIP_HR, MAX_LOW_HR, MAX_HIGH_HR, SMOOTHING_WINDOW_HR, HEART_RATE_WINDOW, BAD_CHANNELS_HANDLING, MAX_DIST, MIN_NEIGHBORS, L_TRANS_BANDWIDTH, H_TRANS_BANDWIDTH, RESAMPLE, RESAMPLE_FREQ, STIM_DUR, HRF_MODEL, HIGH_PASS, DRIFT_ORDER, FIR_DELAYS, MIN_ONSET, OVERSAMPLING, SHORT_CHANNEL_REGRESSION, NOISE_MODEL, BINS, and VERBOSITY.
 - Certain parameters now have dependencies on other parameters and will grey out when they are not used
 - All the new parameters have default values matching the underlying values in version 1.1.7
 - The order of the parameters has changed to match the order in which the code runs when the Process button is clicked
 - Moved TIME_WINDOW_START and TIME_WINDOW_END to the 'Other' category
```
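A note on the grey-out item above: the GUI code is not part of this compare, so the following is only a minimal sketch of how such parameter dependencies can be wired in Qt (plausible here since flares.py imports pyvistaqt, but not confirmed). All widget names and the specific parameter pairing are hypothetical.

```python
# Hypothetical sketch: grey out dependent parameter widgets when their
# controlling option is unchecked. HEART_RATE and SECONDS_TO_STRIP_HR are
# parameter names from the changelog; the wiring below is assumed.
import sys
from PyQt5.QtWidgets import (QApplication, QCheckBox, QDoubleSpinBox,
                             QFormLayout, QWidget)

app = QApplication(sys.argv)
panel = QWidget()
form = QFormLayout(panel)

heart_rate = QCheckBox("HEART_RATE")
seconds_to_strip_hr = QDoubleSpinBox()  # only meaningful when HEART_RATE is on
max_low_hr = QDoubleSpinBox()

form.addRow(heart_rate)
form.addRow("SECONDS_TO_STRIP_HR", seconds_to_strip_hr)
form.addRow("MAX_LOW_HR", max_low_hr)

def apply_dependencies(checked: bool) -> None:
    # setEnabled(False) is what renders a Qt widget greyed out
    for dependent in (seconds_to_strip_hr, max_low_hr):
        dependent.setEnabled(checked)

heart_rate.toggled.connect(apply_dependencies)
apply_dependencies(heart_rate.isChecked())

panel.show()
sys.exit(app.exec_())
```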
```diff
@@ -15,6 +17,20 @@
 - Fixed the User Guide window to properly display information about the 24 stages and added a link to the Git wiki page
 - MAX_WORKERS should now properly respect the value set
 - Added a new CSV export option to be used by other applications
 - Added support for updating optode positions directly from an .xlsx file from a Polhemus system
 - Fixed an issue where the dropdowns in the Viewer windows would immediately open and close when using a trackpad
 - glover and spm hrf models now function as intended without crashing. Currently, group analysis is still supported only by fir. Fixes [Issue 8](https://git.research.dezeeuw.ca/tyler/flares/issues/8)
 - Clicking 'Clear' should now properly clear all data. Fixes [Issue 9](https://git.research.dezeeuw.ca/tyler/flares/issues/9)
 - Revamped the fold channels viewer to not hang the application and to better process multiple participants at once. Fixes [Issue 34](https://git.research.dezeeuw.ca/tyler/flares/issues/34), [Issue 31](https://git.research.dezeeuw.ca/tyler/flares/issues/31)
 - Added a Preferences menu to the navigation bar
 - Two preferences have been added that allow bypassing the warnings for detected 2D data and for save files from previous, potentially breaking versions
 - Fixed a typo when saving a CSV that stated a SNIRF was being saved
 - Loading a save file now properly restores AGE, GENDER, and GROUP. Fixes [Issue 40](https://git.research.dezeeuw.ca/tyler/flares/issues/40)
 - Saving a project no longer makes the main window become unresponsive. Fixes [Issue 43](https://git.research.dezeeuw.ca/tyler/flares/issues/43)
 - Memory usage should no longer grow when generating lots of images multiple times. Fixes [Issue 36](https://git.research.dezeeuw.ca/tyler/flares/issues/36)
 - Added a new option in the Analysis window for Functional Connectivity
 - Functional connectivity is still in development and its results should currently be taken with a grain of salt
 - A warning disclosing this is displayed when entering the Functional Connectivity Viewer
 
 
 # Version 1.1.7
 
```
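The memory fix for Issue 36 pairs with the convert_fig_dict_to_png_bytes helper that fold_channels calls in the flares.py diff below. Its body is not shown in this compare, so the following is only a sketch of the pattern its name and call sites suggest: render each figure to PNG bytes, then close it so Matplotlib releases the canvas.

```python
import io
import matplotlib
matplotlib.use("Agg")  # render off-screen; no GUI needed
import matplotlib.pyplot as plt

def convert_fig_dict_to_png_bytes(fig_dict: dict) -> dict:
    """Sketch of the assumed helper: PNG bytes out, figures closed."""
    png_bytes = {}
    for name, fig in fig_dict.items():
        buf = io.BytesIO()
        fig.savefig(buf, format="png", dpi=150)
        png_bytes[name] = buf.getvalue()
        plt.close(fig)  # without this, repeated generation leaks figures
    return png_bytes

# Repeated generation now stays flat in memory
for _ in range(3):
    fig, ax = plt.subplots()
    ax.plot([0, 1], [0, 1])
    images = convert_fig_dict_to_png_bytes({"main": fig})
```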
flares.py: 359 changes
```diff
@@ -47,9 +47,9 @@ from nilearn.glm.regression import OLSModel
 import statsmodels.formula.api as smf # type: ignore
 from statsmodels.stats.multitest import multipletests
 
 from scipy import stats
 from scipy.spatial.distance import cdist
 from scipy.signal import welch, butter, filtfilt # type: ignore
 from scipy.stats import pearsonr, zscore, t
 
 import pywt # type: ignore
 import neurokit2 as nk # type: ignore
@@ -58,6 +58,7 @@ import neurokit2 as nk # type: ignore
 import pyvistaqt # type: ignore
 import vtkmodules.util.data_model
 import vtkmodules.util.execution_model
+import xlrd
 
 # External library imports for mne
 from mne import (
```
```diff
@@ -90,6 +91,9 @@ from mne_nirs.io.fold import fold_channel_specificity # type: ignore
 from mne_nirs.preprocessing import peak_power # type: ignore
 from mne_nirs.statistics._glm_level_first import RegressionResults # type: ignore
 
+from mne_connectivity.viz import plot_connectivity_circle
+from mne_connectivity import envelope_correlation, spectral_connectivity_epochs, spectral_connectivity_time
+
 # Needs to be set for mne
 os.environ["SUBJECTS_DIR"] = str(data_path()) + "/subjects" # type: ignore
```
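These mne_connectivity imports are new in this range and back the Functional Connectivity feature from the changelog. As a quick orientation to the API the new functions rely on, here is a tiny standalone check of envelope_correlation on random data; the (n_epochs, n_signals, n_times) shape follows mne_connectivity's convention, and the keyword values mirror the call later in this diff.

```python
import numpy as np
from mne_connectivity import envelope_correlation

rng = np.random.default_rng(0)
data = rng.normal(size=(4, 3, 200))  # (n_epochs, n_signals, n_times)

env = envelope_correlation(data, orthogonalize=False, absolute=True)
corr = env.get_data(output="dense")  # dense per-epoch connectivity matrices
print(corr.shape)
```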
```diff
@@ -123,8 +127,6 @@ SECONDS_TO_KEEP: float
 OPTODE_PLACEMENT: bool
 SHOW_OPTODE_NAMES: bool
 
 HEART_RATE: bool
 
 SHORT_CHANNEL: bool
 SHORT_CHANNEL_THRESH: float
 LONG_CHANNEL_THRESH: float
@@ -189,9 +191,9 @@ TIME_WINDOW_END: int
 MAX_WORKERS: int
 VERBOSITY: bool
 
-AGE = 25 # Assume 25 if not set from the GUI. This will result in a reasonable PPF
-GENDER = ""
-GROUP = "Default"
+AGE: int = 25 # Assume 25 if not set from the GUI. This will result in a reasonable PPF
+GENDER: str = ""
+GROUP: str = "Default"
 
 # These are parameters that are required for the analysis
 REQUIRED_KEYS: dict[str, Any] = {
```
```diff
@@ -928,11 +930,12 @@ def interpolate_fNIRS_bads_weighted_average(raw, max_dist=0.03, min_neighbors=2)
     raw.info['bads'] = [ch for ch in raw.info['bads'] if ch not in bad_ch_to_remove]
 
     print("\nInterpolation complete.\n")
     print("Bads cleared:", raw.info['bads'])
     raw.info['bads'] = []
 
+    for ch in raw.info['bads']:
+        print(f"Channel {ch} still marked as bad.")
 
     print("Bads cleared:", raw.info['bads'])
     fig_raw_after = raw.plot(duration=raw.times[-1], n_channels=raw.info['nchan'], title="After interpolation", show=False)
 
     return raw, fig_raw_after
```
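The hunk above only shows the tail of interpolate_fNIRS_bads_weighted_average(raw, max_dist=0.03, min_neighbors=2). For context, a minimal sketch of the distance-weighted averaging that the signature and the cdist import suggest; the array layout and inverse-distance weighting are assumptions, not the project's actual implementation.

```python
import numpy as np
from scipy.spatial.distance import cdist

def weighted_average_interpolation(data, positions, bad_idx,
                                   max_dist=0.03, min_neighbors=2):
    """Replace one bad channel with the inverse-distance-weighted average
    of good channels within max_dist metres (illustrative sketch)."""
    dists = cdist(positions[bad_idx:bad_idx + 1], positions)[0]
    good = [i for i in range(len(positions))
            if i != bad_idx and dists[i] <= max_dist]
    if len(good) < min_neighbors:
        return None  # too few neighbours; leave the channel marked bad
    weights = 1.0 / dists[good]
    weights /= weights.sum()
    return np.average(data[good], axis=0, weights=weights)

# Toy example: 4 channels x 10 samples, channel 0 is bad
rng = np.random.default_rng(0)
data = rng.normal(size=(4, 10))
positions = np.array([[0.0, 0, 0], [0.01, 0, 0], [0.02, 0, 0], [0.1, 0, 0]])
print(weighted_average_interpolation(data, positions, bad_idx=0))
```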
```diff
@@ -1333,7 +1336,7 @@ def make_design_matrix(raw_haemo, short_chans):
         drift_model=DRIFT_MODEL,
         high_pass=HIGH_PASS,
         drift_order=DRIFT_ORDER,
-        fir_delays=range(15),
+        fir_delays=FIR_DELAYS,
         add_regs=short_chans.get_data().T,
         add_reg_names=short_chans.ch_names,
         min_onset=MIN_ONSET,
@@ -1347,7 +1350,7 @@ def make_design_matrix(raw_haemo, short_chans):
         drift_model=DRIFT_MODEL,
         high_pass=HIGH_PASS,
         drift_order=DRIFT_ORDER,
-        fir_delays=range(15),
+        fir_delays=FIR_DELAYS,
         min_onset=MIN_ONSET,
         oversampling=OVERSAMPLING
     )
```
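Both hunks above replace the hard-coded fir_delays=range(15) with the new FIR_DELAYS parameter from the changelog. For context, a self-contained sketch of what fir_delays does to a design matrix, using nilearn (which flares.py already imports); the event timings and sampling here are toy numbers, not the app's data.

```python
import numpy as np
import pandas as pd
from nilearn.glm.first_level import make_first_level_design_matrix

# One condition sampled at 1 Hz for 60 s (toy numbers)
frame_times = np.arange(60.0)
events = pd.DataFrame({"onset": [5.0, 30.0],
                       "duration": [5.0, 5.0],
                       "trial_type": ["Task", "Task"]})

FIR_DELAYS = range(15)  # the old hard-coded value, now user-configurable

dm = make_first_level_design_matrix(
    frame_times,
    events=events,
    hrf_model="fir",
    fir_delays=FIR_DELAYS,
    drift_model="cosine",
)
# One regressor per condition per delay: Task_delay_0 ... Task_delay_14
print([c for c in dm.columns if "_delay_" in c])
```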
```diff
@@ -1659,8 +1662,11 @@ def fold_channels(raw: BaseRaw) -> None:
     landmark_color_map = {landmark: colors[i % len(colors)] for i, landmark in enumerate(landmarks)}
 
     # Iterate over each channel
+    print(len(hbo_channel_names))
+
     for idx, channel_name in enumerate(hbo_channel_names):
 
+        print(idx, channel_name)
         # Run the fOLD on the selected channel
         channel_data = raw.copy().pick(picks=channel_name) # type: ignore
@@ -1703,6 +1709,7 @@ def fold_channels(raw: BaseRaw) -> None:
 
     landmark_specificity_data = []
 
+    # TODO: Fix this
     if True:
         handles = [
@@ -1725,8 +1732,9 @@ def fold_channels(raw: BaseRaw) -> None:
     for ax in axes[len(hbo_channel_names):]:
         ax.axis('off')
 
-    plt.show()
-    return fig, legend_fig
+    #plt.show()
+    fig_dict = {"main": fig, "legend": legend_fig}
+    return convert_fig_dict_to_png_bytes(fig_dict)
```
```diff
@@ -2246,8 +2254,14 @@ def brain_3d_visualization(raw_haemo, df_cha, selected_event, t_or_theta: Litera
     # Get all activity conditions
-    for cond in [f'{selected_event}']:
+    if True:
         ch_summary = df_cha.query(f"Condition.str.startswith('{cond}_delay_') and Chroma == 'hbo'", engine='python') # type: ignore
 
+        print(ch_summary)
+
+        if ch_summary.empty:
+            # not fir model
+            print("No data found for this condition.")
+            ch_summary = df_cha.query(f"Condition in [@cond] and Chroma == 'hbo'", engine='python')
 
     # Use ordinary least squares (OLS) if only one participant
     # TODO: Fix.
@@ -2269,6 +2283,9 @@ def brain_3d_visualization(raw_haemo, df_cha, selected_event, t_or_theta: Litera
     valid_channels = ch_summary["ch_name"].unique().tolist() # type: ignore
     raw_for_plot = raw_haemo.copy().pick(picks=valid_channels) # type: ignore
 
+    print(f"DEBUG: Model DF rows: {len(model_df)}")
+    print(f"DEBUG: Raw channels: {len(raw_for_plot.ch_names)}")
+
     brain = plot_3d_evoked_array(raw_for_plot.pick(picks="hbo"), model_df, view="dorsal", distance=0.02, colorbar=True, clim=clim, mode="weighted", size=(800, 700)) # type: ignore
 
     if show_optodes == 'all' or show_optodes == 'sensors':
```
```diff
@@ -2804,7 +2821,7 @@ def run_second_level_analysis(df_contrasts, raw, p, bounds):
     result = model.fit(Y)
 
     t_val = result.t(0).item()
-    p_val = 2 * stats.t.sf(np.abs(t_val), df=result.df_model)
+    p_val = 2 * t.sf(np.abs(t_val), df=result.df_model)
     mean_beta = np.mean(Y)
 
     group_results.append({
```
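The change above is behavior-preserving: stats.t.sf and t.sf are the same scipy function, reached through the direct import of t added at the top of the file. For reference, the two-sided p-value from a t statistic:

```python
import numpy as np
from scipy import stats
from scipy.stats import t

t_val, df = 2.1, 14
p_two_sided = 2 * t.sf(np.abs(t_val), df=df)
assert p_two_sided == 2 * stats.t.sf(np.abs(t_val), df=df)  # identical call
print(round(p_two_sided, 3))  # ~0.054
```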
```diff
@@ -3299,7 +3316,7 @@ def hr_calc(raw):
     # --- Parameters for PSD ---
     desired_bin_hz = 0.1
     nperseg = int(sfreq / desired_bin_hz)
-    hr_range = (30, 180)
+    hr_range = (30, 180) # TODO: Should this not use the user-defined values?
 
     # --- Function to find strongest local peak ---
     def find_hr_from_psd(ch_data):
```
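In hr_calc above, nperseg = sfreq / desired_bin_hz yields Welch bins of roughly 0.1 Hz, and hr_range = (30, 180) bpm corresponds to 0.5 to 3 Hz. The project's find_hr_from_psd body is not shown in this hunk, so the following is a self-contained sketch of the peak-picking idea on a synthetic signal (the sampling rate and the 72 bpm component are made up).

```python
import numpy as np
from scipy.signal import welch

sfreq = 10.0                           # Hz (assumed)
desired_bin_hz = 0.1
nperseg = int(sfreq / desired_bin_hz)  # 100 samples -> ~0.1 Hz bins
hr_range = (30, 180)                   # bpm

# Synthetic channel: 72 bpm (1.2 Hz) cardiac component plus noise
times = np.arange(0, 120, 1 / sfreq)
ch_data = np.sin(2 * np.pi * 1.2 * times) + 0.5 * np.random.randn(times.size)

freqs, psd = welch(ch_data, fs=sfreq, nperseg=nperseg)
band = (freqs >= hr_range[0] / 60) & (freqs <= hr_range[1] / 60)
hr_bpm = 60 * freqs[band][np.argmax(psd[band])]
print(f"Estimated heart rate: {hr_bpm:.0f} bpm")  # ~72
```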
```diff
@@ -3343,6 +3360,7 @@ def process_participant(file_path, progress_callback=None):
     logger.info("Step 1 Completed.")
 
     # Step 2: Trimming
+    # TODO: Clean this into a method
     if TRIM:
         if hasattr(raw, 'annotations') and len(raw.annotations) > 0:
             # Get time of first event
```
```diff
@@ -3527,16 +3545,18 @@ def process_participant(file_path, progress_callback=None):
     logger.info("19")
 
     # Step 20: Generate GLM Results
-    fig_glm_result = plot_glm_results(file_path, raw_haemo, glm_est, design_matrix)
-    for name, fig in fig_glm_result:
-        fig_individual[f"GLM {name}"] = fig
+    if "derivative" not in HRF_MODEL.lower():
+        fig_glm_result = plot_glm_results(file_path, raw_haemo, glm_est, design_matrix)
+        for name, fig in fig_glm_result:
+            fig_individual[f"GLM {name}"] = fig
     if progress_callback: progress_callback(20)
     logger.info("20")
 
     # Step 21: Generate Channel Significance
-    fig_significance = individual_significance(raw_haemo, glm_est)
-    for name, fig in fig_significance:
-        fig_individual[f"Significance {name}"] = fig
+    if HRF_MODEL == "fir":
+        fig_significance = individual_significance(raw_haemo, glm_est)
+        for name, fig in fig_significance:
+            fig_individual[f"Significance {name}"] = fig
     if progress_callback: progress_callback(21)
     logger.info("21")
```
```diff
@@ -3568,30 +3588,31 @@ def process_participant(file_path, progress_callback=None):
         [(column, contrast_matrix[i]) for i, column in enumerate(design_matrix.columns)]
     )
 
-    all_delay_cols = [col for col in design_matrix.columns if "_delay_" in col]
-    all_conditions = sorted({col.split("_delay_")[0] for col in all_delay_cols})
+    if HRF_MODEL == "fir":
+        all_delay_cols = [col for col in design_matrix.columns if "_delay_" in col]
+        all_conditions = sorted({col.split("_delay_")[0] for col in all_delay_cols})
 
-    if not all_conditions:
-        raise ValueError("No FIR regressors found in the design matrix.")
+        if not all_conditions:
+            raise ValueError("No FIR regressors found in the design matrix.")
 
-    # Build contrast vectors for each condition
-    contrast_dict = {}
+        # Build contrast vectors for each condition
+        contrast_dict = {}
 
-    for condition in all_conditions:
-        delay_cols = [
-            col for col in all_delay_cols
-            if col.startswith(f"{condition}_delay_") and
-            TIME_WINDOW_START <= int(col.split("_delay_")[-1]) <= TIME_WINDOW_END
-        ]
+        for condition in all_conditions:
+            delay_cols = [
+                col for col in all_delay_cols
+                if col.startswith(f"{condition}_delay_") and
+                TIME_WINDOW_START <= int(col.split("_delay_")[-1]) <= TIME_WINDOW_END
+            ]
 
-        if not delay_cols:
-            continue  # skip if no columns found (shouldn't happen?)
+            if not delay_cols:
+                continue  # skip if no columns found (shouldn't happen?)
 
-        # Average across all delay regressors for this condition
-        contrast_vector = np.sum([basic_conts[col] for col in delay_cols], axis=0)
-        contrast_vector /= len(delay_cols)
+            # Average across all delay regressors for this condition
+            contrast_vector = np.sum([basic_conts[col] for col in delay_cols], axis=0)
+            contrast_vector /= len(delay_cols)
 
-        contrast_dict[condition] = contrast_vector
+            contrast_dict[condition] = contrast_vector
 
     if progress_callback: progress_callback(22)
     logger.info("22")
```
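The guarded block above collapses each condition's FIR delay regressors inside [TIME_WINDOW_START, TIME_WINDOW_END] into one averaged contrast vector. A toy version of that averaging with made-up column names, to make the indexing concrete:

```python
import numpy as np

# Hypothetical design-matrix columns and their one-hot contrast vectors
columns = ["Task_delay_0", "Task_delay_1", "Task_delay_2", "constant"]
basic_conts = {c: np.eye(len(columns))[i] for i, c in enumerate(columns)}

TIME_WINDOW_START, TIME_WINDOW_END = 1, 2
delay_cols = [c for c in columns if "_delay_" in c and
              TIME_WINDOW_START <= int(c.split("_delay_")[-1]) <= TIME_WINDOW_END]

contrast_vector = np.sum([basic_conts[c] for c in delay_cols], axis=0)
contrast_vector /= len(delay_cols)
print(contrast_vector)  # [0. 0.5 0.5 0.] -> average of delays 1 and 2
```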
```diff
@@ -3599,11 +3620,13 @@ def process_participant(file_path, progress_callback=None):
     # Step 23: Compute Contrast Results
    contrast_results = {}
 
-    for cond, contrast_vector in contrast_dict.items():
-        contrast = glm_est.compute_contrast(contrast_vector) # type: ignore
-        df = contrast.to_dataframe()
-        df["ID"] = file_path
-        contrast_results[cond] = df
+    if HRF_MODEL == "fir":
+
+        for cond, contrast_vector in contrast_dict.items():
+            contrast = glm_est.compute_contrast(contrast_vector) # type: ignore
+            df = contrast.to_dataframe()
+            df["ID"] = file_path
+            contrast_results[cond] = df
 
     cha["ID"] = file_path
```
```diff
@@ -3617,8 +3640,18 @@ def process_participant(file_path, progress_callback=None):
     if progress_callback: progress_callback(25)
     logger.info("25")
 
-    return raw_haemo, epochs, fig_bytes, cha, contrast_results, df_ind, design_matrix, AGE, GENDER, GROUP, True
+    # TODO: Tidy up
+    # Extract the parameters this file was run with. No need to return age, gender, group?
+    config = {
+        k: globals()[k]
+        for k in __annotations__
+        if k in globals() and k != "REQUIRED_KEYS"
+    }
+
+    print(config)
+
+    return raw_haemo, config, epochs, fig_bytes, cha, contrast_results, df_ind, design_matrix, True
```
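The new return path above snapshots every annotated module-level parameter through __annotations__, which is why AGE, GENDER, and GROUP gained type annotations earlier in this diff: only annotated globals appear in the snapshot. A minimal standalone illustration of the mechanism (parameter values are illustrative):

```python
from typing import Any

# Annotated module-level parameters, as in flares.py
AGE: int = 25
GENDER: str = ""
GROUP: str = "Default"
REQUIRED_KEYS: dict[str, Any] = {}

# Module-level __annotations__ maps only annotated names, so the comprehension
# captures declared parameters and skips the rest of the namespace
config = {
    k: globals()[k]
    for k in __annotations__
    if k in globals() and k != "REQUIRED_KEYS"
}
print(config)  # {'AGE': 25, 'GENDER': '', 'GROUP': 'Default'}
```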
```diff
@@ -3628,4 +3661,234 @@ def sanitize_paths_for_pickle(raw_haemo, epochs):
 
     # Fix epochs._raw._filenames
     if hasattr(epochs, '_raw') and hasattr(epochs._raw, '_filenames'):
         epochs._raw._filenames = [str(p) for p in epochs._raw._filenames]
+
+
+def functional_connectivity_spectral_epochs(epochs, n_lines, vmin):
+
+    # will crash without this load
+    epochs.load_data()
+    hbo_epochs = epochs.copy().pick(picks="hbo")
+    data = hbo_epochs.get_data()
+    names = hbo_epochs.ch_names
+    sfreq = hbo_epochs.info["sfreq"]
+    con = spectral_connectivity_epochs(
+        data,
+        method=["coh", "plv"],
+        mode="multitaper",
+        sfreq=sfreq,
+        fmin=0.04,
+        fmax=0.2,
+        faverage=True,
+        verbose=True
+    )
+
+    con_coh, con_plv = con
+
+    coh = con_coh.get_data(output="dense").squeeze()
+    plv = con_plv.get_data(output="dense").squeeze()
+
+    np.fill_diagonal(coh, 0)
+    np.fill_diagonal(plv, 0)
+
+    plot_connectivity_circle(
+        coh,
+        names,
+        title="fNIRS Functional Connectivity (HbO - Coherence)",
+        n_lines=n_lines,
+        vmin=vmin
+    )
+
+
+def functional_connectivity_spectral_time(epochs, n_lines, vmin):
+
+    # will crash without this load
+    epochs.load_data()
+    hbo_epochs = epochs.copy().pick(picks="hbo")
+    data = hbo_epochs.get_data()
+    names = hbo_epochs.ch_names
+    sfreq = hbo_epochs.info["sfreq"]
+
+    freqs = np.linspace(0.04, 0.2, 10)
+    n_cycles = freqs * 2
+
+    con = spectral_connectivity_time(
+        data,
+        freqs=freqs,
+        method=["coh", "plv"],
+        mode="multitaper",
+        sfreq=sfreq,
+        fmin=0.04,
+        fmax=0.2,
+        n_cycles=n_cycles,
+        faverage=True,
+        verbose=True
+    )
+
+    con_coh, con_plv = con
+
+    coh = con_coh.get_data(output="dense").squeeze()
+    plv = con_plv.get_data(output="dense").squeeze()
+
+    np.fill_diagonal(coh, 0)
+    np.fill_diagonal(plv, 0)
+
+    plot_connectivity_circle(
+        coh,
+        names,
+        title="fNIRS Functional Connectivity (HbO - Coherence)",
+        n_lines=n_lines,
+        vmin=vmin
+    )
+
+
+def functional_connectivity_envelope(epochs, n_lines, vmin):
+
+    # will crash without this load
+    epochs.load_data()
+    hbo_epochs = epochs.copy().pick(picks="hbo")
+    data = hbo_epochs.get_data()
+
+    env = envelope_correlation(
+        data,
+        orthogonalize=False,
+        absolute=True
+    )
+    env_data = env.get_data(output="dense")
+
+    env_corr = env_data.mean(axis=0)
+
+    env_corr = np.squeeze(env_corr)
+
+    np.fill_diagonal(env_corr, 0)
+
+    plot_connectivity_circle(
+        env_corr,
+        hbo_epochs.ch_names,
+        title="fNIRS HbO Envelope Correlation (Task Connectivity)",
+        n_lines=n_lines,
+        vmin=vmin
+    )
+
+
+def functional_connectivity_betas(raw_hbo, n_lines, vmin, event_name=None):
+
+    raw_hbo = raw_hbo.copy().pick(picks="hbo")
+    onsets = raw_hbo.annotations.onset
+
+    # CRITICAL: Update the Raw object's annotations so the GLM sees unique events
+    ann = raw_hbo.annotations
+    new_desc = []
+
+    for i, desc in enumerate(ann.description):
+        new_desc.append(f"{desc}__trial_{i:03d}")
+
+    ann.description = np.array(new_desc)
+
+    # should use user-defined parameters
+    design_matrix = make_first_level_design_matrix(
+        raw=raw_hbo,
+        hrf_model='fir',
+        fir_delays=np.arange(0, 12, 1),
+        drift_model='cosine',
+        drift_order=1
+    )
+
+    # 3. Run GLM & Extract Betas
+    glm_results = run_glm(raw_hbo, design_matrix)
+    betas = np.array(glm_results.theta())
+    reg_names = list(design_matrix.columns)
+
+    n_channels = betas.shape[0]
+
+    # ------------------------------------------------------------------
+    # 5. Find unique trial tags (optionally filtered by event)
+    # ------------------------------------------------------------------
+    trial_tags = sorted({
+        col.split("_delay")[0]
+        for col in reg_names
+        if (
+            ("__trial_" in col)
+            and (event_name is None or col.startswith(event_name + "__"))
+        )
+    })
+
+    if len(trial_tags) == 0:
+        raise ValueError(f"No trials found for event_name={event_name}")
+
+    # ------------------------------------------------------------------
+    # 6. Build beta series (average across FIR delays per trial)
+    # ------------------------------------------------------------------
+    beta_series = np.zeros((n_channels, len(trial_tags)))
+
+    for t, tag in enumerate(trial_tags):
+        idx = [
+            i for i, col in enumerate(reg_names)
+            if col.startswith(f"{tag}_delay")
+        ]
+        beta_series[:, t] = np.mean(betas[:, idx], axis=1).flatten()
+
+    # n_channels, n_trials = betas.shape[0], len(onsets)
+    # beta_series = np.zeros((n_channels, n_trials))
+
+    # for t in range(n_trials):
+    #     trial_indices = [i for i, col in enumerate(reg_names) if col.startswith(f"trial_{t:03d}_delay")]
+    #     if trial_indices:
+    #         beta_series[:, t] = np.mean(betas[:, trial_indices], axis=1).flatten()
+
+    # Normalize each channel so they are on the same scale
+    # Without this, everything is connected to everything. Apparently this is a big issue in fNIRS?
+    beta_series = zscore(beta_series, axis=1)
+
+    global_signal = np.mean(beta_series, axis=0)
+    beta_series_clean = np.zeros_like(beta_series)
+    for i in range(n_channels):
+        slope, _ = np.polyfit(global_signal, beta_series[i, :], 1)
+        beta_series_clean[i, :] = beta_series[i, :] - (slope * global_signal)
+
+    # 4. Correlation & Strict Filtering
+    corr_matrix = np.zeros((n_channels, n_channels))
+    p_matrix = np.ones((n_channels, n_channels))
+
+    for i in range(n_channels):
+        for j in range(i + 1, n_channels):
+            r, p = pearsonr(beta_series_clean[i, :], beta_series_clean[j, :])
+            corr_matrix[i, j] = corr_matrix[j, i] = r
+            p_matrix[i, j] = p_matrix[j, i] = p
+
+    # 5. High-Bar Thresholding
+    reject, _ = multipletests(p_matrix[np.triu_indices(n_channels, k=1)], method='fdr_bh', alpha=0.05)[:2]
+    sig_corr_matrix = np.zeros_like(corr_matrix)
+    triu = np.triu_indices(n_channels, k=1)
+
+    for idx, is_sig in enumerate(reject):
+        r_val = corr_matrix[triu[0][idx], triu[1][idx]]
+        # Only keep the absolute strongest connections
+        if is_sig and abs(r_val) > 0.7:
+            sig_corr_matrix[triu[0][idx], triu[1][idx]] = r_val
+            sig_corr_matrix[triu[1][idx], triu[0][idx]] = r_val
+
+    # 6. Plot
+    plot_connectivity_circle(
+        sig_corr_matrix,
+        raw_hbo.ch_names,
+        title="Strictly Filtered Connectivity (TDDR + GSR + Z-Score)",
+        n_lines=None,
+        vmin=0.7,
+        vmax=1.0,
+        colormap='hot' # Use 'hot' to make positive connections pop
+    )
```
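functional_connectivity_betas above keeps only edges that survive Benjamini-Hochberg FDR across the upper triangle of the p-value matrix and that also pass the |r| > 0.7 bar. A compact standalone illustration of exactly that thresholding step on toy matrices:

```python
import numpy as np
from statsmodels.stats.multitest import multipletests

rng = np.random.default_rng(1)
n = 6
# Symmetric toy correlation and p-value matrices
r = rng.uniform(-1, 1, (n, n)); r = (r + r.T) / 2; np.fill_diagonal(r, 0)
p = rng.uniform(0, 0.1, (n, n)); p = (p + p.T) / 2; np.fill_diagonal(p, 1)

triu = np.triu_indices(n, k=1)
reject, _, _, _ = multipletests(p[triu], method="fdr_bh", alpha=0.05)

sig = np.zeros_like(r)
for idx, keep in enumerate(reject):
    i, j = triu[0][idx], triu[1][idx]
    if keep and abs(r[i, j]) > 0.7:  # the 'high bar' from the diff
        sig[i, j] = sig[j, i] = r[i, j]
print(np.count_nonzero(sig) // 2, "edges survive")
```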