From a3792302617ace9e932fe1ca78937c9cb8d7ac92 Mon Sep 17 00:00:00 2001
From: Taylor Salo
Date: Sat, 27 Apr 2024 13:53:14 -0400
Subject: [PATCH 1/3] Detrend data before running PCA.

---
 tedana/decomposition/pca.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/tedana/decomposition/pca.py b/tedana/decomposition/pca.py
index b252ef264..e09c685f9 100644
--- a/tedana/decomposition/pca.py
+++ b/tedana/decomposition/pca.py
@@ -6,6 +6,7 @@
 import numpy as np
 import pandas as pd
 from mapca import MovingAveragePCA
+from nilearn.signal import standardize_signal
 from scipy import stats
 from sklearn.decomposition import PCA
 
@@ -204,12 +205,11 @@ def tedpca(
         f"Computing PCA of optimally combined multi-echo data with selection criteria: {algorithm}"
     )
     data = data_oc[mask, :]
-
-    data_z = ((data.T - data.T.mean(axis=0)) / data.T.std(axis=0)).T  # var normalize ts
-    data_z = (data_z - data_z.mean()) / data_z.std()  # var normalize everything
+    data_z = standardize_signal(data.T, detrend=True, standardize="zscore_sample").T
+    # data_z = (data_z - data_z.mean()) / data_z.std()  # var normalize everything
 
     if algorithm in ["mdl", "aic", "kic"]:
-        data_img = io.new_nii_like(io_generator.reference_img, utils.unmask(data, mask))
+        data_img = io.new_nii_like(io_generator.reference_img, utils.unmask(data_z, mask))
         mask_img = io.new_nii_like(io_generator.reference_img, mask.astype(int))
         ma_pca = MovingAveragePCA(criterion=algorithm, normalize=True)
         _ = ma_pca.fit_transform(data_img, mask_img)

From 55e161d6b47f865aa5dfcc0f1b7685b34d65d30a Mon Sep 17 00:00:00 2001
From: Taylor Salo
Date: Sat, 27 Apr 2024 14:32:01 -0400
Subject: [PATCH 2/3] Update pca.py

---
 tedana/decomposition/pca.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tedana/decomposition/pca.py b/tedana/decomposition/pca.py
index e09c685f9..d4c18326d 100644
--- a/tedana/decomposition/pca.py
+++ b/tedana/decomposition/pca.py
@@ -206,7 +206,7 @@ def tedpca(
     )
     data = data_oc[mask, :]
     data_z = standardize_signal(data.T, detrend=True, standardize="zscore_sample").T
-    # data_z = (data_z - data_z.mean()) / data_z.std()  # var normalize everything
+    data_z = (data_z - data_z.mean()) / data_z.std()  # var normalize everything
 
     if algorithm in ["mdl", "aic", "kic"]:
         data_img = io.new_nii_like(io_generator.reference_img, utils.unmask(data_z, mask))

From 9874edeb69b7455c50db7cf605e7d95a085c6fb5 Mon Sep 17 00:00:00 2001
From: Taylor Salo
Date: Sun, 28 Apr 2024 10:45:34 -0400
Subject: [PATCH 3/3] Simplify code.

---
 tedana/decomposition/pca.py | 23 ++++++++++++++---------
 1 file changed, 14 insertions(+), 9 deletions(-)

diff --git a/tedana/decomposition/pca.py b/tedana/decomposition/pca.py
index d4c18326d..73b867627 100644
--- a/tedana/decomposition/pca.py
+++ b/tedana/decomposition/pca.py
@@ -144,9 +144,9 @@
         - Nonsignificant :math:`{\kappa}` and :math:`{\rho}`.
         - Nonsignificant variance explained.
 
+
     Generated Files
     ---------------
-
     =========================== =============================================
     Default Filename            Content
     =========================== =============================================
@@ -205,11 +205,16 @@
         f"Computing PCA of optimally combined multi-echo data with selection criteria: {algorithm}"
     )
     data = data_oc[mask, :]
-    data_z = standardize_signal(data.T, detrend=True, standardize="zscore_sample").T
-    data_z = (data_z - data_z.mean()) / data_z.std()  # var normalize everything
+    if algorithm in ["mdl", "aic", "kic"]:
+        # Detrend the data, but don't z-score, if using MAPCA
+        data = standardize_signal(data.T, detrend=True, standardize=False).T
+    else:
+        # Z-score the data otherwise
+        data = standardize_signal(data.T, detrend=True, standardize="zscore_sample").T
+        data = (data - data.mean()) / data.std()  # var normalize everything
 
     if algorithm in ["mdl", "aic", "kic"]:
-        data_img = io.new_nii_like(io_generator.reference_img, utils.unmask(data_z, mask))
+        data_img = io.new_nii_like(io_generator.reference_img, utils.unmask(data, mask))
         mask_img = io.new_nii_like(io_generator.reference_img, mask.astype(int))
         ma_pca = MovingAveragePCA(criterion=algorithm, normalize=True)
         _ = ma_pca.fit_transform(data_img, mask_img)
@@ -315,23 +320,23 @@
 
     elif isinstance(algorithm, Number):
         ppca = PCA(copy=False, n_components=algorithm, svd_solver="full")
-        ppca.fit(data_z)
+        ppca.fit(data)
         comp_ts = ppca.components_.T
         varex = ppca.explained_variance_
-        voxel_comp_weights = np.dot(np.dot(data_z, comp_ts), np.diag(1.0 / varex))
+        voxel_comp_weights = np.dot(np.dot(data, comp_ts), np.diag(1.0 / varex))
         varex_norm = ppca.explained_variance_ratio_
     elif low_mem:
-        voxel_comp_weights, varex, varex_norm, comp_ts = low_mem_pca(data_z)
+        voxel_comp_weights, varex, varex_norm, comp_ts = low_mem_pca(data)
     else:
         # If algorithm is kundu or kundu-stablize component metrics
         # are calculated without dimensionality estimation and
         # reduction and then kundu identifies components that are
         # to be accepted or rejected
         ppca = PCA(copy=False, n_components=(n_vols - 1))
-        ppca.fit(data_z)
+        ppca.fit(data)
         comp_ts = ppca.components_.T
         varex = ppca.explained_variance_
-        voxel_comp_weights = np.dot(np.dot(data_z, comp_ts), np.diag(1.0 / varex))
+        voxel_comp_weights = np.dot(np.dot(data, comp_ts), np.diag(1.0 / varex))
         varex_norm = ppca.explained_variance_ratio_
 
     # Compute Kappa and Rho for PCA comps