# HG changeset patch
# User francesco_lapi
# Date 1736937129 0
# Node ID c6d78b0d324db6c4432583e5e9cb5e702c3ca53f
# Parent  049aa0f4844f121610fc7b9995297089053136ae
Uploaded

diff -r 049aa0f4844f -r c6d78b0d324d COBRAxy/flux_to_map.py
--- a/COBRAxy/flux_to_map.py	Mon Jan 13 15:16:18 2025 +0000
+++ b/COBRAxy/flux_to_map.py	Wed Jan 15 10:32:09 2025 +0000
@@ -698,8 +698,8 @@
     ks_statistic, p_value = st.ks_2samp(dataset1Data, dataset2Data)
 
     # Calculate means and standard deviations
-    mean1 = np.mean(dataset1Data)
-    mean2 = np.mean(dataset2Data)
+    mean1 = np.nanmean(dataset1Data)
+    mean2 = np.nanmean(dataset2Data)
     std1 = np.std(dataset1Data, ddof=1)
     std2 = np.std(dataset2Data, ddof=1)
 
@@ -958,8 +958,8 @@
     metabMap_median = copy.deepcopy(metabMap)
 
     # Compute medians and means
-    medians = {key: np.round(np.median(np.array(value), axis=1), 6) for key, value in class_pat.items()}
-    means = {key: np.round(np.mean(np.array(value), axis=1),6) for key, value in class_pat.items()}
+    medians = {key: np.round(np.nanmedian(np.array(value), axis=1), 6) for key, value in class_pat.items()}
+    means = {key: np.round(np.nanmean(np.array(value), axis=1),6) for key, value in class_pat.items()}
 
     # Normalize medians and means
     max_flux_medians = max(np.max(np.abs(arr)) for arr in medians.values())
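
Note (not part of the patch): the change swaps NumPy's plain aggregations for their NaN-aware counterparts, so samples containing missing values no longer propagate NaN into the per-reaction statistics. A minimal sketch of the behavioral difference, using a hypothetical flux matrix (reactions as rows, samples as columns) with one missing entry:

```python
import numpy as np

# Hypothetical flux values; one sample of the first reaction is missing.
flux = np.array([
    [1.0, 2.0, np.nan],
    [0.5, 0.7, 0.9],
])

# Plain aggregations propagate the NaN into the result...
print(np.mean(flux, axis=1))       # [nan 0.7]
print(np.median(flux, axis=1))     # [nan 0.7]

# ...while the NaN-aware variants ignore the missing entries.
print(np.nanmean(flux, axis=1))    # [1.5 0.7]
print(np.nanmedian(flux, axis=1))  # [1.5 0.7]
```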