diff --git a/.gitignore b/.gitignore
index 259fbb5..6630d11 100644
--- a/.gitignore
+++ b/.gitignore
@@ -164,3 +164,6 @@ notebooks/calibrate_comprehensive_cat.ipynb
 notebooks/demo_apply_hsp_masks.py
 notebooks/demo_apply_hsp_masks.ipynb
 notebooks/plot_footprints.ipynb
+notebooks/demo_comprehensive_to_minimal_cat.ipynb
+notebooks/demo_create_footprint_mask.ipynb
+notebooks/demo_check_footprint.ipynb
diff --git a/notebooks/calibrate_comprehensive_cat.py b/notebooks/calibrate_comprehensive_cat.py
index d9e14a9..8d5a968 100644
--- a/notebooks/calibrate_comprehensive_cat.py
+++ b/notebooks/calibrate_comprehensive_cat.py
@@ -7,7 +7,7 @@
 #       format_version: '1.5'
 #       jupytext_version: 1.15.1
 #   kernelspec:
-#     display_name: sp_validation
+#     display_name: Python 3
 #     language: python
 #     name: python3
 # ---
@@ -260,6 +260,9 @@
 add_cols_data["e1_PSF"] = cat_gal["e1_PSF"]
 add_cols_data["e2_PSF"] = cat_gal["e2_PSF"]
+add_cols_data["fwhm_PSF"] = cat.get_col(
+    dat, "fwhm_PSF", mask_combined._mask, mask_metacal
+)
 
 # +
 # Add information to FITS header
diff --git a/notebooks/cosmo_val/cat_config.yaml b/notebooks/cosmo_val/cat_config.yaml
index 1d542ec..f5a8ca0 100644
--- a/notebooks/cosmo_val/cat_config.yaml
+++ b/notebooks/cosmo_val/cat_config.yaml
@@ -681,7 +681,7 @@ SP_v1.4.1_noleakage:
 
 SP_v1.4.5:
   pipeline: SP
-  subdir: UNIONS/v1.x/ShapePipe/v1.4.x/v1.4.5
+  subdir: v1.4.x/v1.4.5
   ls: dashed
   colour: blue
   marker: d
@@ -698,7 +698,7 @@ SP_v1.4.5:
     e2_col: e2
     e1_PSF_col: e1_PSF
     e2_PSF_col: e2_PSF
-    w_col: w_iv
+    w_col: w_des
     R: 1.0
   redshift_distr: CFIS/v1.0/nz/dndz_SP_A.txt
   star:
@@ -741,17 +741,17 @@ SP_v1.4.5_glass_mock:
     e2_col: e2
     e1_PSF_col: e1_PSF
     e2_PSF_col: e2_PSF
-    w: w
+    w_col: w
    R: 1.0
   redshift_distr: CFIS/v1.0/nz/dndz_SP_A.txt
   star:
-    path: unions_shapepipe_star_2024_v1.4.1.fits
+    path: unions_shapepipe_star_2024_v1.4.a.fits
     ra_col: RA
     dec_col: Dec
     e1_col: e1
     e2_col: e2
   psf:
-    path: unions_shapepipe_psf_2024_v1.4.1.fits
+    path: unions_shapepipe_psf_2024_v1.4.a.fits
     hdu: 1
     ra_col: RA
     dec_col: Dec
@@ -784,17 +784,17 @@ SP_test:
     e2_col: e2
     e1_PSF_col: e1_PSF
     e2_PSF_col: e2_PSF
-    w: w_iv
+    w_col: w_iv
     R: 1.0
   redshift_distr: CFIS/v1.0/nz/dndz_SP_A.txt
   star:
-    path: unions_shapepipe_star_2024_v1.4.1.fits
+    path: unions_shapepipe_star_2024_v1.4.a.fits
     ra_col: RA
     dec_col: Dec
     e1_col: e1
     e2_col: e2
   psf:
-    path: unions_shapepipe_psf_2024_v1.4.1.fits
+    path: unions_shapepipe_psf_2024_v1.4.a.fits
     hdu: 1
     ra_col: RA
     dec_col: Dec
@@ -810,7 +810,7 @@ SP_test:
 
 SP_v1.4.5_leak_corr:
   pipeline: SP
-  subdir: UNIONS/v1.x/ShapePipe/v1.4.x/v1.4.5
+  subdir: v1.4.x/v1.4.5
   ls: dashed
   colour: midnightblue
   marker: d
@@ -851,9 +851,54 @@ SP_v1.4.5_leak_corr:
     star_flag: "FLAG_STAR_HSM"
     square_size: True
 
+SP_v1.5.3:
+  pipeline: SP
+  subdir: CFIS/v1.0/ShapePipe/v1.5.x/v1.5.3
+  ls: dashed
+  colour: blue
+  marker: v
+  getdist_colour: 0.0, 0.5, 1.0
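+  # Inputs for the analytic (theory) covariance: effective area A, galaxy
+  # and PSF-star number densities n_e / n_psf, ellipticity dispersion sigma_e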
+  cov_th:
+    A: 2420.2651014497287 #deg^2
+    n_e: 7.040818382014773 #arcmin-2
+    n_psf: 0.752316232272063 #arcmin-2
+    sigma_e: 0.30961528707207325
+  shear:
+    path: unions_shapepipe_cut_struc_2024_v1.5.3.fits
+    covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt
+    e1_col: e1
+    e2_col: e2
+    e1_PSF_col: e1_PSF
+    e2_PSF_col: e2_PSF
+    w_col: w_iv
+    R: 1.0
+  star:
+    path: unions_shapepipe_star_2024_v1.5.a.fits
+    ra_col: RA
+    dec_col: Dec
+    e1_col: e1
+    e2_col: e2
+  psf:
+    path: unions_shapepipe_psf_2024_v1.5.a.fits
+    hdu: 1
+    ra_col: RA
+    dec_col: Dec
+    e1_PSF_col: E1_PSF_HSM
+    e2_PSF_col: E2_PSF_HSM
+    e1_star_col: E1_STAR_HSM
+    e2_star_col: E2_STAR_HSM
+    PSF_size: SIGMA_PSF_HSM
+    star_size: SIGMA_STAR_HSM
+    PSF_flag: "FLAG_PSF_HSM"
+    star_flag: "FLAG_STAR_HSM"
+    square_size: True
+
+
 SP_v1.5.4:
   pipeline: SP
-  subdir: CFIS/v1.0/ShapePipe/
+  subdir: CFIS/v1.0/ShapePipe/v1.5.x/v1.5.4
   ls: dashed
   colour: green
   marker: d
@@ -864,7 +907,7 @@ SP_v1.5.4:
     n_psf: 0.752316232272063 #arcmin-2
     sigma_e: 0.30961528707207325
   shear:
-    path: v1.5.x/v1.5.4/unions_shapepipe_cut_struc_2024_v1.5.4.fits
+    path: unions_shapepipe_cut_struc_2024_v1.5.4.fits
     covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt
     e1_col: e1
     e2_col: e2
@@ -873,13 +916,13 @@ SP_v1.5.4:
     w_col: w_iv
     R: 1.0
   star:
-    path: unions_shapepipe_star_2024_v1.4.1.fits
+    path: unions_shapepipe_star_2024_v1.5.a.fits
     ra_col: RA
     dec_col: Dec
     e1_col: e1
     e2_col: e2
   psf:
-    path: v1.5.x/unions_shapepipe_psf_2024_v1.5.2.fits
+    path: unions_shapepipe_psf_2024_v1.5.a.fits
     hdu: 1
     ra_col: RA
     dec_col: Dec
diff --git a/notebooks/cosmo_val/run_cosmo_val.py b/notebooks/cosmo_val/run_cosmo_val.py
index a8f2f5e..79f04cb 100644
--- a/notebooks/cosmo_val/run_cosmo_val.py
+++ b/notebooks/cosmo_val/run_cosmo_val.py
@@ -1,6 +1,10 @@
 # %%
 # %load_ext autoreload
 # %autoreload 2
+
+import matplotlib
+matplotlib.use("agg")
+
 import matplotlib.pyplot as plt
 import numpy as np
 import treecorr
@@ -30,7 +34,10 @@
 cv.plot_scale_dependent_leakage()
 
 # %%
-#cv.plot_objectwise_leakage()
+cv.plot_objectwise_leakage()
+
+# %%
+cv.plot_objectwise_leakage_aux()
 
 # %%
 cv.plot_ellipticity()
@@ -38,6 +45,9 @@
 # %%
 cv.plot_separation()
 
+# %%
+cv.calculate_additive_bias()
+
 # %%
 cv.plot_2pcf()
diff --git a/notebooks/demo_apply_hsp_masks.py b/notebooks/demo_apply_hsp_masks.py
index f1f1c69..6f20c91 100644
--- a/notebooks/demo_apply_hsp_masks.py
+++ b/notebooks/demo_apply_hsp_masks.py
@@ -52,14 +52,15 @@
 # +
 # Set parameters
 
-obj._params["input_path"] = "unions_shapepipe_comprehensive_2024_v1.4.c.hdf5"
-obj._params["output_path"] = "unions_shapepipe_comprehensive_struc_2024_v1.4.c.hdf5"
-obj._params["mask_dir"] = f"{os.environ['HOME']}/v1.4.x/masks"
+ver_maj = "v1.5"
+obj._params["input_path"] = f"unions_shapepipe_comprehensive_2024_{ver_maj}.c.hdf5"
+obj._params["output_path"] = f"unions_shapepipe_comprehensive_struc_2024_{ver_maj}.c.hdf5"
+obj._params["mask_dir"] = f"{os.environ['HOME']}/{ver_maj}.x/masks"
 obj._params["nside"] = 131072
 obj._params["file_base"] = "mask_r_"
 obj._params["bits"] = bits
 
-obj._params["aux_mask_files"] = f"{obj._params['mask_dir']}/coverage.hsp"
+obj._params["aux_mask_files"] = f"{obj._params['mask_dir']}/coverage_{ver_maj}.x.hsp"
 obj._params["aux_mask_labels"] = "npoint3"
 obj._params["verbose"] = True
 # -
diff --git a/notebooks/demo_comprehensive_to_minimal_cat.py b/notebooks/demo_comprehensive_to_minimal_cat.py
new file mode 100644
index 0000000..f68a43c
--- /dev/null
+++ b/notebooks/demo_comprehensive_to_minimal_cat.py
@@ -0,0 +1,326 @@
+# ---
+# jupyter:
+#   jupytext:
+#     text_representation:
+#       extension: .py
+#       format_name: light
+#       format_version: '1.5'
+#     jupytext_version: 1.15.1
+#   kernelspec:
+#     display_name: Python 3
+#     language: python
+#     name: python3
+# ---
+
+# # Comprehensive to minimal catalogue
+#
+# Transform an input comprehensive catalogue to an output minimal catalogue.
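+#
+# Steps: read the comprehensive HDF5 catalogue, apply the selection masks
+# listed in the config file, drop the applied mask columns, write the
+# minimal catalogue, and compute mask correlation and confusion diagnostics.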
+
+# %reload_ext autoreload
+# %autoreload 2
+
+import sys
+import os
+import numpy as np
+from astropy.io import fits
+import matplotlib.pylab as plt
+
+from sp_validation import run_joint_cat as sp_joint
+from sp_validation import util
+from sp_validation.basic import metacal
+from sp_validation import calibration
+import sp_validation.cat as cat
+
+# Initialize calibration class instance
+obj = sp_joint.CalibrateCat()
+
+config = obj.read_config_set_params("config_mask.P37.yaml")
+
+# !pwd
+
+# Get data. Set load_into_memory to False for very large files
+dat, dat_ext = obj.read_cat(load_into_memory=False)
+
+if False:
+    n_max = 1_000_000
+    print(f"MKDEBUG testing only first {n_max} objects")
+    dat = dat[:n_max]
+    dat_ext = dat_ext[:n_max]
+
+# ## Masking
+
+# List of masks to apply
+masks_to_apply = [
+    "overlap",
+    "IMAFLAGS_ISO",
+    "NGMIX_MOM_FAIL",
+    "NGMIX_ELL_PSFo_NOSHEAR_0",
+    "NGMIX_ELL_PSFo_NOSHEAR_1",
+    "8_Manual",
+]
+
+# ### Pre-processing ShapePipe flags
+
+# +
+# List to store all mask objects
+masks = []
+
+# Dict to associate labels with index in mask list
+labels = {}
+# -
+
+# Loop over mask sections from config file
+config_data = {key: config[key] for key in ["dat", "dat_ext"] if key in config}
+idx = 0
+for section, mask_list in config_data.items():
+
+    # Set data source
+    dat_source = dat if section == "dat" else dat_ext
+
+    # Loop over mask information in this section
+    for mask_params in mask_list:
+        value = mask_params["value"]
+
+        if mask_params["col_name"] in masks_to_apply:
+
+            # Ensure 'range' kind has exactly two values
+            if mask_params["kind"] == "range" and (
+                not isinstance(value, list) or len(value) != 2
+            ):
+                raise ValueError(
+                    f"Range kind requires a list of two values, got {value}"
+                )
+
+            # Create mask instance and append to list
+            my_mask = sp_joint.Mask(
+                **mask_params, dat=dat_source, verbose=obj._params["verbose"]
+            )
+            masks.append(my_mask)
+            labels[my_mask._col_name] = idx
+            idx += 1
+        else:
+            if obj._params["verbose"]:
+                print(f"Skipping mask {mask_params['col_name']}")
+            continue
+
+if obj._params["verbose"]:
+    print(f"Combining {len(masks)} masks (check: {len(masks_to_apply)})")
+mask_combined = sp_joint.Mask.from_list(masks, label="combined")
+
+if obj._params["sky_regions"]:
+
+    # MKDEBUG TODO: zooms as list in config
+    zoom_ra = [200, 205]
+    zoom_dec = [55, 60]
+
+    sp_joint.sky_plots(dat, masks, labels, zoom_ra, zoom_dec)
+
+# +
+# Output some mask statistics
+
+num_obj = dat.shape[0]
+
+with open("masks.txt", "w") as f_out:
+
+    for my_mask in masks:
+        my_mask.print_summary(f_out)
+
+    sp_joint.Mask.print_strings(
+        "flag", "label", f"{'num_ok':>10}", f"{'num_ok[%]':>10}",
+        f_out=f_out,
+    )
+    print(file=f_out)
+
+    for my_mask in masks:
+        my_mask.print_stats(num_obj, f_out=f_out)
+
+    mask_combined.print_stats(num_obj, f_out=f_out)
+# -
+
+# Note: the mask objects are still needed below (FITS header, correlation
+# and confusion matrices), so they are not freed here.
+
+# +
+# Remove mask columns that were applied earlier
+
+# Columns to keep
+names_to_keep = [
+    name for name in dat.dtype.names + dat_ext.dtype.names
+    if name not in masks_to_apply
+]
+
+new_dtype = [
+    (name, dt) for name, dt in
+    dat.dtype.descr + dat_ext.dtype.descr
+    if name in names_to_keep
+]
+
+# Create a new structured array, zero-initialised, with the merged dtype
+new_dat = np.zeros(len(dat[mask_combined._mask]), dtype=new_dtype)
+
+# Copy relevant columns and rows from each source array
+for name in names_to_keep:
+    if name in dat.dtype.names:
+        new_dat[name] = dat[name][mask_combined._mask]
+    else:
+        new_dat[name] = dat_ext[name][mask_combined._mask]
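+
+# Note: numpy.lib.recfunctions.merge_arrays offers similar column merging;
+# the explicit per-column loop applies the row mask while copying, so the
+# full unmasked arrays are never duplicated in memory.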
+
+# +
+# Add information to FITS header
+
+# Generate new header
+header = fits.Header()
+
+# Add general config information to FITS header
+obj.add_params_to_FITS_header(header)
+
+# Add mask information to FITS header
+for my_mask in masks:
+    my_mask.add_summary_to_FITS_header(header)
+# -
+
+obj._params
+
+# +
+# Write extended data to new HDF5 file
+
+obj_appl = sp_joint.ApplyHspMasks()
+output_path = obj._params["input_path"].replace("1.5.c", "1.5.m")
+obj_appl._params["output_path"] = output_path
+obj_appl._params["aux_mask_file_list"] = []
+
+obj_appl.write_hdf5_file(dat, dat_ext)
+# -
+
+from scipy import stats
+
+
+def correlation_matrix(masks, confidence_level=0.9):
+
+    n_key = len(masks)
+    print(n_key)
+
+    cm = np.empty((n_key, n_key))
+    r_val = np.zeros_like(cm)
+    r_cl = np.empty((n_key, n_key, 2))
+
+    for idx, mask_idx in enumerate(masks):
+        for jdx, mask_jdx in enumerate(masks):
+            res = stats.pearsonr(mask_idx._mask, mask_jdx._mask)
+            r_val[idx][jdx] = res.statistic
+            r_cl[idx][jdx] = res.confidence_interval(
+                confidence_level=confidence_level
+            )
+
+    return r_val, r_cl
+
+
+# +
+all_masks = masks[:-3]
+
+# +
+if not obj._params["cmatrices"]:
+    print("Skipping correlation-matrix calculations")
+    sys.exit(0)
+
+r_val, r_cl = correlation_matrix(all_masks)
+
+# +
+
+n = len(all_masks)
+keys = [my_mask._label for my_mask in all_masks]
+
+plt.imshow(r_val, cmap="coolwarm", vmin=-1, vmax=1)
+plt.xticks(np.arange(n), keys)
+plt.yticks(np.arange(n), keys)
+plt.xticks(rotation=90)
+plt.colorbar()
+plt.savefig("correlation_matrix.png")
+
+# -
+
+
+def confusion_matrix(prediction, observation):
+
+    result = {}
+
+    result["true_pos"] = sum(prediction & observation)
+    result["true_neg"] = sum(
+        np.logical_not(prediction) & np.logical_not(observation)
+    )
+    # Predicted positive but observed negative
+    result["false_pos"] = sum(prediction & np.logical_not(observation))
+    # Predicted negative but observed positive
+    result["false_neg"] = sum(np.logical_not(prediction) & observation)
+    result["false_pos_rate"] = result["false_pos"] / (
+        result["false_pos"] + result["true_neg"]
+    )
+    result["false_neg_rate"] = result["false_neg"] / (
+        result["false_neg"] + result["true_pos"]
+    )
+    result["sensitivity"] = result["true_pos"] / (
+        result["true_pos"] + result["false_neg"]
+    )
+    result["specificity"] = result["true_neg"] / (
+        result["true_neg"] + result["false_pos"]
+    )
+
+    cm = np.zeros((2, 2))
+    cm[0][0] = result["true_pos"]
+    cm[1][1] = result["true_neg"]
+    cm[0][1] = result["false_neg"]
+    cm[1][0] = result["false_pos"]
+    cmn = cm.astype("float") / cm.sum(axis=1)[:, np.newaxis]
+    result["cmn"] = cmn
+
+    return result
+
+
+n_key = len(all_masks)
+cms = np.zeros((n_key, n_key, 2, 2))
+for idx in range(n_key):
+    for jdx in range(n_key):
+
+        if idx == jdx:
+            continue
+
+        print(idx, jdx)
+        res = confusion_matrix(all_masks[idx]._mask, all_masks[jdx]._mask)
+        cms[idx][jdx] = res["cmn"]
+
+# +
+import seaborn as sns
+
+fig = plt.figure(figsize=(30, 30))
+axes = np.empty((n_key, n_key), dtype=object)
+for idx in range(n_key):
+    for jdx in range(n_key):
+        if idx == jdx:
+            continue
+        axes[idx][jdx] = plt.subplot2grid((n_key, n_key), (idx, jdx), fig=fig)
+
+matrix_elements = ["True", "False"]
+
+for idx in range(n_key):
+    for jdx in range(n_key):
+
+        if idx == jdx:
+            continue
+
+        ax = axes[idx, jdx]
+        sns.heatmap(
+            cms[idx][jdx],
+            annot=True,
+            fmt=".2f",
+            xticklabels=matrix_elements,
+            yticklabels=matrix_elements,
+            ax=ax,
+        )
+        ax.set_ylabel(all_masks[idx]._label)
+        ax.set_xlabel(all_masks[jdx]._label)
+
+plt.show(block=False)
+plt.savefig("confusion_matrix.png")
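+
+# Each panel is row-normalised: in panel (idx, jdx) the diagonal gives the
+# sensitivity and specificity of mask idx as a predictor of mask jdx.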
+plt.savefig("confusion_matrix.png") +# - + +obj.close_hd5() diff --git a/requirements.txt b/requirements.txt index 17d7dad..965ba4d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,9 +15,10 @@ lenspack joblib jupyter jupytext==1.15.1 +#pymaster regions reproject -shear_psf_leakage==0.2.0 +shear_psf_leakage==0.2.1 scipy seaborn setuptools==65.5.1 @@ -26,7 +27,6 @@ statsmodels treecorr tqdm uncertainties -clmm pyccl opencv-python-headless #git+https://github.com/aguinot/cosmo-numba.git@main diff --git a/sp_validation/cat.py b/sp_validation/cat.py index 4916d9f..9c8a580 100644 --- a/sp_validation/cat.py +++ b/sp_validation/cat.py @@ -519,7 +519,7 @@ def write_shape_catalog( raise ValueError(f"Invalid weight type {w_type}") name = f"w_{w_type}" - col_info_arr.append(fits.Column(name=name, array=w, format="D"), descr) + col_info_arr.append((fits.Column(name=name, array=w, format="D"), descr)) # Additional columns ## Magnitude diff --git a/sp_validation/cosmo_val.py b/sp_validation/cosmo_val.py index f8e5515..409c9f4 100644 --- a/sp_validation/cosmo_val.py +++ b/sp_validation/cosmo_val.py @@ -1,6 +1,7 @@ # %% import os from contextlib import contextmanager +from functools import partial import colorama import matplotlib.pyplot as plt @@ -19,15 +20,24 @@ import healpy as hp import healsparse as hsp +import treecorr +import camb +import yaml from collections import Counter + import skyproj +from astropy.io import fits +from astropy.cosmology import Planck18 +from astropy import units as u +from astropy.coordinates import SkyCoord try: from cosmo_numba.B_modes.schneider2022 import get_pure_EB_modes except: - print("Could not import cosmo_numba, continuing without") + print("Could not import cosmo_numba, continuing without") +from . import utils_cosmo_val import pymaster as nmt from cs_util import plots as cs_plots from shear_psf_leakage import leakage @@ -158,7 +168,7 @@ def set_params_leakage_scale(self, ver): # Note: for SP these are calibrated shear estimates params_in["e1_col"] = self.cc[ver]["shear"]["e1_col"] params_in["e2_col"] = self.cc[ver]["shear"]["e2_col"] - params_in["w_col"] = self.cc[ver]["shear"]["w"] + params_in["w_col"] = self.cc[ver]["shear"]["w_col"] params_in["R11"] = None if ver != "DES" else self.cc[ver]["shear"]["R11"] params_in["R22"] = None if ver != "DES" else self.cc[ver]["shear"]["R22"] @@ -199,6 +209,9 @@ def set_params_leakage_object(self, ver): + f" shear yaml entry for version {ver}" ) + if "cols" in self.cc[ver]["shear"]: + params_in["cols"] = self.cc[ver]["shear"]["cols"] + params_in["verbose"] = False return params_in @@ -220,20 +233,18 @@ def init_results(self, objectwise=False): results[ver].prepare_output() @contextmanager - def temporarily_load_data(results): + def temporarily_load_data(results_instance): try: - self.print_start(f"Loading catalog for {ver}...", end="") - results.read_data() + self.print_start(f"Loading catalog {results_instance._params['input_path_shear']} ...", end="") + results_instance.read_data() self.print_done(f"done") yield finally: - self.print_done(f"Freeing {ver} from memory") - del results.dat_shear - del results.dat_PSF + self.print_done(f"Freeing {results_instance._params['input_path_shear']} from memory") + del results_instance.dat_shear + del results_instance.dat_PSF - results[ver].temporarily_load_data = lambda: temporarily_load_data( - results[ver] - ) + results[ver].temporarily_load_data = partial(temporarily_load_data, results[ver]) return results @@ -352,8 +363,6 @@ def set_params_rho_tau(self, params, 
 
         return results
 
@@ -352,8 +363,6 @@ def set_params_rho_tau(self, params, params_psf, survey="other"):
         params["ra_units"] = "deg"
         params["dec_units"] = "deg"
 
-        params["w_col"] = self.cc[ver]["shear"]["w_col"]
-
         return params
 
     @property
@@ -376,7 +385,9 @@ def calculate_rho_tau_fits(self):
         self._xi_psf_sys = {}
         for ver in self.versions:
             params = self.set_params_rho_tau(
-                self.results[ver]._params, self.cc[ver]["psf"], survey=ver
+                self.results[ver]._params,
+                self.cc[ver]["psf"],
+                survey=ver,
             )
 
             npatch = {"sim": 300, "jk": params["patch_number"]}.get(
@@ -826,6 +837,31 @@ def plot_objectwise_leakage(self):
             cs_plots.savefig(out_path)
             self.print_done(f"Object-wise leakage coefficients plot saved to {out_path}")
 
+    def calculate_objectwise_leakage_aux(self):
+
+        self.print_start("Object-wise leakage auxiliary quantities:")
+        for ver in self.versions:
+            self.print_magenta(ver)
+
+            results_obj = self.results_objectwise[ver]
+            results_obj.check_params()
+            results_obj.update_params()
+            results_obj.prepare_output()
+
+            with self.results[ver].temporarily_load_data():
+                results_obj._dat = self.results[ver].dat_shear
+
+                if "cols" not in results_obj._params:
+                    self.print_green("Skipping object-wise leakage (aux quantities), no input columns for regression found")
+                else:
+                    self.print_cyan(f"Computing object-wise leakage regression with aux quantities: {results_obj._params['cols']}")
+
+                    # Run
+                    results_obj.obs_leakage()
+
+    def plot_objectwise_leakage_aux(self):
+        self.calculate_objectwise_leakage_aux()
+
     def plot_ellipticity(self, nbins=200):
         out_path = os.path.abspath(f"{self.cc['paths']['output']}/ell_hist.png")
         if os.path.exists(out_path):
@@ -894,26 +930,29 @@ def calculate_additive_bias(self):
         self._c2 = {}
         for ver in self.versions:
             self.print_magenta(ver)
-            R = self.cc[ver]["shear"]["R"]
-            self._c1[ver] = np.average(
-                self.results[ver].dat_shear[self.cc[ver]["shear"]["e1_col"]] / R,
-                weights=self.results[ver].dat_shear[self.cc[ver]["shear"]["w_col"]],
-            )
-            self._c2[ver] = np.average(
-                self.results[ver].dat_shear[self.cc[ver]["shear"]["e2_col"]] / R,
-                weights=self.results[ver].dat_shear[self.cc[ver]["shear"]["w_col"]],
-            )
-        self.print_done("Finished additive bias calculation.")
+            with self.results[ver].temporarily_load_data():
+                R = self.cc[ver]["shear"]["R"]
+                self._c1[ver] = np.average(
+                    self.results[ver].dat_shear[self.cc[ver]["shear"]["e1_col"]] / R,
+                    weights=self.results[ver].dat_shear[self.cc[ver]["shear"]["w_col"]],
+                )
+                self._c2[ver] = np.average(
+                    self.results[ver].dat_shear[self.cc[ver]["shear"]["e2_col"]] / R,
+                    weights=self.results[ver].dat_shear[self.cc[ver]["shear"]["w_col"]],
+                )
+        self.print_done("Finished additive bias calculation.")
 
     @property
     def c1(self):
         if not hasattr(self, "_c1"):
+            print("MKDEBUG calc c1")
             self.calculate_additive_bias()
         return self._c1
 
     @property
     def c2(self):
         if not hasattr(self, "_c2"):
+            print("MKDEBUG calc c2")
             self.calculate_additive_bias()
         return self._c2
 
@@ -953,7 +992,7 @@ def calculate_2pcf(self):
             dec=self.results[ver].dat_shear["Dec"],
             g1=g1,
             g2=g2,
-            w=self.results[ver].dat_shear["w"],
+            w=self.results[ver].dat_shear[self.cc[ver]["shear"]["w_col"]],
             ra_units=self.treecorr_config["ra_units"],
             dec_units=self.treecorr_config["dec_units"],
             npatch=self.npatch,
@@ -1200,7 +1239,7 @@ def calculate_aperture_mass_dispersion(
             dec=self.results[ver].dat_shear["Dec"],
             g1=g1,
             g2=g2,
-            w=self.results[ver].dat_shear["w"],
+            w=self.results[ver].dat_shear[self.cc[ver]["shear"]["w_col"]],
             ra_units=self.treecorr_config["ra_units"],
             dec_units=self.treecorr_config["dec_units"],
             npatch=npatch,
@@ -2204,3 +2243,4 @@ def plot_pseudo_cl(self):
         plt.suptitle('Pseudo-Cl BB (Gaussian covariance)')
         plt.legend()
         plt.savefig(out_path)
+# %%
diff --git a/sp_validation/cosmology.py b/sp_validation/cosmology.py
index 1260c96..66986bc 100644
--- a/sp_validation/cosmology.py
+++ b/sp_validation/cosmology.py
@@ -26,19 +26,6 @@
 from sp_validation.survey import get_footprint
 
-# For theoretical modelling of cluster lensing
-try:
-    import clmm
-except Exception:
-    print('Could not import clmm, continuing...')
-
-try:
-    # import clmm.modeling as cm
-    from clmm import Cosmology
-except Exception:
-    print('Could not import clmm.Cosmology, continuing...')
-
-
 # For correlation function calculations
 import treecorr
diff --git a/sp_validation/run_joint_cat.py b/sp_validation/run_joint_cat.py
index e8b565f..f04f023 100644
--- a/sp_validation/run_joint_cat.py
+++ b/sp_validation/run_joint_cat.py
@@ -164,8 +164,6 @@ def write_hdf5_file(self, dat, output_path=None):
         output_path : str, optional
             output file path; when ``None`` (default) use
             self._params['output_path']
-        patches : list, optional
-            input patches, list of str, default is ``None``
 
         """
         if output_path is None:
@@ -488,8 +486,6 @@ def write_hdf5_file(self, dat, patches):
             dset = f.create_dataset("data", data=dat)
             dset[:] = dat
 
-        # super().write_hdf5_file(dat, output_path=output_path)
-
     def write_hdf5_header(self, hd5file, patches=None):
         """Write HDF5 Header.
 
@@ -1273,16 +1269,20 @@ def to_bool(self, hsp_mask):
         return mask_bool
 
     @classmethod
-    def print_strings(cls, coln, lab, num, fnum):
-        print(f"{coln:30s} {lab:30s} {num:10s} {fnum:10s}")
+    def print_strings(cls, coln, lab, num, fnum, f_out=None):
+        msg = f"{coln:30s} {lab:30s} {num:10s} {fnum:10s}"
+        print(msg)
+        if f_out:
+            print(msg, file=f_out)
 
-    def print_stats(self, num_obj):
+    def print_stats(self, num_obj, f_out=None):
 
         if self._num_ok is None:
             self._num_ok = sum(self._mask)
 
         si = f"{self._num_ok:10d}"
         sf = f"{self._num_ok/num_obj:10.2%}"
-        self.print_strings(self._col_name, self._label, si, sf)
+        self.print_strings(self._col_name, self._label, si, sf, f_out=f_out)
 
     def get_sign(self):
diff --git a/sp_validation/utils_cosmo_val.py b/sp_validation/utils_cosmo_val.py
index ce323f6..5dac7ae 100644
--- a/sp_validation/utils_cosmo_val.py
+++ b/sp_validation/utils_cosmo_val.py
@@ -93,7 +93,7 @@ def get_params_rho_tau(cat, survey="other"):
     params["dec_units"] = "deg"
 
-    params["w_col"] = cat['shear']["w"]
+    params["w_col"] = cat['shear']["w_col"]
     params["e1_col"] = cat['shear']["e1_col"]
     params["e2_col"] = cat['shear']["e2_col"]
     params["R11"] = cat['shear'].get("R11")