diff --git a/docs/source/pipeline_canfar.md b/docs/source/pipeline_canfar.md index cfba3d2f..e0a4d25a 100644 --- a/docs/source/pipeline_canfar.md +++ b/docs/source/pipeline_canfar.md @@ -121,10 +121,19 @@ patchnum=`tr $patch P ''` create_final_cat.py -m final_cat_$patch.hdf5 -i . -p $patch/cfis/final_cat.param -P $patchnum -o $patch/n_tiles_final.txt -v # Star catalogue -combine_runs.bash -p $psf -c psf +combine_runs.bash -p $psf -c psf + +## The following are not necessary, only to get merged catalogue with non-converted quantities shapepipe_run -c $SP_CONFIG/config_Ms_$psf.ini shapepipe_run -c $SP_CONFIG/config_Pl_$psf.ini +# With fourth-order moment, additional psf interpol validation runs +combine_runs.bash -p $psf -c psf_val + +## The following are not necessary, only to get merged catalogue with non-converted quantities +shapepipe_run -c $SP_CONFIG/config_Msval_$psf.ini + + # Convert star cat to WCS ## Convert all input validation psf files and create directories par patch ## psf_conv_all/P? @@ -135,7 +144,7 @@ cd ../star_cat convert_psf_pix2world.py -i .. -P $patchnum -v # Combine previously created files as links within one SP run dir -# (for the v1.4 setup only one link +# (for the v1.4 setup only one link) cd P$patch combine_runs.bash -p psfex -c psf_conv diff --git a/example/cfis/config_Msval_psfex.ini b/example/cfis/config_Msval_psfex.ini new file mode 100644 index 00000000..12025c61 --- /dev/null +++ b/example/cfis/config_Msval_psfex.ini @@ -0,0 +1,70 @@ +# ShapePipe configuration file for post-processing. +# merge star cat. 
+ + +## Default ShapePipe options +[DEFAULT] + +# verbose mode (optional), default: True, print messages on terminal +VERBOSE = True + +# Name of run (optional) default: shapepipe_run +RUN_NAME = run_sp_Msval + +# Add date and time to RUN_NAME, optional, default: False +RUN_DATETIME = False + + +## ShapePipe execution options +[EXECUTION] + +# Module name, single string or comma-separated list of valid module runner names +MODULE = merge_starcat_runner + +# Parallel processing mode, SMP or MPI +MODE = SMP + + +## ShapePipe file handling options +[FILE] + +# Log file master name, optional, default: shapepipe +LOG_NAME = log_sp + +# Runner log file name, optional, default: shapepipe_runs +RUN_LOG_NAME = log_run_sp + +# Input directory, containing input files, single string or list of names +INPUT_DIR = $SP_RUN/output + +# Output directory +OUTPUT_DIR = $SP_RUN/output + + +## ShapePipe job handling options +[JOB] + +# Batch size of parallel processing (optional), default is 1, i.e. run all jobs in serial +SMP_BATCH_SIZE = 4 + +# Timeout value (optional), default is None, i.e. no timeout limit applied +TIMEOUT = 96:00:00 + + +## Module options +[MERGE_STARCAT_RUNNER] + +INPUT_DIR = last:psfex_interp_runner + +PSF_MODEL = psfex + +NUMBERING_SCHEME = -0000000-0 + +# Input file pattern(s), list of strings with length matching number of expected input file types +# Cannot contain wild cards +FILE_PATTERN = validation_psf + +# FILE_EXT (optional) list of string extensions to identify input files +FILE_EXT = .fits + +HDU = 2 diff --git a/example/cfis/config_exp_psfex_validation.ini b/example/cfis/config_exp_psfex_validation.ini new file mode 100644 index 00000000..adb352df --- /dev/null +++ b/example/cfis/config_exp_psfex_validation.ini @@ -0,0 +1,82 @@ +# ShapePipe configuration file for single-exposures. PSFex PSF model +# validation. 
+ + +## Default ShapePipe options +[DEFAULT] + +# verbose mode (optional), default: True, print messages on terminal +VERBOSE = True + +# Name of run (optional) default: shapepipe_run +RUN_NAME = run_sp_exp_Pv + +# Add date and time to RUN_NAME, optional, default: True +; RUN_DATETIME = False + + +## ShapePipe execution options +[EXECUTION] + +# Module name, single string or comma-separated list of valid module runner names +MODULE = psfex_interp_runner + + +# Run mode, SMP or MPI +MODE = SMP + + +## ShapePipe file handling options +[FILE] + +# Log file master name, optional, default: shapepipe +LOG_NAME = log_sp + +# Runner log file name, optional, default: shapepipe_runs +RUN_LOG_NAME = log_run_sp + +# Input directory, containing input files, single string or list of names with length matching FILE_PATTERN +INPUT_DIR = $SP_RUN/output + +# Output directory +OUTPUT_DIR = $SP_RUN/output + + +## ShapePipe job handling options +[JOB] + +# Batch size of parallel processing (optional), default is 1, i.e. run all jobs in serial +SMP_BATCH_SIZE = 16 + +# Timeout value (optional), default is None, i.e. 
no timeout limit applied +TIMEOUT = 96:00:00 + + +## Module options + +[PSFEX_INTERP_RUNNER] + +# Use 20% sample for PSF validation +FILE_PATTERN = star_split_ratio_80, star_split_ratio_20, psfex_cat + +FILE_EXT = .psf, .fits, .cat + +NUMBERING_SCHEME = -0000000-0 + +# Run mode for psfex interpolation: +# CLASSIC: 'classical' run, interpolate to object positions +# MULTI-EPOCH: interpolate for multi-epoch images +# VALIDATION: validation for single-epoch images +MODE = VALIDATION + +# Column names of position parameters +POSITION_PARAMS = XWIN_IMAGE,YWIN_IMAGE + +# If True, measure and store ellipticity of the PSF (using moments) +GET_SHAPES = True + +# Minimum number of stars per CCD for PSF model to be computed +STAR_THRESH = 22 + +# Maximum chi^2 for PSF model to be computed on CCD +CHI2_THRESH = 2 diff --git a/pyproject.toml b/pyproject.toml index bd83613c..27a0a18c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ authors = [ { name = "Martin Kilbinger", email = "martin.kilbinger@cea.fr" } ] license = { "file" = "LICENSE" } -requires-python = ">=3.11" +requires-python = ">=3.9" dependencies = [ "joblib>=0.13", "modopt>=1.2", @@ -46,4 +46,22 @@ shapepipe_run = "shapepipe.shapepipe_run:main" [tool.pytest.ini_options] addopts = "--verbose --cov=shapepipe" -testpaths = ["shapepipe"] \ No newline at end of file +testpaths = ["shapepipe"] + +[tool.setuptools] +include-package-data = true + +[tool.setuptools.data-files] +"bin" = [ + "scripts/sh/job_sp_canfar.bash", + "scripts/sh/init_run_exclusive_canfar.sh", + "scripts/sh/curl_canfar_local.sh", + "scripts/sh/functions.sh", + "scripts/sh/stats_jobs_canfar.sh", + "scripts/sh/combine_runs.bash", + "scripts/python/update_runs_log_file.py", + "scripts/python/summary_run.py", + "scripts/python/summary_params_pre_v2.py", + "scripts/python/link_to_exp_for_tile.py", + "scripts/python/convert_psf_pix2world.py", +] \ No newline at end of file diff --git a/scripts/python/convert_psf_pix2world.py 
b/scripts/python/convert_psf_pix2world.py index dea99656..d8a265ad 100755 --- a/scripts/python/convert_psf_pix2world.py +++ b/scripts/python/convert_psf_pix2world.py @@ -16,7 +16,7 @@ from cs_util import logging -def transform_shape(mom_list, jac): +def transform_shape(mom_list, jac, rescale=False): """Transform Shape. Transform shape (ellipticity and size) using a Jacobian. @@ -24,37 +24,86 @@ def transform_shape(mom_list, jac): Parameters ---------- mom_list : list - input moment measurements; each list element contains - first and second ellipticity component and size + input "moment" measurements; each list element contains + first and second ellipticity components and size jac : galsim.JacobianWCS Jacobian transformation matrix information + rescale : bool, optional + rescale ellipticity to modulus if ``True``, to prevent + values outside [-1; 1]; default is ``False`` Returns ------- list transformed shape parameters, which are - first and second ellipticity component and size + first and second ellipticity components and size """ scale, shear, theta, flip = jac.getDecomposition() sig_tmp = mom_list[2] * scale - shape = galsim.Shear(g1=mom_list[0], g2=mom_list[1]) + + if rescale: + modulus = np.sqrt(mom_list[0] ** 2 + mom_list[1] ** 2) * 1.1 + else: + modulus = 1 + + shape = galsim.Shear(g1=mom_list[0]/modulus, g2=mom_list[1]/modulus) + + if flip: + print(f"g1 sign change, flip = {flip}") + shape = galsim.Shear(g1=-shape.g1, g2=shape.g2) + + shape = galsim.Shear(g=shape.g, beta=shape.beta + theta) + shape = shear + shape + + return shape.g1 * modulus, shape.g2 * modulus, sig_tmp + + +def transform_shape_scale(mom_list, jac): + """Transform Shape. + + Transform shape (ellipticity and size) using a Jacobian. + Input "ellipticities" can be outside [-1, 1]. 
+ + Parameters + ---------- + mom_list : list + input "moment" measurements; each list element contains + first and second ellipticity components + jac : galsim.JacobianWCS + Jacobian transformation matrix information + + Returns + ------- + list + transformed shape parameters, which are + first and second "ellipticity" components + + """ + _, shear, theta, flip = jac.getDecomposition() + + # Scale to within [-1, 1] + modulus = np.sqrt(mom_list[0] ** 2 + mom_list[1] ** 2) * 1.1 + shape = galsim.Shear(g1=mom_list[0]/modulus, g2=mom_list[1]/modulus) + if flip: - # The following output is not observed - print("FLIP!") + print(f"g1 sign change, flip = {flip}") shape = galsim.Shear(g1=-shape.g1, g2=shape.g2) + shape = galsim.Shear(g=shape.g, beta=shape.beta + theta) shape = shear + shape - return shape.g1, shape.g2, sig_tmp + # Scale back to original modulus + + return shape.g1 * modulus, shape.g2 * modulus class Loc2Glob(object): r"""Change from local to global coordinates. Class to pass from local coordinates to global coordinates under - CFIS (CFHT) MegaCam instrument. The geometrical informcation of the + CFIS (CFHT) MegaCam instrument. The geometrical information of the instrument is encoded in this function. 
Parameters @@ -378,6 +427,7 @@ def params_default(self): self._params = { "input_base_dir": ".", "output_base_dir": ".", + "sub_dir_pattern": "run_sp_combined_psf", "mode": "merge", "patches": "", "psf": "psfex", @@ -399,6 +449,7 @@ def params_default(self): + " /P/output;" " default is {}" ), + "sub_dir_pattern": "run directory pattern, default is {}", "mode": ( "run mode, allowed are 'merge', 'test'; default is" + " '{}'" ), @@ -415,10 +466,14 @@ def params_default(self): ("E1_PSF_HSM", float), ("E2_PSF_HSM", float), ("SIGMA_PSF_HSM", float), + ("M_4_PSF_1", float), + ("M_4_PSF_2", float), ("FLAG_PSF_HSM", float), ("E1_STAR_HSM", float), ("E2_STAR_HSM", float), ("SIGMA_STAR_HSM", float), + ("M_4_STAR_1", float), + ("M_4_STAR_2", float), ("FLAG_STAR_HSM", float), ("CCD_NB", int), ] @@ -435,11 +490,8 @@ def update_params(self): """ if self._params["psf"] == "psfex": - #self._params["sub_dir_pattern"] = "run_sp_exp_202" - self._params["sub_dir_pattern"] = "run_sp_combined_psf" self._params["sub_dir_psfint"] = "psfex_interp_runner" elif self._params["psf"] == "mccd": - self._params["sub_dir_pattern"] = "run_sp_exp_SxSePsf_202" self._params["sub_dir_psfint"] = "mccd_fit_val_runner" self._params["sub_dir_setools"] = "setools_runner/output/mask" else: @@ -471,7 +523,7 @@ def run(self): print("Running patch:", patch) patch_dir = f"{self._params['input_base_dir']}/P{patch}/output/" - subdirs = f"{patch_dir}/{self._params['sub_dir_pattern']}*" + subdirs = f"{patch_dir}/{self._params['sub_dir_pattern']}" exp_run_dirs = glob.glob(subdirs) n_exp_runs = len(exp_run_dirs) print( @@ -567,9 +619,13 @@ def transform_exposures(self, output_dir, patch, idx, exp_run_dir): new_e1_psf = np.zeros_like(psf_file[mod]) new_e2_psf = np.zeros_like(psf_file[mod]) new_sig_psf = np.zeros_like(psf_file[mod]) + new_m41_psf = np.zeros_like(psf_file[mod]) + new_m42_psf = np.zeros_like(psf_file[mod]) new_e1_star = np.zeros_like(psf_file[mod]) new_e2_star = np.zeros_like(psf_file[mod]) new_sig_star = 
np.zeros_like(psf_file[mod]) + new_m41_star = np.zeros_like(psf_file[mod]) + new_m42_star = np.zeros_like(psf_file[mod]) if self._params["psf"] == "psfex": header = fits.Header.fromstring( @@ -580,10 +636,6 @@ def transform_exposures(self, output_dir, patch, idx, exp_run_dir): k = 0 for ind, obj in enumerate(psf_file): try: - # jac = wcs.jacobian(world_pos=galsim.CelestialCoord( - # ra=obj["RA"]*galsim.degrees, - # dec=obj["DEC"]*galsim.degrees - # )) jac = wcs.jacobian( image_pos=galsim.PositionD( obj["X"], @@ -592,6 +644,7 @@ def transform_exposures(self, output_dir, patch, idx, exp_run_dir): ) except Exception: continue + g1_psf_tmp, g2_psf_tmp, sig_psf_tmp = transform_shape( [ obj["E1_PSF_HSM"], @@ -600,10 +653,14 @@ def transform_exposures(self, output_dir, patch, idx, exp_run_dir): ], jac, ) - new_e1_psf[ind] = g1_psf_tmp new_e2_psf[ind] = g2_psf_tmp new_sig_psf[ind] = sig_psf_tmp + new_m41_psf[ind], new_m42_psf[ind], _ = transform_shape( + [obj["M_4_PSF_1"], obj["M_4_PSF_2"], 0], + jac, + rescale=True, + ) g1_star_tmp, g2_star_tmp, sig_star_tmp = transform_shape( [ @@ -616,6 +673,12 @@ def transform_exposures(self, output_dir, patch, idx, exp_run_dir): new_e1_star[ind] = g1_star_tmp new_e2_star[ind] = g2_star_tmp new_sig_star[ind] = sig_star_tmp + new_m41_star[ind], new_m42_star[ind], _ = transform_shape( + [obj["M_4_STAR_1"], obj["M_4_STAR_2"], 0], + jac, + rescale=True, + ) + k += 1 exp_cat = np.array( @@ -631,10 +694,14 @@ def transform_exposures(self, output_dir, patch, idx, exp_run_dir): new_e1_psf, new_e2_psf, new_sig_psf, + new_m41_psf, + new_m42_psf, psf_file["FLAG_PSF_HSM"], new_e1_star, new_e2_star, new_sig_star, + new_m41_star, + new_m42_star, psf_file["FLAG_STAR_HSM"], np.ones_like(psf_file["RA"], dtype=int) * ccd_id, diff --git a/scripts/python/get_ccds_with_psf.py b/scripts/python/get_ccds_with_psf.py old mode 100644 new mode 100755 diff --git a/scripts/python/summary_params_pre_v2.py b/scripts/python/summary_params_pre_v2.py index 
10118039..1cbe59b2 100644 --- a/scripts/python/summary_params_pre_v2.py +++ b/scripts/python/summary_params_pre_v2.py @@ -248,14 +248,19 @@ def set_jobs_v2_pre_v2(patch, verbose): verbose=verbose, ) - # Post-processing + # PSF validation jobs["1024"] = job_data( - "1024", - ["run_sp_combined_final"], - ["make_catalog_runner"], - "tile_IDs", + 1024, + "run_sp_exp_Pv", + ["psfex_interp_runner"] * 2, + "shdus", + n_mult=[1] * 2, path_main=path_main, - path_left="output", + path_left="exp_runs", + output_subdirs="shdus", + path_right="output", + path_output=["output", "logs"], + special=[False, True], verbose=verbose, ) @@ -269,6 +274,6 @@ def set_jobs_v2_pre_v2(patch, verbose): verbose=verbose, ) - return jobs, list_tile_IDs_dot + return jobs, list_tile_IDs_dot, path_main diff --git a/scripts/python/summary_run.py b/scripts/python/summary_run.py index f24d6574..d7ac36d5 100755 --- a/scripts/python/summary_run.py +++ b/scripts/python/summary_run.py @@ -19,7 +19,7 @@ def main(argv=None): verbose = False - jobs, list_tile_IDs_dot = set_jobs_v2_pre_v2(patch, verbose) + jobs, list_tile_IDs_dot, path_main = set_jobs_v2_pre_v2(patch, verbose) list_tile_IDs = job_data.replace_dot_dash(list_tile_IDs_dot) @@ -28,7 +28,7 @@ def main(argv=None): job_data.print_stats_header() - exp_IDs_path = "exp_numbers.txt" + exp_IDs_path = f"{path_main}/exp_numbers.txt" if os.path.exists(exp_IDs_path): # Read exposure ID list if file exists all_exposures = get_IDs_from_file(exp_IDs_path) @@ -54,7 +54,7 @@ def main(argv=None): par_runtime, all_exposures ) - jobs[key].write_IDs_to_file("exp_numbers.txt", all_exposures) + jobs[key].write_IDs_to_file(exp_IDs_path, all_exposures) jobs[key].check_numbers(par_runtime, indices=[2]) diff --git a/scripts/python/update_runs_log_file.py b/scripts/python/update_runs_log_file.py old mode 100644 new mode 100755 diff --git a/scripts/sh/combine_runs.bash b/scripts/sh/combine_runs.bash index 8cbf9bbd..3c0772f7 100755 --- a/scripts/sh/combine_runs.bash +++ 
b/scripts/sh/combine_runs.bash @@ -20,7 +20,7 @@ usage="Usage: $(basename "$0") [OPTIONS] \tPSF model, allowed are 'psfex', 'mccd', 'setools', default='$psf'\n -c, --cat TYPE\n \tCatalogue type, allowed are 'final', 'flag_tile', 'flag_exp', \n - \t'psf', 'psf_conv', 'image', 'shdu', default='$cat'\n + \t'psf', 'psf_val', 'psf_conv', 'image', 'shdu', default='$cat'\n " ## Parse command line @@ -53,10 +53,11 @@ if [ "$cat" != "final" ] \ && [ "$cat" != "tile_detection" ] \ && [ "$cat" != "flag_exp" ] \ && [ "$cat" != "psf" ] \ + && [ "$cat" != "psf_val" ] \ && [ "$cat" != "psf_conv" ] \ && [ "$cat" != "image" ] \ && [ "$cat" != "shdu" ]; then - echo "cat (option -c) needs to be 'final', 'tile_detection', 'flag_tile', 'flag_exp', 'psf', 'psf_conv', 'shdu', or 'image'" + echo "cat (option -c) needs to be 'final', 'tile_detection', 'flag_tile', 'flag_exp', 'psf', 'psf_val', 'psf_conv', 'shdu', or 'image'" exit 2 fi @@ -163,9 +164,14 @@ elif [ "$cat" == "psf" ]; then module="mccd_interp_runner" fi +elif [ "$cat" == "psf_val" ]; then + + run_in="$pwd/exp_runs/*/$out_base/run_sp_exp_Pv_*" + pattern="validation_psf-*" + module="psfex_interp_runner" + elif [ "$cat" == "psf_conv" ]; then - #run_in="$pwd/../P?" 
run_in="$pwd" pattern="validation_psf_conv-*" module="psfex_interp_runner" diff --git a/scripts/sh/curl_canfar_local.sh b/scripts/sh/curl_canfar_local.sh index ada5dc4f..6798d9e5 100755 --- a/scripts/sh/curl_canfar_local.sh +++ b/scripts/sh/curl_canfar_local.sh @@ -32,7 +32,7 @@ sm=1 pat="- " ## Help string -usage="Usage: $(basename "$0") -j JOB -[e ID |-f file_IDs] -k KIND [OPTIONS] +usage="Usage: $(basename "$0") -j JOB -[e ID |-f file_IDs] [OPTIONS] \n\nOptions:\n -h\tthis message\n -j, --job JOB\tRunning JOB, bit-coded\n @@ -58,6 +58,8 @@ usage="Usage: $(basename "$0") -j JOB -[e ID |-f file_IDs] -k KIND [OPTIONS] \tremote command to run on canfar, default='$cmd_remote'\n -S, --scratch\n \tprocessing scratch directory, default is None ($scratch)\n + -B, --batch\n + \tbatch size = number of jobs per iteration, default=$batch\n -b, --batch_max\n \tmaximum batch size = number of jobs run simultaneously, default=$batch_max\n --debug_out PATH\n diff --git a/scripts/sh/functions.sh b/scripts/sh/functions.sh index 02f6544b..4c51e51a 100644 --- a/scripts/sh/functions.sh +++ b/scripts/sh/functions.sh @@ -131,6 +131,7 @@ function get_kind_from_job() { exit 6 fi + # job=2 -> set kind to exp kind="exp" elif [ $job_to_test == 8 ]; then if [ "$kind" == "tile" ]; then @@ -138,6 +139,16 @@ function get_kind_from_job() { exit 6 fi + # job=8 -> set kind to exp + kind="exp" + + elif [ $job_to_test == 1024 ]; then + if [ "$kind" == "tile" ]; then + echo "Error: Invalid job $job. 
mixing tile and exp kinds" + exit 6 + fi + + # job=1024 -> set kind to exp kind="exp" else if [ "$kind" == "exp" ]; then @@ -145,13 +156,14 @@ function get_kind_from_job() { exit 6 fi - # job != 32 -> set kind to tile + # job != 2, 8, 32, 1024 -> set kind to tile kind="tile" fi fi # Multiply job number by two to get next bitwise number + job_to_test=$((job_to_test * 2)) done diff --git a/scripts/sh/init_run_exclusive_canfar.sh b/scripts/sh/init_run_exclusive_canfar.sh index 67f62797..18ce0165 100755 --- a/scripts/sh/init_run_exclusive_canfar.sh +++ b/scripts/sh/init_run_exclusive_canfar.sh @@ -28,7 +28,7 @@ pat="-- " ## Help string -usage="Usage: $(basename "$0") -j JOB -e ID -k KIND [OPTIONS] +usage="Usage: $(basename "$0") -j JOB -e ID [OPTIONS] \n\nOptions:\n -h\tthis message\n -j, --job JOB\tRUnning JOB, bit-coded\n @@ -334,8 +334,6 @@ command "ln -sf $dir/output/run_sp_Ma_tile" $dry_run if [ "$sp_local" == "0" ]; then # Exposures command "ln -sf $dir/output/run_sp_Ma_exp" $dry_run -#else - #command "rm -f $dir/output/run_sp_Ma_exp" $dry_run fi # Check for existing exp SpMh dir @@ -353,11 +351,7 @@ fi (( do_job = $job & 2 )) if [ $do_job != 0 ] && [ "$sp_local" == "1" ]; then -#if [ ! -e "run_exp_Sp_shdu" ] && [ "$sp_local" == "1" ]; then - # run local Sp if not done already; works only with mh_local=1; this step needs to be done - # before following mh_local=1 steps message "run local sp" $debug_out -1 - #command "rm -rf run_sp_GitFeGie*/get_images_runner_run_2" $dry_run command "rm -rf run_sp_Gie*" $dry_run command "rm -rf run_sp_exp_Sp*" $dry_run @@ -420,8 +414,7 @@ fi if [ "$mh_local" == "0" ]; then if [ ! 
-f log_exp_headers.sqlite ]; then - # Global Mh and file does not exist -> symlink to - # gllobal mh file + # Global Mh and file does not exist -> symlink to global mh file message "creating global link to exp headers (mh_local=0)" $debug_out -1 command "ln -s $dir/output/log_exp_headers.sqlite" $dry_run else @@ -490,15 +483,6 @@ if [[ $do_job != 0 ]]; then command "link_to_exp_for_tile.py -t $ID -i tile_runs -I exp_runs -s $sp_local" $dry_run cd ${kind}_runs/$ID/output - # Remove duplicate job-16 runs (tile detection) - # New (P8) commented - #n_16=`ls -rt1d run_sp_tile_Sx_* | wc -l` - #if [ "$n_16" != "1" ]; then - #n_remove="$(($n_16-1))" - #echo "removing $n_remove duplicate old job-16 runs" - #command "rm -rf `ls -rt1d run_sp_tile_Sx_* | head -$n_remove`" $dry_run - #fi - # Remove previous runs of this job rm -rf run_sp_tile_PsViSmVi* fi @@ -552,7 +536,7 @@ fi if [[ $do_job != 0 ]]; then # Remove previous runs of this job - rm -rf run_sp_Ms_20??_* + rm -rf run_sp_Ms_20??-* fi @@ -560,8 +544,24 @@ fi if [[ $do_job != 0 ]]; then # Remove previous runs of this job - rm -rf run_sp_Mc_20??_* + rm -rf run_sp_Mc_20??-* + +fi + +(( do_job = $job & 1024 )) +if [[ $do_job != 0 ]]; then + + # Remove duplicate exp runs + n=`ls -rdtl run_sp_exp_SxSePsf* | wc -l` + if [ "$n" != "1" ]; then + n_remove=$((n-1)) + echo "Remove ${n_remove} previous exp jobs" + rm -rf `ls -rdt1 run_sp_exp_SxSePsf* | head -n $n_remove` + fi + # Remove previous runs of this job + echo "Remove previous Pv jobs" + rm -rf `pwd`/run_sp_exp_Pv_20??-* fi diff --git a/scripts/sh/job_sp_canfar.bash b/scripts/sh/job_sp_canfar.bash index 54036c93..a4bf8526 100755 --- a/scripts/sh/job_sp_canfar.bash +++ b/scripts/sh/job_sp_canfar.bash @@ -40,6 +40,7 @@ usage="Usage: $(basename "$0") [OPTIONS] [TILE_ID] \t 64: galaxy selection on tiles (offline)\n \t 128: shapes and morphology (offline)\n \t 256: paste catalogues (offline)\n + \t 512: create joint (ngmix+SExtr) catalogues (offline)\n -c, --config_dir DIR\n \t 
config file directory, default='$config_dir'\n -p, --psf MODEL\n @@ -569,13 +570,12 @@ if [[ $do_job != 0 ]]; then fi -# MKDEBUG: Putting Mh at the end for now, could be integrated before 16. (( do_job = $job & 1024 )) if [[ $do_job != 0 ]]; then command_cfg_shapepipe \ - "config_exp_Mh.ini" \ - "Run shapepipe (merge exp headers)" \ + "config_exp_psfex_validation.ini" \ + "Run shapepipe (PSF validation only)" \ $n_smp \ $exclusive diff --git a/src/shapepipe/modules/merge_starcat_package/merge_starcat.py b/src/shapepipe/modules/merge_starcat_package/merge_starcat.py index 3b06cd07..6d786b0d 100644 --- a/src/shapepipe/modules/merge_starcat_package/merge_starcat.py +++ b/src/shapepipe/modules/merge_starcat_package/merge_starcat.py @@ -554,8 +554,8 @@ def process(self): """ x, y, ra, dec = [], [], [], [] - g1_psf, g2_psf, size_psf = [], [], [] - g1, g2, size = [], [], [] + g1_psf, g2_psf, size_psf, m41_psf, m42_psf = [], [], [], [], [] + g1, g2, size, m41, m42 = [], [], [], [], [] flag_psf, flag_star = [], [] mag, snr, psfex_acc = [], [], [] ccd_nb = [] @@ -584,8 +584,12 @@ def process(self): g1_psf += list(data_j["E1_PSF_HSM"]) g2_psf += list(data_j["E2_PSF_HSM"]) size_psf += list(data_j["SIGMA_PSF_HSM"] ** 2) + m41_psf += list(data_j["M_4_PSF_1"]) + m42_psf += list(data_j["M_4_PSF_2"]) g1 += list(data_j["E1_STAR_HSM"]) g2 += list(data_j["E2_STAR_HSM"]) + m41 += list(data_j["M_4_STAR_1"]) + m42 += list(data_j["M_4_STAR_2"]) size += list(data_j["SIGMA_STAR_HSM"] ** 2) # flags @@ -633,9 +637,13 @@ def process(self): "E1_PSF_HSM": g1_psf, "E2_PSF_HSM": g2_psf, "SIGMA_PSF_HSM": np.sqrt(size_psf), + "M_4_PSF_1": m41_psf, + "M_4_PSF_2": m42_psf, "E1_STAR_HSM": g1, "E2_STAR_HSM": g2, "SIGMA_STAR_HSM": np.sqrt(size), + "M_4_STAR_1": m41, + "M_4_STAR_2": m42, "FLAG_PSF_HSM": flag_psf, "FLAG_STAR_HSM": flag_star, "MAG": mag, diff --git a/src/shapepipe/modules/psfex_interp_package/psfex_interp.py b/src/shapepipe/modules/psfex_interp_package/psfex_interp.py index
da12f258..a094d0e1 100644 --- a/src/shapepipe/modules/psfex_interp_package/psfex_interp.py +++ b/src/shapepipe/modules/psfex_interp_package/psfex_interp.py @@ -2,7 +2,7 @@ This module computes the PSFs from a PSFEx model at several galaxy positions. -:Authors: Morgan Schmitz, Axel Guinot, Martin Kilbinger +:Authors: Morgan Schmitz, Axel Guinot, Martin Kilbinger, Sacha Guerrini """ @@ -12,6 +12,7 @@ import numpy as np from astropy.io import fits from sqlitedict import SqliteDict +import scipy.linalg as alg from shapepipe.pipeline import file_io @@ -319,6 +320,47 @@ def _get_psfshapes(self): hsm.FindAdaptiveMom(Image(psf), strict=False) for psf in self.interp_PSFs ] + #Compute the list of pq indexes + pqlist = [ + [p, 4-p] for p in range(0, 5) + ] + + #Compute the 4th order moments (Code taken from PSFHOME) + for psf, moms in zip(self.interp_PSFs, psf_moms): + + #Compute the normalisation of the image + y, x = np.mgrid[:psf.shape[0], :psf.shape[1]]+1 + + M = np.zeros((2, 2)) + e1 = moms.observed_shape.e1 + e2 = moms.observed_shape.e2 + sigma4 = moms.moments_sigma**4 + c = (1+e1) / (1-e1) + M[1, 1] = np.sqrt(sigma4 / (c-0.25*e2**2*(1+c)**2)) + M[0, 0] = c*M[1, 1] + M[0, 1] = 0.5*e2*(M[1, 1] + M[0, 0]) + M[1, 0] = M[0, 1] + + pos = np.array([x-moms.moments_centroid.x, y-moms.moments_centroid.y]) + inv_M = np.linalg.inv(M) + sqrt_inv_M = alg.sqrtm(inv_M) + + std_pos = np.einsum('ij,jqp->iqp',sqrt_inv_M,pos) + weight = np.exp(-0.5* np.einsum('ijk,ijk->jk',std_pos,std_pos )) + + std_x, std_y = std_pos[0],std_pos[1] + + normalization = np.sum(psf*weight) + image_weight = weight*psf + + #Compute the 4th moment for each index + moms.fourth_order_moments = {} + for p,q in pqlist: + moms.fourth_order_moments.update({str(p)+str(q): np.sum(image_weight*std_x**p*std_y**q)/normalization}) + + moms.fourth_moment_1 = moms.fourth_order_moments['40'] - moms.fourth_order_moments['04'] + moms.fourth_moment_2 = 2*(moms.fourth_order_moments['13'] + moms.fourth_order_moments['31']) + 
self.psf_shapes = np.array( [ [ @@ -326,6 +368,8 @@ def _get_psfshapes(self): moms.observed_shape.g2, moms.moments_sigma, int(bool(moms.error_message)), + moms.fourth_moment_1, + moms.fourth_moment_2, ] for moms in psf_moms ] @@ -350,6 +394,8 @@ def _write_output(self): "E2_PSF_HSM": self.psf_shapes[:, 1], "SIGMA_PSF_HSM": self.psf_shapes[:, 2], "FLAG_PSF_HSM": self.psf_shapes[:, 3].astype(int), + "M_4_PSF_1": self.psf_shapes[:, 4], + "M_4_PSF_2": self.psf_shapes[:, 5], } else: data = {"VIGNET": self.interp_PSFs} @@ -434,6 +480,49 @@ def _get_starshapes(self, star_vign): for star, mask in zip(star_vign, masks) ] + #Compute the list of pq indexes + pqlist = [ + [p, 4-p] for p in range(0, 5) + ] + + #Compute the 4th order moments (Code taken from PSFHOME) + for star, mask, moms in zip(star_vign, masks, star_moms): + + star[mask == 1] = 0 + + #Compute the normalisation of the image + y, x = np.mgrid[:star.shape[0], :star.shape[1]]+1 + + M = np.zeros((2, 2)) + e1 = moms.observed_shape.e1 + e2 = moms.observed_shape.e2 + sigma4 = moms.moments_sigma**4 + c = (1+e1) / (1-e1) + M[1, 1] = np.sqrt(sigma4 / (c-0.25*e2**2*(1+c)**2)) + M[0, 0] = c*M[1, 1] + M[0, 1] = 0.5*e2*(M[1, 1] + M[0, 0]) + M[1, 0] = M[0, 1] + + pos = np.array([x-moms.moments_centroid.x, y-moms.moments_centroid.y]) + inv_M = np.linalg.inv(M) + sqrt_inv_M = alg.sqrtm(inv_M) + + std_pos = np.einsum('ij,jqp->iqp',sqrt_inv_M,pos) + weight = np.exp(-0.5* np.einsum('ijk,ijk->jk',std_pos,std_pos )) + + std_x, std_y = std_pos[0],std_pos[1] + + normalization = np.sum(star*weight) + image_weight = weight*star + + #Compute the 4th moment for each index + moms.fourth_order_moments = {} + for p,q in pqlist: + moms.fourth_order_moments.update({str(p)+str(q): np.sum(image_weight*std_x**p*std_y**q)/normalization}) + + moms.fourth_moment_1 = moms.fourth_order_moments['40'] - moms.fourth_order_moments['04'] + moms.fourth_moment_2 = 2*(moms.fourth_order_moments['13'] + moms.fourth_order_moments['31']) + self.star_shapes = 
np.array( [ [ @@ -441,6 +530,8 @@ def _get_starshapes(self, star_vign): moms.observed_shape.g2, moms.moments_sigma, int(bool(moms.error_message)), + moms.fourth_moment_1, + moms.fourth_moment_2 ] for moms in star_moms ] @@ -503,10 +594,14 @@ def _write_output_validation(self, star_dict, psfex_cat_dict): "E2_PSF_HSM": self.psf_shapes[:, 1], "SIGMA_PSF_HSM": self.psf_shapes[:, 2], "FLAG_PSF_HSM": self.psf_shapes[:, 3].astype(int), + "M_4_PSF_1": self.psf_shapes[:, 4], + "M_4_PSF_2": self.psf_shapes[:, 5], "E1_STAR_HSM": self.star_shapes[:, 0], "E2_STAR_HSM": self.star_shapes[:, 1], "SIGMA_STAR_HSM": self.star_shapes[:, 2], "FLAG_STAR_HSM": self.star_shapes[:, 3].astype(int), + "M_4_STAR_1": self.star_shapes[:, 4], + "M_4_STAR_2": self.star_shapes[:, 5], } data = {**data, **star_dict} diff --git a/src/shapepipe/utilities/summary.py b/src/shapepipe/utilities/summary.py index 46ad69bd..c99a955b 100755 --- a/src/shapepipe/utilities/summary.py +++ b/src/shapepipe/utilities/summary.py @@ -198,6 +198,15 @@ def check_special_one(module, path): msg = "found array of size 0" return msg, code + m = re.search( + "Not enough stars to interpolate the psf", + line, + ) + if m: + code = 8 + msg = "not enough stars" + return msg, code + if module == "mask_runner": m = re.search("Empty or corrupt FITS file", line) if m: