diff --git a/environment.yml b/environment.yml index cc06efa..55337c3 100644 --- a/environment.yml +++ b/environment.yml @@ -3,6 +3,7 @@ channels: - conda-forge - defaults dependencies: + - selenium - bzip2=1.0.8=h99b78c6_7 - ca-certificates=2024.7.4=hf0a4a13_0 - colorama=0.4.6=pyhd8ed1ab_0 diff --git a/pyproject.toml b/pyproject.toml index b45347e..e40a750 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,7 +46,10 @@ dependencies = [ "exoplanet-core", "pymc>=4", "schwimmbad", - "astroquery" + "selenium", + "requests", + "astroquery", + "webdriver_manager" ] dynamic = ["version"] diff --git a/src/scope/calc_quantities.py b/src/scope/calc_quantities.py index 6aeaa08..172249c 100644 --- a/src/scope/calc_quantities.py +++ b/src/scope/calc_quantities.py @@ -1,6 +1,7 @@ """ This module contains functions to calculate derived quantities from the input data. """ +import os from math import floor import astropy.constants as const @@ -9,6 +10,11 @@ import numpy as np from exoplanet.orbits.keplerian import KeplerianOrbit +from scope.logger import get_logger +from scope.scrape_igrins_etc import scrape_crires_plus_etc, scrape_igrins_etc + +logger = get_logger() + # refactor the below two functions to be a single function @@ -72,6 +78,34 @@ def calc_param_boilerplate(param, args, data, distance_unit): return data +def calc_snr(param, args, data): + detector_reset_times = { + "IGRINS": 60, + "CRIRES+": 0, + } # these are in seconds. crires+ considers this as part of DIT in the ETC. 
+ if data[param] == -1: # only calculate if set to -1 + kmag = data["Kmag"] + n_exposures = data["n_exposures"] + instrument = data["instrument"] + detector_reset_time = detector_reset_times[instrument] + duration = ( + data["P_rot"] * (data["phase_end"] - data["phase_start"]) * 24 * 3600 + ) # in seconds + exptime = duration / n_exposures - detector_reset_time + # per-exposure integration time, after subtracting the detector reset overhead + + if instrument == "IGRINS": + snr = scrape_igrins_etc(kmag, exptime) + elif instrument == "CRIRES+": + snr = scrape_crires_plus_etc(kmag, exptime) + else: + logger.error(f"Unknown instrument {instrument}!") + # need to construct a JSON object to send to the CRIRES+ SNR calculator. + # ok need to submit the job automatically to the IGRINS SNR calculator. + data[param] = snr + data["snr_path"] = os.getcwd() + "/snr.json" + + def calc_max_npix(param, args, data): # todo: refactor the instrument data to be somwhere! instrument_resolutions_dict = { @@ -80,7 +114,7 @@ def calc_max_npix(param, args, data): } pixel_per_res = {"IGRINS": 3.3, "CRIRES+": 3.3} - if data[param] == 0: + if data[param] == -1: check_args(args, data, param) period = data["P_rot"] mstar = data["Mstar"] @@ -262,7 +296,7 @@ def calc_crossing_time( period = period * u.day dphase_per_exp = (np.min(res_crossing_time_phases) / period).si - phasedur = phase_end - phase_start # degrees. todo: calculate. 
+ phasedur = phase_end - phase_start n_exp = phasedur / dphase_per_exp # todo: actually get phase_start and phase_end in there diff --git a/src/scope/input_output.py b/src/scope/input_output.py index e2b811f..9ed93ab 100644 --- a/src/scope/input_output.py +++ b/src/scope/input_output.py @@ -4,6 +4,7 @@ import argparse import io +import json import os import warnings from datetime import datetime @@ -40,6 +41,7 @@ def __init__(self, message="scope input file error:"): "e": "pl_orbeccen", "peri": "pl_orblper", "v_rot_star": "st_vsin", + "Kmag": "sy_kmag", } @@ -428,6 +430,48 @@ def parse_arguments(): return parser.parse_args() +def read_crires_data(data_path): + """ + Reads in CRIRES data. + + Inputs + ------ + :data_path: (str) path to the data + + Outputs + ------- + n_orders: (int) number of orders + n_pixel: (int) number of pixels + wl_cube_model: (array) wavelength cube model + snrs: (array) signal-to-noise ratios + """ + with open(data_path, "r") as file: + data = json.load(file) + + n_orders = 0 # an integer :) + for i in range(len(data["data"]["orders"])): + order_len = len(data["data"]["orders"][i]["detectors"]) + n_orders += order_len + + n_wavs = len(data["data"]["orders"][i]["detectors"][0]["wavelength"]) + + wl_grid = np.zeros((n_orders, n_wavs)) + snr_grid = np.zeros((n_orders, n_wavs)) + + for i in range(len(data["data"]["orders"])): + order_len = len(data["data"]["orders"][i]["detectors"]) + for j in range(order_len): + wl_grid[i * order_len + j] = data["data"]["orders"][i]["detectors"][j][ + "wavelength" + ] + + snr_grid[i * order_len + j] = data["data"]["orders"][i]["detectors"][j][ + "plots" + ]["snr"]["snr"] + + return n_orders, n_wavs, wl_grid * 1e6, snr_grid + + def refresh_db(): """ Refresh the database with the latest exoplanet data. 
diff --git a/src/scope/noise.py b/src/scope/noise.py index f1ec60a..dbd0c0a 100644 --- a/src/scope/noise.py +++ b/src/scope/noise.py @@ -81,7 +81,7 @@ def add_quadratic_noise(flux_cube_model, wl_grid, SNR, IGRINS=False, **kwargs): * flux_cube_model[order][exposure] / np.nanmax(flux_cube_model[order][exposure]) ) - + noisy_flux[order][exposure] = np.random.poisson(flux_level) # a few quick checks to make sure that nothing has gone wrong with adding noise @@ -121,7 +121,7 @@ def add_crires_plus_noise(flux_cube_model, wl_grid, SNR): for exposure in range(flux_cube_model.shape[1]): for order in range(flux_cube_model.shape[0]): flux_level = ( - SNR**2 + SNR[order] ** 2 * flux_cube_model[order][exposure] / np.nanmax(flux_cube_model[order][exposure]) ) diff --git a/src/scope/run_simulation.py b/src/scope/run_simulation.py index 6bec9a9..7918e9c 100644 --- a/src/scope/run_simulation.py +++ b/src/scope/run_simulation.py @@ -567,7 +567,7 @@ def simulate_observation( pca_removal=pca_removal, ) - run_name = f"{n_princ_comp}_NPC_{blaze}_blaze_{star}_star_{telluric}_telluric_{SNR}_SNR_{tell_type}_{time_dep_tell}_{wav_error}_{order_dep_throughput}_{seed}" + run_name = f"{n_princ_comp}_NPC_{blaze}_blaze_{star}_star_{telluric}_telluric_{round(np.mean(SNR))}_SNR_{tell_type}_{time_dep_tell}_{wav_error}_{order_dep_throughput}_{seed}" save_data(outdir, run_name, flux_cube, flux_cube_nopca, A_noplanet, just_tellurics) diff --git a/src/scope/scrape_igrins_etc.py b/src/scope/scrape_igrins_etc.py index 2dee690..8ddf4a7 100644 --- a/src/scope/scrape_igrins_etc.py +++ b/src/scope/scrape_igrins_etc.py @@ -1,11 +1,24 @@ """ pings the IGRINS exposure time calculator. 
+ + +includes functions from etc_cli.py, available on the ESO website: e.g., https://etc.eso.org/crires2 """ +import json +import os import re +import sys +import urllib.parse +import numpy as np +import requests from selenium import webdriver from selenium.common.exceptions import ElementNotInteractableException +from selenium.webdriver.chrome.options import Options +from selenium.webdriver.chrome.service import Service from selenium.webdriver.common.by import By +from webdriver_manager.chrome import ChromeDriverManager +from webdriver_manager.microsoft import EdgeChromiumDriverManager from scope.logger import * @@ -28,8 +41,17 @@ def scrape_igrins_etc(kmag, exposure_time): float Exposure time in seconds. """ + options = Options() + options.add_argument("--headless") # Run in headless mode + + # Use WebDriver Manager to handle ChromeDriver installation + service = Service(ChromeDriverManager().install()) + + # Start Chrome WebDriver with options + driver = webdriver.Chrome(service=service, options=options) + + # Start Edge WebDriver with options - driver = webdriver.Chrome() web_address = "https://igrins-jj.firebaseapp.com/etc/simple" driver.get(web_address) @@ -69,3 +91,243 @@ def extract_snr_from_html(html_content): else: logger.warning("SNR value not found.") return snr_value + + +# todo: automatically get the orders? or only allow one settingkey? + + +def get_closest_marcs_teff(teff): + marcs_grid = np.concatenate( + [np.arange(3000, 4100, 100), np.arange(4000, 8250, 250)] + ) + return marcs_grid[ + np.argmin(np.abs(marcs_grid - teff)) + ] # return the closest value in the grid + + +def get_closest_marcs_logg(logg): + marcs_grid = np.arange(3, 5.5, 0.5) + return marcs_grid[ + np.argmin(np.abs(marcs_grid - logg)) + ] # return the closest value in the grid + + +# for now, only K band. 
+def scrape_crires_plus_etc(kmag, stellar_teff, stellar_logg, exposure_time): + marcs_teff = get_closest_marcs_teff(stellar_teff) + marcs_logg = get_closest_marcs_logg(stellar_logg) + marcs_id = f"{marcs_teff}:{marcs_logg}" + create_json(marcs_id, kmag, [23, 24, 25, 26, 27, 28, 29], exposure_time, "K2148") + + # now ping the CRIRES+ ETC + main_etc_caller("input.json", "output.json") + + return + + +def create_json( + marcs_id, + target_brightness_mag, + instrument_order, + timesnr_DIT, + instrument_settingkey, + sky_airmass=1.0, + sky_moon_fli=0.5, + sky_pwv=2.0, + instrument_slit=0.2, + seeingiqao_turbulence_category=50, + seeingiqao_aperturepix=17, +): + data = { + "target": { + "morphology": {"morphologytype": "point"}, + "sed": { + "sedtype": "spectrum", + "spectrum": { + "spectrumtype": "template", + "params": { + "catalog": "MARCS", + "id": marcs_id, + }, + }, + "extinctionav": 0, + }, + "brightness": { + "brightnesstype": "mag", + "magband": "V", + "mag": target_brightness_mag, + "magsys": "vega", + }, + }, + "sky": {"airmass": sky_airmass, "moon_fli": sky_moon_fli, "pwv": sky_pwv}, + "instrument": { + "slit": instrument_slit, + "settingkey": instrument_settingkey, + "polarimetry": "free", + "order": instrument_order, + }, + "timesnr": {"DET1.NDIT": 1, "DET1.DIT": timesnr_DIT}, + "output": { + "throughput": { + "atmosphere": False, + "telescope": False, + "instrument": False, + "blaze": False, + "enslittedenergy": False, + "detector": False, + "totalinclsky": False, + }, + "snr": {"snr": True, "noise_components": True}, + "sed": {"target": False, "sky": False}, + "signals": {"obstarget": False, "obssky": False, "obstotal": False}, + "maxsignals": { + "maxpixeltarget": False, + "maxpixelsky": False, + "maxpixeltotal": False, + }, + "dispersion": {"dispersion": False}, + "psf": {"psf": False}, + }, + "instrumentName": "crires2", + "seeingiqao": { + "mode": "noao", + "params": {"turbulence_category": seeingiqao_turbulence_category}, + "aperturepix": 
seeingiqao_aperturepix, + }, + } + + # Writing the JSON to a file + with open("input.json", "w") as json_file: + json.dump(data, json_file, indent=4) + + logger.info("JSON file 'input.json' has been created.") + + +def collapse(jsondata): + def goThrough(x): + if isinstance(x, list): + return goThroughList(x) + elif isinstance(x, dict): + return goThroughDict(x) + else: + return x + + def goThroughDict(dic): + for key, value in dic.items(): + if isinstance(value, dict): + dic[key] = goThroughDict(value) + elif isinstance(value, list): + dic[key] = goThroughList(value) + return dic + + def goThroughList(lst): + if not any(not isinstance(y, (int, float)) for y in lst): # pure numeric list + if len(lst) <= 2: + return lst + else: + return ( + "[" + + str(lst[0]) + + " ... " + + str(lst[-1]) + + "] (" + + str(len(lst)) + + ")" + ) + else: + return [goThrough(y) for y in lst] + + return goThroughDict(jsondata) + + +def callEtc(postdata, url, uploadfile=None): + # Suppress SSL certificate warnings + import warnings + + warnings.filterwarnings("ignore", message="Unverified HTTPS request") + + # If no file is to be uploaded, send a POST request with JSON data: + if uploadfile is None: + try: + return requests.post( + url, + data=json.dumps(postdata), + headers={"Content-Type": "application/json"}, + verify=False, + ) # Send data with SSL verification disabled + except Exception as e: + print( + "Post request without upload returned error:" + str(e) + ) # Print error if POST fails + sys.exit() + else: + # Encode postdata as URL-safe string + encoded_data = urllib.parse.quote(json.dumps(postdata)) + # Construct the URL with filename and encoded JSON data as query parameters. 
+ request_url = ( + f"{url}?filename={os.path.basename(uploadfile)}&data={encoded_data}" + ) + try: + # Open the file to be uploaded in binary read mode + with open(uploadfile, "rb") as f: + # Create a dictionary for files to be uploaded + files = { + "file": (os.path.basename(uploadfile), f) + } # Key 'file' matches the server's expected key + # Make a POST request with the file and encoded JSON data in the URL + return requests.post( + request_url, files=files, verify=False + ) # Send file with SSL verification disabled + except Exception as e: + print( + "Post request with upload returned error:" + str(e) + ) # Print error if POST with file fails + sys.exit() + + +def output(jsondata, outputfile, indent, collapse): + if collapse: + jsondata = collapse(jsondata) + + if outputfile: + with open(outputfile, "w") as of: + of.write(json.dumps(jsondata, indent=indent)) + else: + print(json.dumps(jsondata, indent=indent)) + + +def getEtcUrl(etcname): + if ( + "4most" in etcname.lower() + or "qmost" in etcname.lower() + or "fourmost" in etcname.lower() + ): + return "Fourmost/" + elif "crires" in etcname.lower(): + return "Crires2/" + else: # normal case + return etcname.lower().capitalize() + "/" + + +def getPostdata(instrument_name, postdatafile): + try: + with open(postdatafile) as f: + postdata = json.loads(f.read()) + except OSError: + logger.error("cannot open %s", postdatafile) + sys.exit() + return postdata + + +def main_etc_caller(uploadfile, outputfile): + etcname = "crires2" + server = "https://etc.eso.org" + baseurl = server + "/observing/etc/etcapi/" + url = baseurl + getEtcUrl(etcname) + indent = 4 + collapse = False + + postdata = getPostdata(etcname, uploadfile) # prepare input + jsondata = callEtc(postdata, url, uploadfile).json() # output + + output(jsondata, outputfile, indent, collapse) diff --git a/src/scope/tests/test_io.py b/src/scope/tests/test_io.py index a2e51f5..89e6043 100644 --- a/src/scope/tests/test_io.py +++ b/src/scope/tests/test_io.py @@ 
-119,7 +119,7 @@ def sample_files_second(): blaze True star False instrument IGRINS -n_exposures 0 +n_exposures -1 tell_type data-driven # type of telluric simulation. supported modes are ``ATRAN`` and ``data-driven``. time_dep_tell False # whether the tellurics are time-dependent or not. """ @@ -195,13 +195,21 @@ def test_error_on_data_driven_tell(sample_files): assert "Data-driven tellurics requires blaze set to True." in str(exc.value) -def test_calc_n_max_when_input_0(sample_files_second): +def test_calc_n_max_when_input_neg(sample_files_second): input_file_path, db_file_path = sample_files_second data = parse_input_file(input_file_path, db_file_path) assert data["n_exposures"] > 0 and type(data["n_exposures"]) == int +def test_refresh_db(): + """ + just test that a reasonable db comes back + """ + df = refresh_db() + assert len(df) > 0 and "pl_name" in df.columns + + def test_database_columns(): # read in the exoplanet archive data input_file_path = os.path.join( @@ -211,11 +219,3 @@ def test_database_columns(): for value in parameter_mapping.values(): assert value in db.columns - - -def test_refresh_db(): - """ - just test that a reasonable db comes back - """ - df = refresh_db() - assert len(df) > 0 and "pl_name" in df.columns diff --git a/src/scope/tests/test_scraping.py b/src/scope/tests/test_scraping.py new file mode 100644 index 0000000..175ddf5 --- /dev/null +++ b/src/scope/tests/test_scraping.py @@ -0,0 +1,26 @@ +from scope.input_output import read_crires_data +from scope.scrape_igrins_etc import * + + +def test_scrape_igrins(): + res = scrape_igrins_etc(8, 120) + assert res == "100" + + +# now need to test the ... crires+ etc scraper. 
+ + +def test_scrape_crires_plus(): + kmag = 8 + stellar_teff = 5000 + stellar_logg = 4.5 + exposure_time = 120 + res = scrape_crires_plus_etc(kmag, stellar_teff, stellar_logg, exposure_time) + n_orders, n_wavs, wl_grid, snr_grid = read_crires_data("output.json") + assert ( + n_orders > 0 + and n_wavs > 0 + and len(wl_grid) > 0 + and len(snr_grid) > 0 + and np.mean(snr_grid) > 0 + ) diff --git a/src/scope/utils.py b/src/scope/utils.py index 81d1dbd..efde534 100644 --- a/src/scope/utils.py +++ b/src/scope/utils.py @@ -24,44 +24,6 @@ end_clip = 100 -def read_crires_data(data_path): - """ - Reads in CRIRES data. - - Inputs - ------ - :data_path: (str) path to the data - - Outputs - ------- - n_orders: (int) number of orders - n_pixel: (int) number of pixels - wl_cube_model: (array) wavelength cube model - snrs: (array) signal-to-noise ratios - """ - with open(data_path, "r") as file: - data = json.load(file) - - n_orders = 0 # an integer :) - for i in range(len(data["data"]["orders"])): - order_len = len(data["data"]["orders"][i]["detectors"]) - n_orders += order_len - - n_wavs = len(data["data"]["orders"][i]["detectors"][0]["wavelength"]) - - wl_grid = np.zeros((n_orders, n_wavs)) - snr_grid = np.zeros((n_orders, n_wavs)) - - for i in range(len(data["data"]["orders"])): - order_len = len(data["data"]["orders"][i]["detectors"]) - for j in range(order_len): - wl_grid[i * order_len + j] = data["data"]["orders"][i]["detectors"][j][ - "wavelength" - ] - - return n_orders, n_wavs, wl_grid * 1e6, snr_grid - - @njit def doppler_shift_planet_star( model_flux_cube,