# Source code for ecpi.pipeline.io.inputs

"""Read data input for pipeline
"""
import json
import logging
import os.path as osp
import os
from urllib import request
import numpy as np
from astropy.table import Table, vstack
from astropy.io import fits

# All Common and mandatory keywords must be downloaded at the beginning
# of the pipeline. They will be put into DataFlow to be available for genfits
# KW_FILES = ['Common-all', 'Obs-id-all', 'Common-gp']
KW_FILES = ['Common-all', 'Obs-id-all']
KEYWORDS_URL = 'https://fsc.svom.eu/jsonschemes/pcards/imports/'
# KEYWORDS_URL = 'https://fsc.svom.eu/jsonschemes/pcards/products/json/'

logger = logging.getLogger(__name__)


def read_files(l_files):
    """Load information contained in SVO-ATT-CNV files.

    :param l_files: path, or list of paths, to SVO-ATT-CNV FITS files
        as specified in the parameter file.
    :type l_files: list or str
    :return: stacked table of the attitude data, or ``False`` when a
        path does not point to an existing file.
    :rtype: astropy.table.Table or bool
    :raises AssertionError: if an empty list is given.
    """
    if isinstance(l_files, list):
        assert len(l_files) > 0
        # Read every file first, then stack once: stacking inside the
        # loop is quadratic in the number of rows.
        tables = []
        for file in l_files:
            if not os.path.isfile(file):
                logger.error(f"String {file} is not a valid filename")
                return False
            tables.append(Table.read(file, format='fits'))
            logger.info(f"loading in memory attitude file {file}")
        t_tab = vstack(tables, metadata_conflicts='silent')
    elif isinstance(l_files, str) and os.path.isfile(l_files):
        t_tab = Table.read(l_files, format='fits')
        logger.info(f"loading in memory attitude file {l_files}")
    else:
        logger.error(f"String {l_files} is not a valid filename or list of filenames")
        return False
    return t_tab
def read_orbit_files(l_files):
    """Load information contained in SVO-ORB-CNV files.

    :param l_files: list of paths to SVO-ORB-CNV FITS files as
        specified in the parameter file.
    :type l_files: list
    :return: stacked table of the orbit data.
    :rtype: astropy.table.Table
    :raises AssertionError: if an empty list is given.
    """
    assert len(l_files) > 0
    # Read every file first, then stack once: stacking inside the loop
    # is quadratic in the number of rows.
    tables = []
    for file in l_files:
        tables.append(Table.read(file, format='fits'))
        logger.info(f"loading in memory orbit file {file}")
    return vstack(tables, metadata_conflicts='silent')
def read_table_svo_att_cnv(p_files):
    """Read the data of the first extension of an SVO-ATT-CNV FITS file.

    Thin wrapper around :func:`read_data_ext` with the default
    extension index.

    :param p_files: path to the SVO-ATT-CNV FITS file.
    :type p_files: str
    :return: data of the first FITS extension.
    """
    return read_data_ext(p_files)
def read_table_svo_orb_cnv(p_files):
    """Read the data of the first extension of an SVO-ORB-CNV FITS file.

    Thin wrapper around :func:`read_data_ext` with the default
    extension index.

    :param p_files: path to the SVO-ORB-CNV FITS file.
    :type p_files: str
    :return: data of the first FITS extension.
    """
    return read_data_ext(p_files)
def read_data_ext(p_files, n_ext=1):
    """Return the data of one extension of a FITS file.

    :param p_files: path to the FITS file.
    :type p_files: str
    :param n_ext: index of the HDU whose data is returned (default 1,
        i.e. the first extension after the primary HDU).
    :type n_ext: int
    :return: data of the requested extension.
    """
    # Context manager guarantees the file handle is closed even if
    # accessing the extension raises (the original open/close pair
    # leaked the handle on error).
    with fits.open(p_files) as hdul:
        return hdul[n_ext].data
def check_time_coherence_evt_att(t_start, d_attitude, atol=1e-1):
    """Check the time consistency between event and attitude files.

    :param t_start: starting time of the observation in seconds.
    :type t_start: float
    :param d_attitude: dictionary collecting infos from attitude files;
        must provide the 'TIME_AAV' key.
    :type d_attitude: dict
    :param atol: absolute tolerance, in seconds, of the comparison
        (default 1e-1, the previously hard-coded value).
    :type atol: float
    :return: whether origin of time is coherent within ``atol``.
    :rtype: bool
    """
    date_att = d_attitude['TIME_AAV']
    logger.debug(f'check time coherence: t_start={t_start} | att file={date_att}')
    return np.isclose(t_start, date_att, atol=atol)
def download_common_keywords():
    """Downloads the json schemas for the common and mandatory keywords.

    :return: mapping of schema name (entries of ``KW_FILES``) to the
        parsed JSON schema; a schema whose download did not produce a
        local file is silently absent, as before.
    :rtype: dict
    """
    common_kws = {}
    for file in KW_FILES:
        n_url = f'{KEYWORDS_URL}sdb_upload_{file}.json'
        logger.info(f'Downloading json KWs file {n_url} from {KEYWORDS_URL} !')
        # urlretrieve stores the payload in a temporary file on disk.
        json_file, _ = request.urlretrieve(n_url)
        try:
            if osp.isfile(json_file):
                with open(json_file, "r") as json_schema:
                    common_kws[file] = json.load(json_schema)
        finally:
            # Remove the temporary file left behind by urlretrieve;
            # the original leaked one temp file per schema per call.
            request.urlcleanup()
    return common_kws