diff --git a/data/eph.ini b/data/eph.ini
new file mode 100644
index 000000000..a16e76b88
--- /dev/null
+++ b/data/eph.ini
@@ -0,0 +1,130 @@
+# Sorcha Configuration File
+
+
+[INPUT]
+
+# The simulation used for the ephemeris input.
+# Options: "ar", "external"
+ephemerides_type = ar
+
+# Format for ephemeris simulation output file. If reading from an existing temporary ephemeris
+# database, this will be ignored.
+# Options: csv, whitespace, hdf5
+eph_format = csv
+
+# SSPP chunks processing by object: how many objects should be processed at once?
+size_serial_chunk = 2000
+
+# Format for orbit/colour/brightness/cometary data files.
+# Options: comma, csv or whitespace
+aux_format = csv
+
+# SQL query for extracting data from the pointing database.
+pointing_sql_query = SELECT observationId, observationStartMJD as observationStartMJD_TAI, visitTime, visitExposureTime, filter, seeingFwhmGeom as seeingFwhmGeom_arcsec, seeingFwhmEff as seeingFwhmEff_arcsec, fiveSigmaDepth as fieldFiveSigmaDepth_mag, fieldRA as fieldRA_deg, fieldDec as fieldDec_deg, rotSkyPos as fieldRotSkyPos_deg FROM observations order by observationId
+
+
+[FILTERS]
+
+# Filters of the observations you are interested in, comma-separated.
+# Your physical parameters file must have H calculated in one of these filters
+# and colour offset columns defined relative to that filter.
+observing_filters = r
+
+
+[SATURATION]
+
+# Upper magnitude limit on sources that will overfill the detector pixels/have
+# counts above the non-linearity regime of the pixels where one can’t do
+# photometry. Objects brighter than this limit (in magnitude) will be cut.
+# Comment out for no saturation limit.
+# Two formats are accepted:
+# Single float: applies same saturation limit to observations in all filters.
+# Comma-separated list of floats: applies saturation limit per filter, in order as
+# given in observing_filters keyword.
+bright_limit = -9999
+
+
+[PHASECURVES]
+
+# The phase function used to calculate apparent magnitude. The physical parameters input
+# file must contain the columns needed to calculate the phase function.
+# Options: HG, HG1G2, HG12, linear, none.
+phase_function = HG
+
+
+[FOV]
+
+camera_model = circle
+fill_factor = 1.0
+
+# Radius of the circle for a circular footprint (in degrees). Float.
+# Comment out or do not include if using footprint camera model.
+circle_radius = {FOV}
+
+
+[FADINGFUNCTION]
+
+# Detection efficiency fading function on or off. Uses the fading function as outlined in
+# Chesley and Vereš (2017) to remove observations.
+fading_function_on = True
+
+# Width parameter for fading function. Should be greater than zero and less than 0.5.
+# Suggested value is 0.1 after Chesley and Vereš (2017).
+fading_function_width = 0.1
+
+# Peak efficiency for the fading function, called the 'fill factor' in Chesley and Vereš (2017).
+# Suggested value is 1. Do not change this unless you are sure of what you are doing.
+fading_function_peak_efficiency = 1.
+
+
+[SIMULATION]
+# Configuration for running the ASSIST+REBOUND ephemerides generator.
+
+# the field of view of our search field, in degrees
+ar_ang_fov = {FOV}
+
+# the buffer zone around the field of view we want to include, in degrees
+ar_fov_buffer = 0.2
+
+# the "picket" is our imprecise discretization of time that allows us to progress
+# our simulations forward without getting too granular when we don't have to.
+# the unit is number of days.
+ar_picket = 1
+
+# the obscode is the MPC observatory code for the provided telescope.
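+# The {OBSCODE} and {FOV} placeholders in this template are substituted at runtime by
+# GenerateEphemeridesTask from its observatoryCode and observatoryFOVRadius config fields.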
+ar_obs_code = {OBSCODE} + +# the order of healpix which we will use for the healpy portions of the code. +# the nside is equivalent to 2**ar_healpix_order +ar_healpix_order = 6 + + +[OUTPUT] + +# Output format. +# Options: csv, sqlite3, hdf5 +output_format = csv + +# Controls which columns are in the output files. +# Options are "basic" and "all", which returns all columns. +output_columns = all + + +[LIGHTCURVE] + +# The unique name of the lightcurve model to use. Defined in the ``name_id`` method +# of the subclasses of AbstractLightCurve. If not none, the complex physical parameters +# file must be specified at the command line.lc_model = none +lc_model = none + + +[ACTIVITY] + +# The unique name of the actvity model to use. Defined in the ``name_id`` method +# of the subclasses of AbstractCometaryActivity. If not none, a complex physical parameters +# file must be specified at the command line. +comet_activity = none + +[EXPERT] + +default_SNR_cut=False diff --git a/python/lsst/pipe/tasks/drpAssociationPipe.py b/python/lsst/pipe/tasks/drpAssociationPipe.py index 0a76a11f0..0e5662fd3 100644 --- a/python/lsst/pipe/tasks/drpAssociationPipe.py +++ b/python/lsst/pipe/tasks/drpAssociationPipe.py @@ -58,9 +58,9 @@ class DrpAssociationPipeConnections(pipeBase.PipelineTaskConnections, ssObjectTableRefs = pipeBase.connectionTypes.Input( doc="Reference to catalogs of SolarSolarSystem objects expected to be " "observable in each (visit, detector).", - name="preloaded_DRP_SsObjects", + name="preloaded_ss_object_visit", storageClass="ArrowAstropy", - dimensions=("instrument", "visit", "detector"), + dimensions=("instrument", "visit"), minimum=0, deferLoad=True, multiple=True @@ -236,7 +236,7 @@ def visitDetectorPair(dataRef): diaIdDict[visitDetectorPair(diaCatRef)] = diaCatRef if self.config.doSolarSystemAssociation: for ssCatRef in ssObjectTableRefs: - ssObjectIdDict[visitDetectorPair(ssCatRef)] = ssCatRef + ssObjectIdDict[ssCatRef.dataId["visit"]] = ssCatRef for finalVisitSummaryRef in finalVisitSummaryRefs: finalVisitSummaryIdDict[finalVisitSummaryRef.dataId["visit"]] = finalVisitSummaryRef @@ -247,9 +247,9 @@ def visitDetectorPair(dataRef): associatedSsSources, unassociatedSsObjects = None, None nSsSrc, nSsObj = 0, 0 # Always false if ! self.config.doSolarSystemAssociation - if all([(visit, detector) in ssObjectIdDict and visit in finalVisitSummaryIdDict]): + if (visit in ssObjectIdDict) and (visit in finalVisitSummaryIdDict): # Get the ssCat and finalVisitSummary - ssCat = ssObjectIdDict[(visit, detector)].get() + ssCat = ssObjectIdDict[visit].get() finalVisitSummary = finalVisitSummaryIdDict[visit].get() # Get the exposure metadata from the detector's row in the finalVisitSummary table. visitInfo = finalVisitSummary.find(detector).visitInfo diff --git a/python/lsst/pipe/tasks/generateEphemerides.py b/python/lsst/pipe/tasks/generateEphemerides.py new file mode 100644 index 000000000..c15888c21 --- /dev/null +++ b/python/lsst/pipe/tasks/generateEphemerides.py @@ -0,0 +1,367 @@ +# This file is part of ap_association. +# +# Developed for the LSST Data Management System. +# This product includes software developed by the LSST Project +# (https://www.lsst.org). +# See the COPYRIGHT file at the top-level directory of this distribution +# for details of code ownership. 
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Generate per-visit solar system ephemerides using Sorcha.
+
+Sorcha is an open-source solar system survey simulation tool, which
+efficiently generates ephemerides for solar system objects from input orbits
+and observation pointings. It is currently the fastest robust, actively
+maintained ephemeris generator, and it fits our use case well.
+
+Sorcha is a command line tool, and is not designed for direct use in Python.
+This task creates a temporary directory in which to run Sorcha as designed,
+providing it with all required input files and reading the Sorcha-generated
+ephemerides from a csv. While awkward and un-pipetask-like, it works and takes
+advantage of Sorcha's as-designed speed.
+
+Eventually, this should be replaced with adam_core's ephemeris generation
+tools, which propagate orbital uncertainty forward into on-sky ellipses.
+Doing so will require re-implementing the healpix binning method described
+in https://arxiv.org/abs/2506.02140. Doing so will not only improve this code
+but also allow on-sky uncertainties to be used during association. When this is
+done, mpsky should be modified to do the same.
+"""
+
+__all__ = ["GenerateEphemeridesConfig", "GenerateEphemeridesTask"]
+
+
+import numpy as np
+import os
+import pandas as pd
+from importlib import resources
+from sqlite3 import connect as sqlite_connect
+from subprocess import run, PIPE
+from tempfile import TemporaryDirectory
+from textwrap import dedent
+
+
+from lsst.pex.config import Field
+from lsst.pipe.base import connectionTypes, NoWorkFound, PipelineTask, \
+    PipelineTaskConfig, PipelineTaskConnections, Struct
+import lsst.pipe.tasks
+from lsst.utils.timer import timeMethod
+
+
+class GenerateEphemeridesConnections(PipelineTaskConnections,
+                                     dimensions=("instrument",)):
+
+    visitSummaries = connectionTypes.Input(
+        doc="Summary of visit information including ra, dec, and time",
+        name="preliminary_visit_summary",
+        storageClass="ExposureCatalog",
+        dimensions={"instrument", "visit"},
+        multiple=True
+    )
+    mpcorb = connectionTypes.Input(
+        doc="Minor Planet Center orbit table used for association",
+        name="mpcorb",
+        storageClass="DataFrame",
+        dimensions={},
+    )
+
+    # The following 9 prerequisite inputs are Sorcha's required auxiliary files.
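+    # They are static ephemeris kernels and MPC reference data, registered with the
+    # SSPAuxiliaryFile storage class and copied into Sorcha's cache directory by run().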
+ de440s = connectionTypes.PrerequisiteInput( + doc="NAIF DE440 ephemeris file (de440s.bsp)", + name="de440s", + storageClass="SSPAuxiliaryFile", + dimensions={}, + ) + sb441_n16 = connectionTypes.PrerequisiteInput( + doc="NAIF DE440 ephemeris file (sb441_n16.bsp)", + name="sb441_n16", + storageClass="SSPAuxiliaryFile", + dimensions={}, + ) + obsCodes = connectionTypes.PrerequisiteInput( + doc="MPC observatory code file (ObsCodes.json)", + name="obscodes", + storageClass="SSPAuxiliaryFile", + dimensions={}, + ) + linux_p1550p2650 = connectionTypes.PrerequisiteInput( + doc="TODO (linux_p1550p2650.440)", + name="linux_p1550p2650", + storageClass="SSPAuxiliaryFile", + dimensions={}, + ) + pck00010 = connectionTypes.PrerequisiteInput( + doc="orientation of planets, moons, the Sun, and selected asteroids. (pck00010.pck)", + name="pck00010", + storageClass="SSPAuxiliaryFile", + dimensions={}, + ) + earth_latest_high_prec = connectionTypes.PrerequisiteInput( + doc="High-precision Earth orientation parameters (EOP) kernel", + name="earth_latest_high_prec", + storageClass="SSPAuxiliaryFile", + dimensions={}, + ) + earth_620120_250826 = connectionTypes.PrerequisiteInput( + doc="Historical EOP", + name="earth_620120_250826", + storageClass="SSPAuxiliaryFile", + dimensions={}, + ) + earth_2025_250826_2125_predict = connectionTypes.PrerequisiteInput( + doc="Longterm EOP predictions", + name="earth_2025_250826_2125_predict", + storageClass="SSPAuxiliaryFile", + dimensions={}, + ) + naif0012 = connectionTypes.PrerequisiteInput( + doc="Leapsecond tls file", + name="naif0012", + storageClass="SSPAuxiliaryFile", + dimensions={}, + ) + + ssObjects = connectionTypes.Output( + doc="Sorcha-provided Solar System objects observable in this detector-visit", + name="preloaded_ss_object_visit", + storageClass="DataFrame", + dimensions=("instrument", "visit"), + multiple=True, + ) + + +class GenerateEphemeridesConfig( + PipelineTaskConfig, + pipelineConnections=GenerateEphemeridesConnections): + observatoryCode = Field( + dtype=str, + doc="IAU Minor Planet Center observer code for queries " + "(default is X05 for Rubin Obs./LSST)", + default='X05' + ) + observatoryFOVRadius = Field( + dtype=float, + doc="The field of view of the observatory (degrees)", + default=2.06, + ) + + +class GenerateEphemeridesTask(PipelineTask): + """Task to query the Sorcha service and retrieve the solar system objects + that are observable within the input visit. + """ + ConfigClass = GenerateEphemeridesConfig + _DefaultName = "GenerateEphemerides" + + def runQuantum( + self, + butlerQC, + inputRefs, + outputRefs, + ): + """Do butler IO and transform to provide in memory + objects for tasks `~Task.run` method. + + Parameters + ---------- + butlerQC : `QuantumContext` + A butler which is specialized to operate in the context of a + `lsst.daf.butler.Quantum`. + inputRefs : `InputQuantizedConnection` + Datastructure whose attribute names are the names that identify + connections defined in corresponding `PipelineTaskConnections` + class. The values of these attributes are the + `lsst.daf.butler.DatasetRef` objects associated with the defined + input/prerequisite connections. + outputRefs : `OutputQuantizedConnection` + Datastructure whose attribute names are the names that identify + connections defined in corresponding `PipelineTaskConnections` + class. The values of these attributes are the + `lsst.daf.butler.DatasetRef` objects associated with the defined + output connections. 
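+
+        Notes
+        -----
+        ``run`` returns one ephemeris table per visit, keyed by visit id; each
+        output reference is matched to its visit here before its table is
+        written with ``butlerQC.put``.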
+        """
+        inputs = butlerQC.get(inputRefs)
+        outputs = self.run(**inputs)
+        for ref in outputRefs.ssObjects:
+            dataId = ref.dataId
+            ephemeris_visit = outputs.ssObjects[dataId['visit']]
+            butlerQC.put(ephemeris_visit, ref)
+
+    @timeMethod
+    def run(self, visitSummaries, mpcorb, de440s, sb441_n16, obsCodes, linux_p1550p2650, pck00010,
+            earth_latest_high_prec, earth_620120_250826, earth_2025_250826_2125_predict, naif0012):
+        """Parse the information on the input visits and retrieve the
+        observable solar system objects from Sorcha.
+
+        Parameters
+        ----------
+        visitSummaries : `list` of `lsst.afw.table.ExposureCatalog`
+            Visit summaries whose rows provide ``.getVisitInfo()``, giving the
+            boresight and time of each exposure.
+
+        mpcorb : `pandas.DataFrame`
+            Minor Planet Center orbit table used for association.
+
+        de440s, sb441_n16, obsCodes, linux_p1550p2650, pck00010,
+        earth_latest_high_prec, earth_620120_250826, earth_2025_250826_2125_predict,
+        naif0012 : `lsst.pipe.tasks.sspAuxiliaryFile.SSPAuxiliaryFile`
+            Auxiliary files required by Sorcha's ephemeris generator.
+
+        Returns
+        -------
+        result : `lsst.pipe.base.Struct`
+            Results struct with components:
+
+            - ``ssObjects`` : `dict` [`int`, `pandas.DataFrame`]
+                Dictionary, keyed by visit id, of DataFrames containing the
+                Solar System Objects observable in each visit as retrieved by
+                Sorcha. The columns include:
+
+                ``ObjID``
+                    object designation (`str`)
+                ``RA_deg``
+                    RA in decimal degrees (`float`)
+                ``Dec_deg``
+                    Dec in decimal degrees (`float`)
+
+        """
+        if len(visitSummaries) == 0:
+            raise NoWorkFound("No visits input!")
+        visitInfos = [vs[0].getVisitInfo() for vs in visitSummaries]
+        fieldRA = np.array([vi.getBoresightRaDec().getRa().asDegrees() for vi in visitInfos])
+        fieldDec = np.array([vi.getBoresightRaDec().getDec().asDegrees() for vi in visitInfos])
+        epochMJD = np.array([vi.date.toAstropy().tai.mjd for vi in visitInfos])
+        visits = [vi.id for vi in visitInfos]
+
+        # Exposure times are in seconds; the MJD columns are in days, hence the
+        # 86400 s/day conversion when computing observationStartMJD below.
+        n = len(epochMJD)
+        visitTime = np.ones(n) * 30.0  # seconds
+        inputVisits = pd.DataFrame({
+            "observationMidpointMJD": epochMJD,
+            "fieldRA": fieldRA,
+            "fieldDec": fieldDec,
+            "observationId": visits,
+            "visitTime": visitTime,
+            "observationStartMJD": epochMJD - (visitTime / 2) / 86400.0,
+            "visitExposureTime": visitTime,
+
+            # The following columns are required by Sorcha but only used after ephemeris generation
+            "filter": ["r"] * n,
+            "seeingFwhmGeom": [-1] * n,
+            "seeingFwhmEff": [-1] * n,
+            "fiveSigmaDepth": [-1] * n,
+            "rotSkyPos": [-1] * n,
+        })
+
+        inputOrbits = mpcorb[
+            ['packed_primary_provisional_designation', 'q', 'e', 'i',
+             'node', 'argperi', 'peri_time', 'epoch_mjd']
+        ].rename(columns={'packed_primary_provisional_designation': 'ObjID', 'epoch_mjd': 'epochMJD_TDB',
+                          'i': 'inc', 'argperi': 'argPeri', 'peri_time': 't_p_MJD_TDB'})
+        inputOrbits['FORMAT'] = 'COM'
+        # Minimal physical-parameters (colour) table: H in the r band plus a fixed slope parameter.
+        inputColors = inputOrbits[["ObjID"]].copy()
+        inputColors["H_r"] = mpcorb['h']
+        inputColors["GS"] = 0.15  # traditional default slope parameter
+
+        eph_str = resources.files(lsst.pipe.tasks).parents[3].joinpath("data/eph.ini").read_text()
+        eph_str = eph_str.replace("{OBSCODE}", self.config.observatoryCode)
+        eph_str = eph_str.replace("{FOV}", str(self.config.observatoryFOVRadius))
+
+        with TemporaryDirectory() as tmpdirname:
+            self.log.info(f'temp dir: {tmpdirname}')
+
+            # Orbits
+            inputOrbits.to_csv(f'{tmpdirname}/orbits.csv', index=False)
+            # Observations SQLite
+            conn = sqlite_connect(f'{tmpdirname}/pointings.db')
+            inputVisits.to_sql('observations', conn, if_exists='replace', index=False)
+            conn.close()
+
+            with open(f'{tmpdirname}/eph.ini', 'w') as ephFile:
+                ephFile.write(eph_str)
+
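+            # Physical-parameters ("colours") file, passed to Sorcha via the -p
+            # argument below; one row per object with ObjID, H_r and the slope
+            # parameter GS.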
inputColors.to_csv(f'{tmpdirname}/colors.csv', index=False) + + cache = f'{tmpdirname}/sorcha_cache/' + self.log.info('making cache') + os.mkdir(cache) + # DONE + for filename, fileref in [ + ('de440s.bsp', de440s), + ('sb441-n16.bsp', sb441_n16), + ('ObsCodes.json', obsCodes), + ('linux_p1550p2650.440', linux_p1550p2650), + ('pck00010.pck', pck00010), + ('earth_latest_high_prec.bpc', earth_latest_high_prec), + ('earth_620120_250826.bpc', earth_620120_250826), + ('earth_2025_250826_2125_predict.bpc', earth_2025_250826_2125_predict), + ('naif0012.tls', naif0012), + ]: + self.log.info(f'writing {filename}') + with open(cache + filename, 'wb') as file: + file.write(fileref.fileContents.read()) + + meta_kernel_text = dedent(f"""\ + \\begindata + + PATH_VALUES = ('{tmpdirname}/sorcha_cache/') + + PATH_SYMBOLS = ('A') + + KERNELS_TO_LOAD=( + '$A/naif0012.tls', + '$A/earth_620120_250826.bpc', + '$A/earth_2025_250826_2125_predict.bpc', + '$A/pck00010.pck', + '$A/de440s.bsp', + '$A/earth_latest_high_prec.bpc', + ) + + \\begintext + """) + with open(f'{tmpdirname}/sorcha_cache/meta_kernel.txt', 'w') as meta_kernel_file: + meta_kernel_file.write(meta_kernel_text) + self.log.info('Sorcha process begun') + + result = run( + [ + "sorcha", + "run", + "-c", f"{tmpdirname}/eph.ini", + "-o", f"{tmpdirname}/", + "--ob", f"{tmpdirname}/orbits.csv", + "-p", f"{tmpdirname}/colors.csv", + "--pd", f"{tmpdirname}/pointings.db", + "--ew", f"{tmpdirname}/ephemeris", + "--ar", f"{tmpdirname}/sorcha_cache/" + ], + stdout=PIPE, + stderr=PIPE, + text=True + ) + + self.log.info(f"Sorcha STDOUT:\n {result.stdout}") + self.log.info(f"Sorcha STDERR:\n {result.stderr}") + + eph_path = f'{tmpdirname}/ephemeris.csv' + if not os.path.exists(eph_path): + raise FileNotFoundError( + " Sorcha did not create ephemeris. Check STDOUT/STDERR above. " + f"Directory contents:\n{os.listdir(tmpdirname)}" + ) + + # Return Sorcha output directly + ephemeris = pd.read_csv(eph_path) + perVisitSsObjects = {FieldID: group for FieldID, group in ephemeris.groupby("FieldID")} + for v in visits: + if v not in perVisitSsObjects: + perVisitSsObjects[v] = ephemeris.iloc[:0] + return Struct(ssObjects=perVisitSsObjects) diff --git a/python/lsst/pipe/tasks/ssoAssociation.py b/python/lsst/pipe/tasks/ssoAssociation.py index 230bcd89f..ef825534c 100644 --- a/python/lsst/pipe/tasks/ssoAssociation.py +++ b/python/lsst/pipe/tasks/ssoAssociation.py @@ -35,6 +35,7 @@ import lsst.pex.config as pexConfig import lsst.pipe.base as pipeBase from lsst.utils.timer import timeMethod +from lsst.pipe.tasks.associationUtils import obj_id_to_ss_object_id class SolarSystemAssociationConfig(pexConfig.Config): @@ -67,7 +68,7 @@ class SolarSystemAssociationTask(pipeBase.Task): _DefaultName = "ssoAssociation" @timeMethod - def run(self, diaSourceCatalog, solarSystemObjects, visitInfo, bbox, wcs): + def run(self, diaSourceCatalog, ssObjects, visitInfo, bbox, wcs): """Create a searchable tree of unassociated DiaSources and match to the nearest ssoObject. @@ -76,7 +77,7 @@ def run(self, diaSourceCatalog, solarSystemObjects, visitInfo, bbox, wcs): diaSourceCatalog : `astropy.table.Table` Catalog of DiaSources. Modified in place to add ssObjectId to successfully associated DiaSources. - solarSystemObjects : `astropy.table.Table` + ssObjects : `astropy.table.Table` Set of solar system objects that should be within the footprint of the current visit. 
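+            Two input formats are accepted: tables carrying Chebyshev state-vector
+            polynomials (``obs_x_poly`` and related columns) and Sorcha ephemeris
+            output (``RA_deg``, ``Dec_deg`` and related columns).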
visitInfo : `lsst.afw.image.VisitInfo` @@ -101,79 +102,108 @@ def run(self, diaSourceCatalog, solarSystemObjects, visitInfo, bbox, wcs): - ``ssSourceData`` : ssSource table data. (`Astropy.table.Table`) """ - nSolarSystemObjects = len(solarSystemObjects) + nSolarSystemObjects = len(ssObjects) if nSolarSystemObjects <= 0: - return self._return_empty(diaSourceCatalog, solarSystemObjects) + return self._return_empty(diaSourceCatalog, ssObjects) mjd_midpoint = visitInfo.date.toAstropy().tai.mjd - ref_time = mjd_midpoint - solarSystemObjects["tmin"].value[0] # all tmin should be identical - - solarSystemObjects['obs_position'] = [ - np.array([chebval(ref_time, row['obs_x_poly']), - chebval(ref_time, row['obs_y_poly']), - chebval(ref_time, row['obs_z_poly'])]) - for row in solarSystemObjects] - solarSystemObjects['obs_velocity'] = [ - np.array([chebval(ref_time, Chebyshev(row['obs_x_poly']).deriv().coef), - chebval(ref_time, Chebyshev(row['obs_y_poly']).deriv().coef), - chebval(ref_time, Chebyshev(row['obs_z_poly']).deriv().coef)]) - for row in solarSystemObjects] - solarSystemObjects['obj_position'] = [ - np.array([chebval(ref_time, row['obj_x_poly']), - chebval(ref_time, row['obj_y_poly']), - chebval(ref_time, row['obj_z_poly'])]) - for row in solarSystemObjects] - solarSystemObjects['obj_velocity'] = [ - np.array([chebval(ref_time, Chebyshev(row['obj_x_poly']).deriv().coef), - chebval(ref_time, Chebyshev(row['obj_y_poly']).deriv().coef), - chebval(ref_time, Chebyshev(row['obj_z_poly']).deriv().coef)]) - for row in solarSystemObjects] - vector = np.vstack(solarSystemObjects['obj_position'].value - - solarSystemObjects['obs_position'].value) - ras, decs = np.vstack(hp.vec2ang(vector, lonlat=True)) - solarSystemObjects['ra'] = ras - solarSystemObjects['dec'] = decs - solarSystemObjects['obs_position_x'], solarSystemObjects['obs_position_y'], \ - solarSystemObjects['obs_position_z'] = solarSystemObjects['obs_position'].value.T - solarSystemObjects['heliocentricX'], solarSystemObjects['heliocentricY'], \ - solarSystemObjects['heliocentricZ'] = solarSystemObjects['obj_position'].value.T - solarSystemObjects['obs_velocity_x'], solarSystemObjects['obs_velocity_y'], \ - solarSystemObjects['obs_velocity_z'] = solarSystemObjects['obs_velocity'].value.T - solarSystemObjects['heliocentricVX'], solarSystemObjects['heliocentricVY'], \ - solarSystemObjects['heliocentricVZ'] = solarSystemObjects['obj_velocity'].value.T - solarSystemObjects['topocentric_position'], solarSystemObjects['topocentric_velocity'] = ( - solarSystemObjects['obj_position'] - solarSystemObjects['obs_position'], - solarSystemObjects['obj_velocity'] - solarSystemObjects['obs_velocity'], - ) - solarSystemObjects['topocentricX'], solarSystemObjects['topocentricY'], \ - solarSystemObjects['topocentricZ'] = ( - np.array(list(solarSystemObjects['topocentric_position'].value)).T - ) - solarSystemObjects['topocentricVX'], solarSystemObjects['topocentricVY'], \ - solarSystemObjects['topocentricVZ'] = ( - np.array(list(solarSystemObjects['topocentric_velocity'].value)).T - ) - solarSystemObjects['heliocentricVX'], solarSystemObjects['heliocentricVY'], \ - solarSystemObjects['heliocentricVZ'] = np.array(list(solarSystemObjects['obj_velocity'].value)).T - solarSystemObjects['heliocentricDist'] = np.linalg.norm(solarSystemObjects['obj_position'], axis=1) - solarSystemObjects['topocentricDist'] = np.linalg.norm(solarSystemObjects['topocentric_position'], - axis=1) - solarSystemObjects['phaseAngle'] = np.degrees(np.arccos(np.sum( - 
solarSystemObjects['obj_position'].T * solarSystemObjects['topocentric_position'].T - / solarSystemObjects['heliocentricDist'] / solarSystemObjects['topocentricDist'], axis=0 - ))) + if 'obs_x_poly' in ssObjects.columns: + ref_time = mjd_midpoint - ssObjects["tmin"].value[0] # all tmin should be identical + ssObjects['obs_position'] = [ + np.array([chebval(ref_time, row['obs_x_poly']), + chebval(ref_time, row['obs_y_poly']), + chebval(ref_time, row['obs_z_poly'])]) + for row in ssObjects] + ssObjects['obs_velocity'] = [ + np.array([chebval(ref_time, Chebyshev(row['obs_x_poly']).deriv().coef), + chebval(ref_time, Chebyshev(row['obs_y_poly']).deriv().coef), + chebval(ref_time, Chebyshev(row['obs_z_poly']).deriv().coef)]) + for row in ssObjects] + ssObjects['obj_position'] = [ + np.array([chebval(ref_time, row['obj_x_poly']), + chebval(ref_time, row['obj_y_poly']), + chebval(ref_time, row['obj_z_poly'])]) + for row in ssObjects] + ssObjects['obj_velocity'] = [ + np.array([chebval(ref_time, Chebyshev(row['obj_x_poly']).deriv().coef), + chebval(ref_time, Chebyshev(row['obj_y_poly']).deriv().coef), + chebval(ref_time, Chebyshev(row['obj_z_poly']).deriv().coef)]) + for row in ssObjects] + vector = np.vstack(ssObjects['obj_position'].value - ssObjects['obs_position'].value) + ras, decs = np.vstack(hp.vec2ang(vector, lonlat=True)) + ssObjects['ra'] = ras + ssObjects['dec'] = decs + ssObjects['obs_position_x'], ssObjects['obs_position_y'], \ + ssObjects['obs_position_z'] = ssObjects['obs_position'].value.T + ssObjects['heliocentricX'], ssObjects['heliocentricY'], \ + ssObjects['heliocentricZ'] = ssObjects['obj_position'].value.T + ssObjects['obs_velocity_x'], ssObjects['obs_velocity_y'], \ + ssObjects['obs_velocity_z'] = ssObjects['obs_velocity'].value.T + ssObjects['heliocentricVX'], ssObjects['heliocentricVY'], \ + ssObjects['heliocentricVZ'] = ssObjects['obj_velocity'].value.T + ssObjects['topocentric_position'], ssObjects['topocentric_velocity'] = ( + ssObjects['obj_position'] - ssObjects['obs_position'], + ssObjects['obj_velocity'] - ssObjects['obs_velocity'], + ) + ssObjects['topocentricX'], ssObjects['topocentricY'], ssObjects['topocentricZ'] = ( + np.array(list(ssObjects['topocentric_position'].value)).T + ) + ssObjects['topocentricVX'], ssObjects['topocentricVY'], ssObjects['topocentricVZ'] = ( + np.array(list(ssObjects['topocentric_velocity'].value)).T + ) + ssObjects['heliocentricVX'], ssObjects['heliocentricVY'], \ + ssObjects['heliocentricVZ'] = np.array(list(ssObjects['obj_velocity'].value)).T + ssObjects['heliocentricDist'] = np.linalg.norm(ssObjects['obj_position'], axis=1) + ssObjects['topocentricDist'] = np.linalg.norm(ssObjects['topocentric_position'], axis=1) + ssObjects['phaseAngle'] = np.degrees(np.arccos(np.sum( + ssObjects['obj_position'].T * ssObjects['topocentric_position'].T + / ssObjects['heliocentricDist'] / ssObjects['topocentricDist'], axis=0 + ))) + marginArcsec = ssObjects["Err(arcsec)"].max() + + columns_to_drop = [ + "obs_position", "obs_velocity", "obj_position", "obj_velocity", "topocentric_position", + "topocentric_velocity", "obs_x_poly", "obs_y_poly", "obs_z_poly", "obj_x_poly", "obj_y_poly", + "obj_z_poly", "associated" + ] + + else: + ssObjects.rename_columns( + ['RA_deg', 'Dec_deg', 'phase_deg', 'Range_LTC_km', 'Obj_Sun_x_LTC_km', 'Obj_Sun_y_LTC_km', + 'Obj_Sun_z_LTC_km', 'Obj_Sun_vx_LTC_km_s', 'Obj_Sun_vy_LTC_km_s', 'Obj_Sun_vz_LTC_km_s'], + ['ra', 'dec', 'phaseAngle', 'topocentricDist', 'heliocentricX', 'heliocentricY', + 'heliocentricZ', 
'heliocentricVX', 'heliocentricVY', 'heliocentricVZ']) + ssObjects['ssObjectId'] = [obj_id_to_ss_object_id(v) for v in ssObjects['ObjID']] + ssObjects['heliocentricDist'] = ( + np.sqrt(ssObjects['heliocentricX']**2 + ssObjects['heliocentricY']**2 + + ssObjects['heliocentricZ']**2) + ) + for substring1, substring2 in [('X', 'x_km'), ('Y', 'y_km'), ('Z', 'z_km'), + ('VX', 'vx_km_s'), ('VY', 'vy_km_s'), ('VZ', 'vz_km_s')]: + topoName = 'topocentric' + substring1 + helioName = 'heliocentric' + substring1 + obsName = 'Obs_Sun_' + substring2 + ssObjects[topoName] = ssObjects[helioName] - ssObjects[obsName] + + marginArcsec = 1.0 # TODO: justify + + columns_to_drop = ['FieldID', 'fieldMJD_TAI', 'fieldJD_TDB', + 'RangeRate_LTC_km_s', 'RARateCosDec_deg_day', + 'DecRate_deg_day', 'Obs_Sun_x_km', 'Obs_Sun_y_km', 'Obs_Sun_z_km', + 'Obs_Sun_vx_km_s', 'Obs_Sun_vy_km_s', 'Obs_Sun_vz_km_s', + '__index_level_0__'] stateVectorColumns = ['heliocentricX', 'heliocentricY', 'heliocentricZ', 'heliocentricVX', 'heliocentricVY', 'heliocentricVZ', 'topocentricX', 'topocentricY', 'topocentricZ', 'topocentricVX', 'topocentricVY', 'topocentricVZ'] - mpcorbColumns = [col for col in solarSystemObjects.columns if col[:7] == 'MPCORB_'] + mpcorbColumns = [col for col in ssObjects.columns if col[:7] == 'MPCORB_'] maskedObjects = self._maskToCcdRegion( - solarSystemObjects, + ssObjects, bbox, wcs, - solarSystemObjects["Err(arcsec)"].max()).copy() + marginArcsec).copy() nSolarSystemObjects = len(maskedObjects) if nSolarSystemObjects <= 0: return self._return_empty(diaSourceCatalog, maskedObjects) @@ -244,11 +274,6 @@ def run(self, diaSourceCatalog, solarSystemObjects, visitInfo, bbox, wcs): ssSourceData['eclipticLambda'] = coords.barycentrictrueecliptic.lon.deg ssSourceData['eclipticBeta'] = coords.barycentrictrueecliptic.lat.deg unassociatedObjects = maskedObjects[unAssocObjectMask] - columns_to_drop = [ - "obs_position", "obs_velocity", "obj_position", "obj_velocity", "topocentric_position", - "topocentric_velocity", "obs_x_poly", "obs_y_poly", "obs_z_poly", "obj_x_poly", "obj_y_poly", - "obj_z_poly", "associated" - ] unassociatedObjects.remove_columns(columns_to_drop) return pipeBase.Struct( ssoAssocDiaSources=diaSourceCatalog[assocSourceMask], @@ -258,13 +283,13 @@ def run(self, diaSourceCatalog, solarSystemObjects, visitInfo, bbox, wcs): associatedSsSources=ssSourceData, unassociatedSsObjects=unassociatedObjects) - def _maskToCcdRegion(self, solarSystemObjects, bbox, wcs, marginArcsec): + def _maskToCcdRegion(self, ssObjects, bbox, wcs, marginArcsec): """Mask the input SolarSystemObjects to only those in the exposure bounding box. Parameters ---------- - solarSystemObjects : `astropy.table.Table` + ssObjects : `astropy.table.Table` SolarSystemObjects to mask to ``exposure``. bbox : Exposure bbox used for masking @@ -280,17 +305,17 @@ def _maskToCcdRegion(self, solarSystemObjects, bbox, wcs, marginArcsec): maskedSolarSystemObjects : `astropy.table.Table` Set of SolarSystemObjects contained within the exposure bounds. 
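+
+        Notes
+        -----
+        ``marginArcsec`` is converted to a pixel padding using the pixel scale at
+        the bbox centre and capped at ``maxPixelMargin``.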
""" - if len(solarSystemObjects) == 0: - return solarSystemObjects + if len(ssObjects) == 0: + return ssObjects padding = min( int(np.ceil(marginArcsec / wcs.getPixelScale(bbox.getCenter()).asArcseconds())), self.config.maxPixelMargin) - return solarSystemObjects[bbox_contains_sky_coords( + return ssObjects[bbox_contains_sky_coords( bbox, wcs, - solarSystemObjects['ra'].value * u.degree, - solarSystemObjects['dec'].value * u.degree, + ssObjects['ra'].value * u.degree, + ssObjects['dec'].value * u.degree, padding)] def _radec_to_xyz(self, ras, decs): diff --git a/python/lsst/pipe/tasks/sspAuxiliaryFile.py b/python/lsst/pipe/tasks/sspAuxiliaryFile.py new file mode 100644 index 000000000..494753358 --- /dev/null +++ b/python/lsst/pipe/tasks/sspAuxiliaryFile.py @@ -0,0 +1,44 @@ +from lsst.daf.butler import FormatterV2 +from lsst.resources import ResourcePath +from io import BytesIO +from typing import Any + +__all__ = ["SSPAuxiliaryFile", "SSPAuxiliaryFileFormatter"] + + +class SSPAuxiliaryFile(): + """Class to hold information about auxiliary files needed for + solar system object ephemeris calculations. + """ + fileContents = None + + def __init__(self, fileContents): + self.fileContents = fileContents + + +class SSPAuxiliaryFileFormatter(FormatterV2): + """Formatter for SSP Auxiliary Files. + """ + can_read_from_uri = True + + def read_from_uri(self, uri: ResourcePath, component: str | None = None, expected_size: int = -1) -> Any: + """Read a dataset. + + Parameters + ---------- + uri : `lsst.ResourcePath` + Location of the file to read. + component : `str` or `None`, optional + Component to read from the file. + expected_size : `int`, optional + Expected size of the file. + + Returns + ------- + payload : `SSPAuxiliaryFile` + The requested data as a Python object. + """ + return SSPAuxiliaryFile(BytesIO(uri.read())) + + def to_bytes(self, in_memory_dataset: Any) -> bytes: + return in_memory_dataset.fileContents