diff --git a/scopesim/effects/fits_headers.py b/scopesim/effects/fits_headers.py index f9b5ab46..b237fcb0 100644 --- a/scopesim/effects/fits_headers.py +++ b/scopesim/effects/fits_headers.py @@ -9,6 +9,7 @@ from astropy.table import Table from . import Effect +from ..source.source_fields import HDUSourceField, TableSourceField from ..utils import from_currsys, find_file @@ -530,19 +531,19 @@ def apply_to(self, hdul, **kwargs): if (src := opt_train._last_source) is not None: prefix = self.meta["keyword_prefix"] for i, field in enumerate(src.fields): - src_class = field.__class__.__name__ - src_dic = deepcopy(src._meta_dicts[i]) - if isinstance(field, fits.ImageHDU): + src_class = field.field.__class__.__name__ + src_dic = deepcopy(src.meta) + if isinstance(field, HDUSourceField): hdr = field.header for key in hdr: src_dic = {key: [hdr[key], hdr.comments[key]]} - elif isinstance(field, Table): + elif isinstance(field, TableSourceField): src_dic.update(field.meta) src_dic["length"] = len(field) - for j, name in enumerate(field.colnames): + for j, name in enumerate(field.field.colnames): src_dic[f"col{j}_name"] = name - src_dic[f"col{j}_unit"] = str(field[name].unit) + src_dic[f"col{j}_unit"] = str(field.field[name].unit) self.dict_list = [{"ext_number": self.meta["ext_number"], "keywords": { diff --git a/scopesim/effects/spectral_efficiency.py b/scopesim/effects/spectral_efficiency.py index 628281c0..73db00f2 100644 --- a/scopesim/effects/spectral_efficiency.py +++ b/scopesim/effects/spectral_efficiency.py @@ -106,10 +106,10 @@ def apply_to(self, obj, **kwargs): logger.warning("No grating efficiency for trace %s", trace_id) return obj - wcs = WCS(obj.hdu.header).spectral - wave_cube = wcs.all_pix2world(np.arange(obj.hdu.data.shape[0]), 0)[0] - wave_cube = (wave_cube * u.Unit(wcs.wcs.cunit[0])).to(u.AA) - obj.hdu = apply_throughput_to_cube(obj.hdu, effic.throughput) + swcs = WCS(obj.hdu.header).spectral + with u.set_enabled_equivalencies(u.spectral()): + wave = swcs.pixel_to_world(np.arange(swcs.pixel_shape[0])) << u.um + obj.hdu = apply_throughput_to_cube(obj.hdu, effic.throughput, wave) return obj def plot(self): diff --git a/scopesim/effects/ter_curves.py b/scopesim/effects/ter_curves.py index 0ddea9c1..a6831744 100644 --- a/scopesim/effects/ter_curves.py +++ b/scopesim/effects/ter_curves.py @@ -16,6 +16,7 @@ from ..base_classes import SourceBase, FOVSetupBase from ..optics.surface import SpectralSurface from ..source.source import Source +from ..source.source_fields import CubeSourceField, SpectrumSourceField from ..utils import (from_currsys, quantify, check_keys, find_file, figure_factory, get_logger) @@ -113,13 +114,20 @@ def apply_to(self, obj, **kwargs): thru = self.throughput # apply transmission to source spectra - for isp, spec in enumerate(obj.spectra): - obj.spectra[isp] = combine_two_spectra(spec, thru, "multiply", - wave_min, wave_max) - - # apply transmission to cube fields - for icube, cube in enumerate(obj.cube_fields): - obj.cube_fields[icube] = apply_throughput_to_cube(cube, thru) + for fld in obj.fields: + if isinstance(fld, CubeSourceField): + fld.field = apply_throughput_to_cube( + fld.field, thru, fld.wave) + elif isinstance(fld, SpectrumSourceField): + fld.spectra = { + isp: combine_two_spectra(spec, thru, "multiply", + wave_min, wave_max) + for isp, spec in fld.spectra.items() + } + else: + # Rather log than raise here, can still move on + logger.error("Source field is neither Cube nor has " + "spectra, this shouldn't occur...") # add the effect background to the 
source background field if self.background_source is not None: diff --git a/scopesim/effects/ter_curves_utils.py b/scopesim/effects/ter_curves_utils.py index 56802631..0ecbfecb 100644 --- a/scopesim/effects/ter_curves_utils.py +++ b/scopesim/effects/ter_curves_utils.py @@ -8,7 +8,7 @@ from astropy.table import Table, QTable from astropy.io.votable import parse_single_table from astropy.io import ascii as ioascii -from astropy.wcs import WCS +from astropy.io import fits from synphot import SpectralElement, SourceSpectrum, Empirical1D, Observation from synphot.units import PHOTLAM @@ -353,25 +353,32 @@ def scale_spectrum(spectrum, filter_name, amplitude): return spectrum -def apply_throughput_to_cube(cube, thru): +def apply_throughput_to_cube( + cube: fits.ImageHDU, + thru: SpectralElement | SourceSpectrum, + wave_cube: u.Quantity, +) -> fits.ImageHDU: """ Apply throughput curve to a spectroscopic cube. Parameters ---------- - cube : ImageHDU - Three-dimensional image, dimension 0 (in python convention) is the - spectral dimension. WCS is required. - thru : synphot.SpectralElement, synphot.SourceSpectrum + cube : fits.ImageHDU + Three-dimensional image, dimension 0 (in python convention) is the + spectral dimension. + thru : SpectralElement | SourceSpectrum + Throughput curve, spectrum or filter. + wave_cube : u.Quantity["length"] + Wavelength axis of the cube. Returns ------- - cube : ImageHDU, header unchanged, data multiplied with - wavelength-dependent throughput. + cube : fits.ImageHDU + Header unchanged, data multiplied with wavelength-dependent throughput. + """ - wcs = WCS(cube.header).spectral - wave_cube = wcs.all_pix2world(np.arange(cube.data.shape[0]), 0)[0] - wave_cube = (wave_cube * u.Unit(wcs.wcs.cunit[0])).to(u.AA) + # Note: wave_cube used to be converted to AA, but synphot understands um + # or whatever just as well... cube.data *= thru(wave_cube).value[:, None, None] return cube diff --git a/scopesim/optics/fov.py b/scopesim/optics/fov.py index 1b91a178..a354ebda 100644 --- a/scopesim/optics/fov.py +++ b/scopesim/optics/fov.py @@ -121,7 +121,7 @@ def extract_from(self, src): if not isinstance(src, SourceBase): raise ValueError(f"source must be a Source object: {type(src)}") - fields_in_fov = [field for field in src.fields + fields_in_fov = [field.field for field in src.fields if fu.is_field_in_fov(self.header, field)] if not fields_in_fov: logger.warning("No fields in FOV.") diff --git a/scopesim/optics/fov_utils.py b/scopesim/optics/fov_utils.py index 582547f6..a69a91c9 100644 --- a/scopesim/optics/fov_utils.py +++ b/scopesim/optics/fov_utils.py @@ -7,6 +7,7 @@ from synphot import SourceSpectrum, Empirical1D from . import image_plane_utils as imp_utils +from ..source.source_fields import SourceField from ..utils import from_currsys, quantify, quantity_from_table, get_logger @@ -21,8 +22,9 @@ def is_field_in_fov(fov_header, field, wcs_suffix=""): ---------- fov_header : fits.Header Header from a FieldOfView object - field : [astropy.Table, astropy.ImageHDU] - Field object from a Source object + field : [SourceField, astropy.Table, astropy.ImageHDU] + Field object from a Source object. Should now be SourceField, but bare + Table and ImageHDU still supported. wcs_suffix : str ["S", "D"] Coordinate system: Sky or Detector @@ -31,6 +33,11 @@ def is_field_in_fov(fov_header, field, wcs_suffix=""): is_inside_fov : bool """ + if isinstance(field, SourceField): + field = field.field + # TODO: Check if this can always get a SourceField, if yes then just do + # that and remove this. 
+ if isinstance(field, fits.ImageHDU) and \ field.header.get("BG_SRC") is not None: is_inside_fov = True diff --git a/scopesim/optics/image_plane_utils.py b/scopesim/optics/image_plane_utils.py index 5bad9768..973153b5 100644 --- a/scopesim/optics/image_plane_utils.py +++ b/scopesim/optics/image_plane_utils.py @@ -52,7 +52,7 @@ def _get_headers(hdus_or_tables): else: raise TypeError( "hdu_or_table_list may only contain fits.ImageHDU, Table " - "or fits.Header, found {type(hdu_or_table)}.") + f"or fits.Header, found {type(hdu_or_table)}.") if tables: yield _make_bounding_header_for_tables(*tables, pixel_scale=pixel_scale) diff --git a/scopesim/optics/optical_train.py b/scopesim/optics/optical_train.py index 5ba1bfb3..04ade25a 100644 --- a/scopesim/optics/optical_train.py +++ b/scopesim/optics/optical_train.py @@ -7,6 +7,7 @@ import numpy as np from scipy.interpolate import interp1d from astropy import units as u +from astropy.wcs import WCS from tqdm import tqdm @@ -20,7 +21,7 @@ from ..detector import DetectorManager from ..effects import ExtraFitsKeywords from ..utils import from_currsys, top_level_catch, get_logger -from .. import rc, __version__ +from .. import __version__ logger = get_logger(__name__) @@ -275,9 +276,10 @@ def prepare_source(self, source): ImageHDU. """ # Convert to PHOTLAM per arcsec2 - # ..todo: this is not sufficiently general + # TODO: this is not sufficiently general + # TODO: Maybe move this to source_fields?? - for ispec, spec in enumerate(source.spectra): + for ispec, spec in source.spectra.items(): # Put on fov wavegrid wave_min = min(fov.meta["wave_min"] for fov in self.fov_manager.fovs) wave_max = max(fov.meta["wave_max"] for fov in self.fov_manager.fovs) @@ -348,6 +350,8 @@ def prepare_source(self, source): cube.header["CDELT3"] = dwave cube.header["CUNIT3"] = wave_unit.name + cube.wcs = WCS(cube.field) + return source @top_level_catch diff --git a/scopesim/rc.py b/scopesim/rc.py index 4028f8b2..e66492e4 100644 --- a/scopesim/rc.py +++ b/scopesim/rc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -"""Global configurations for ScopeSim.""" +"""Global configurations for ScopeSim (rc ... runtime configuration).""" from pathlib import Path import yaml diff --git a/scopesim/source/source.py b/scopesim/source/source.py index a1568787..21124bfc 100644 --- a/scopesim/source/source.py +++ b/scopesim/source/source.py @@ -32,29 +32,29 @@ # [WCS = CRPIXn, CRVALn = (0,0), CTYPEn, CDn_m, NAXISn, CUNITn """ -import pickle from copy import deepcopy from pathlib import Path -import numpy as np +from io import StringIO -from astropy.table import Table, Column -from astropy.io import ascii as ioascii +import numpy as np +from astropy.table import Table from astropy.io import fits from astropy import units as u from astropy.wcs import WCS -from synphot import SpectralElement +from synphot import SpectralElement, SourceSpectrum from ..optics.image_plane import ImagePlane from ..optics import image_plane_utils as imp_utils from .source_utils import validate_source_input, convert_to_list_of_spectra, \ photons_in_range from . 
import source_templates as src_tmp - from ..base_classes import SourceBase -from ..utils import (find_file, is_fits, get_fits_type, quantify, - quantity_from_table, convert_table_comments_to_dict, - close_loop, figure_factory, get_logger) +from .source_fields import (SourceField, TableSourceField, SpectrumSourceField, + HDUSourceField, ImageSourceField, CubeSourceField) +from ..utils import (find_file, is_fits, get_fits_type, + quantity_from_table, + figure_factory, get_logger) logger = get_logger(__name__) @@ -62,7 +62,7 @@ class Source(SourceBase): """ - Create a source object from a file or from arrays + Create a source object from a file or from arrays. A Source object must consist of a spatial and a spectral description of the on-sky source. Many sources can be added together and kept @@ -130,8 +130,8 @@ class Source(SourceBase): fields : list The spatial distribution of the on-sky source, either as ``fits.ImageHDU`` or ``astropy.Table`` objects - spectra : list of ``synphot.SourceSpectrum`` objects - List of spectra associated with the fields + spectra : dict of ``synphot.SourceSpectrum`` objects + Dictionary of spectra associated with the fields meta : dict Dictionary of extra information about the source @@ -145,89 +145,83 @@ def __init__(self, filename=None, cube=None, ext=0, lam=None, spectra=None, x=None, y=None, ref=None, weight=None, table=None, image_hdu=None, flux=None, **kwargs): - self.meta = {} - self.meta.update(kwargs) - # ._meta_dicts contains a meta for each of the .fields. It is primarily - # used to set proper FITS header keywords for each field so the source - # can be reconstructed from the FITS headers. - self._meta_dicts = [self.meta] - - self.fields = [] - self.spectra = [] + self._meta = {} + self.fields: list[SourceField] = [] - self.bandpass = None + self._bandpass: SpectralElement | None = None - validate_source_input(lam=lam, x=x, y=y, ref=ref, weight=weight, - spectra=spectra, table=table, cube=cube, - ext=ext, image_hdu=image_hdu, flux=flux, - filename=filename) + # The rest of these is not implemented in validate_source_input + # validate_source_input(lam=lam, x=x, y=y, ref=ref, weight=weight, + # spectra=spectra, table=table, cube=cube, + # ext=ext, image_hdu=image_hdu, flux=flux, + # filename=filename) + validate_source_input(table=table, image_hdu=image_hdu, filename=filename) if spectra is not None: - spectra = convert_to_list_of_spectra(spectra, lam) + spectra = {i: spec for i, spec in + enumerate(convert_to_list_of_spectra(spectra, lam))} if filename is not None and spectra is not None: - self._from_file(filename, spectra, flux) + self._from_file(filename, spectra, flux, **kwargs) elif cube is not None: - self._from_cube(cube=cube, ext=ext) + self._from_cube(cube=cube, ext=ext, **kwargs) elif table is not None and spectra is not None: - self._from_table(table, spectra) + self._from_table(table, spectra, **kwargs) elif image_hdu is not None and spectra is not None: - self._from_imagehdu_and_spectra(image_hdu, spectra) + self._from_imagehdu_and_spectra(image_hdu, spectra, **kwargs) elif image_hdu is not None and flux is not None: - self._from_imagehdu_and_flux(image_hdu, flux) + self._from_imagehdu_and_flux(image_hdu, flux, **kwargs) elif image_hdu is not None and flux is None and spectra is None: if image_hdu.header.get("BUNIT") is not None: - self._from_imagehdu_only(image_hdu) + self._from_imagehdu_only(image_hdu, **kwargs) else: - msg = ("image_hdu must be accompanied by either spectra or flux:\n" - f"spectra: {spectra}, flux: {flux}") - 
logger.exception(msg) + msg = ("image_hdu must be accompanied by either spectra or " + f"flux:\n spectra: {spectra}, flux: {flux}") raise ValueError(msg) elif x is not None and y is not None and \ ref is not None and spectra is not None: - self._from_arrays(x, y, ref, weight, spectra) + self._from_arrays(x, y, ref, weight, spectra, **kwargs) + + self.meta.update(kwargs) + + def _from_file(self, filename, spectra, flux, **kwargs): + assert not self.fields, "Constructor method must act on empty instance!" - def _from_file(self, filename, spectra, flux): filename = find_file(filename) - if is_fits(filename): - fits_type = get_fits_type(filename) - data = fits.getdata(filename) - hdr = fits.getheader(filename) - hdr["FILENAME"] = Path(filename).name - if fits_type == "image": - image = fits.ImageHDU(data=data, header=hdr) - if spectra is not None: - self._from_imagehdu_and_spectra(image, spectra) - elif flux is not None: - self._from_imagehdu_and_flux(image, flux) - else: - self._from_imagehdu_only(image) - elif fits_type == "bintable": - hdr1 = fits.getheader(filename, 1) - hdr.update(hdr1) - tbl = Table(data, meta=dict(hdr)) - tbl.meta.update(convert_table_comments_to_dict(tbl)) - self._from_table(tbl, spectra) + if not is_fits(filename) or get_fits_type(filename) == "bintable": + self.fields = [ + TableSourceField.from_file(filename, spectra, **kwargs) + ] + return + + data = fits.getdata(filename) + hdr = fits.getheader(filename) + hdr["FILENAME"] = Path(filename).name + image = fits.ImageHDU(data=data, header=hdr) + if spectra is not None: + self._from_imagehdu_and_spectra(image, spectra, **kwargs) + elif flux is not None: + self._from_imagehdu_and_flux(image, flux, **kwargs) else: - tbl = ioascii.read(filename) - tbl.meta.update(convert_table_comments_to_dict(tbl)) - self._from_table(tbl, spectra) - - def _from_table(self, tbl, spectra): - if "weight" not in tbl.colnames: - tbl.add_column(Column(name="weight", data=np.ones(len(tbl)))) - tbl["ref"] += len(self.spectra) - self.fields.append(tbl) - self.spectra += spectra - - def _from_imagehdu_and_spectra(self, image_hdu, spectra): + self._from_imagehdu_only(image, **kwargs) + + def _from_table(self, tbl: Table, spectra: dict[int, SourceSpectrum], + **kwargs): + assert not self.fields, "Constructor method must act on empty instance!" + self.fields = [TableSourceField(tbl, spectra, meta=kwargs)] + + def _from_imagehdu_and_spectra(self, image_hdu, + spectra: dict[int, SourceSpectrum], + **kwargs): + assert not self.fields, "Constructor method must act on empty instance!" + if not image_hdu.header.get("BG_SRC"): pass # FIXME: This caused more problems than it solved! @@ -253,25 +247,25 @@ def _from_imagehdu_and_spectra(self, image_hdu, spectra): image_hdu = fits.ImageHDU(data=image_hdu.data, header=image_hdu.header) - if spectra is not None and len(spectra) > 0: - image_hdu.header["SPEC_REF"] = len(self.spectra) - self.spectra += spectra - else: - image_hdu.header["SPEC_REF"] = "" - logger.warning("No spectrum was provided. SPEC_REF set to ''. 
" - "This could cause problems later") - raise NotImplementedError + if not spectra: + raise ValueError("No spectrum was provided.") + + # image_hdu.header["SPEC_REF"] = len(self.spectra) + assert len(spectra) == 1, f"_from_imagehdu_and_spectra needs single spectrum, ref was {image_hdu.header.get('SPEC_REF')}" + image_hdu.header["SPEC_REF"] = 0 for i in [1, 2]: # Do not test for CUNIT or CDELT so that it throws an exception - unit = u.Unit(image_hdu.header["CUNIT"+str(i)].lower()) - val = float(image_hdu.header["CDELT"+str(i)]) - image_hdu.header["CUNIT"+str(i)] = "deg" - image_hdu.header["CDELT"+str(i)] = val * unit.to(u.deg) + unit = u.Unit(image_hdu.header[f"CUNIT{i}"].lower()) + val = float(image_hdu.header[f"CDELT{i}"]) + image_hdu.header[f"CUNIT{i}"] = "deg" + image_hdu.header[f"CDELT{i}"] = val * unit.to(u.deg) - self.fields.append(image_hdu) + self.fields = [ImageSourceField(image_hdu, spectra, meta=kwargs)] + + def _from_imagehdu_and_flux(self, image_hdu, flux, **kwargs): + assert not self.fields, "Constructor method must act on empty instance!" - def _from_imagehdu_and_flux(self, image_hdu, flux): if isinstance(flux, u.Unit): flux = 1 * flux @@ -281,10 +275,12 @@ def _from_imagehdu_and_flux(self, image_hdu, flux): spec_template = src_tmp.ab_spectrum flux = flux.to(u.ABmag) flux = flux.value - spectra = [spec_template(flux)] - self._from_imagehdu_and_spectra(image_hdu, spectra) + spectra = {0: spec_template(flux)} + self._from_imagehdu_and_spectra(image_hdu, spectra, **kwargs) + + def _from_imagehdu_only(self, image_hdu, **kwargs): + assert not self.fields, "Constructor method must act on empty instance!" - def _from_imagehdu_only(self, image_hdu): bunit = image_hdu.header.get("BUNIT") try: bunit = u.Unit(bunit) @@ -296,23 +292,17 @@ def _from_imagehdu_only(self, image_hdu): ">>> Source(image_hdu=..., flux=u.Unit(bunit), ...)", bunit) value = 0 if bunit in [u.mag, u.ABmag] else 1 - self._from_imagehdu_and_flux(image_hdu, value * bunit) - - def _from_arrays(self, x, y, ref, weight, spectra): - if weight is None: - weight = np.ones(len(x)) + self._from_imagehdu_and_flux(image_hdu, value * bunit, **kwargs) - x = quantify(x, u.arcsec) - y = quantify(y, u.arcsec) - tbl = Table(names=["x", "y", "ref", "weight"], - data=[x, y, np.array(ref) + len(self.spectra), weight]) - tbl.meta["x_unit"] = "arcsec" - tbl.meta["y_unit"] = "arcsec" + def _from_arrays(self, x, y, ref, weight, + spectra: dict[int, SourceSpectrum], + **kwargs): + assert not self.fields, "Constructor method must act on empty instance!" + self.fields = [ + TableSourceField.from_arrays(x, y, ref, weight, spectra, **kwargs) + ] - self.fields.append(tbl) - self.spectra += spectra - - def _from_cube(self, cube, ext=0): + def _from_cube(self, cube, ext=0, **kwargs): """ Parameters @@ -322,11 +312,13 @@ def _from_cube(self, cube, ext=0): the extension where the cube is located if applicable. """ + assert not self.fields, "Constructor method must act on empty instance!" 
+ if isinstance(cube, fits.HDUList): - data = cube[ext].data - header = cube[ext].header - wcs = WCS(cube[ext], fobj=cube) - elif isinstance(cube, (fits.PrimaryHDU, fits.ImageHDU)): + self.fields = [CubeSourceField.from_hdulist(cube, ext, **kwargs)] + return + + if isinstance(cube, (fits.PrimaryHDU, fits.ImageHDU)): data = cube.data header = cube.header wcs = WCS(cube) @@ -366,34 +358,73 @@ def _from_cube(self, cube, ext=0): cube_hdu = fits.ImageHDU(data=target_cube, header=target_hdr) cube_hdu.wave = wave # ..todo: review wave attribute, bad practice - self.fields.append(cube_hdu) + self.fields = [CubeSourceField(cube_hdu, meta=kwargs)] + + def _get_fields(self, subclass): + """Yield fields of specific subclass.""" + for field in self.fields: + if isinstance(field, subclass): + yield field @property def table_fields(self): - """List of fields that are defined through tables""" - fields = [field for field in self.fields if isinstance(field, Table)] - return fields + """List of fields that are defined through tables.""" + return list(self._get_fields(TableSourceField)) @property def image_fields(self): - """List of fields that are defined through two-dimensional images""" - fields = [field for field in self.fields if - isinstance(field, fits.ImageHDU) and field.header["NAXIS"] == 2] - return fields + """List of fields that are defined through 2D images.""" + return list(self._get_fields(ImageSourceField)) @property def cube_fields(self): - """List of fields that are defined through three-dimensional cubes""" - fields = [field for field in self.fields if - isinstance(field, fits.ImageHDU) and field.header["NAXIS"] == 3] - return fields + """List of fields that are defined through 3D datacubes.""" + return list(self._get_fields(CubeSourceField)) + + @property + def spectra(self): + spectra = {} + for fld in self.fields: + if not isinstance(fld, SpectrumSourceField): + continue + try: + spectra.update(fld.spectra) + except TypeError: + logger.error("Error adding fields spectra.") + return spectra + + @spectra.setter + def spectra(self, value): + logger.error("spectra setting is deprecated") + pass + + @property + def meta(self): + if len(self.fields) == 1: + # Some code (mostly in ScopeSim_Templates) relies on single-field + # sources exposing their field meta as the Source's meta. + # If there's more than one field, the metas should always be + # accessed through the fields to avoid ambiguity. 
+ return self.fields[0].meta + return self._meta + + @property + def bandpass(self): + return self._bandpass + + @bandpass.setter + def bandpass(self, bandpass): + if not isinstance(bandpass, SpectralElement): + raise ValueError("type(bandpass) must be synphot.SpectralElement") + + self._bandpass = bandpass # ..todo: rewrite this method def image_in_range(self, wave_min, wave_max, pixel_scale=1*u.arcsec, layers=None, area=None, spline_order=1, sub_pixel=False): if layers is None: layers = range(len(self.fields)) - fields = [self.fields[ii] for ii in layers] + fields = [self.fields[ii].field for ii in layers] hdr = imp_utils.get_canvas_header(fields, pixel_scale=pixel_scale) im_plane = ImagePlane(hdr) @@ -436,7 +467,7 @@ def image_in_range(self, wave_min, wave_max, pixel_scale=1*u.arcsec, return im_plane - def photons_in_range(self, wave_min, wave_max, area=None, indexes=None): + def photons_in_range(self, wave_min, wave_max, area=None, indices=None): """ Parameters @@ -447,7 +478,7 @@ def photons_in_range(self, wave_min, wave_max, area=None, indexes=None): [um] area : float, u.Quantity, optional [m2] - indexes : list of integers, optional + indices : list of integers, optional Returns ------- @@ -456,10 +487,10 @@ def photons_in_range(self, wave_min, wave_max, area=None, indexes=None): [ph / s] if area is passed """ - if indexes is None: - indexes = range(len(self.spectra)) + if indices is None: + indices = self.spectra.keys() - spectra = [self.spectra[ii] for ii in indexes] + spectra = [self.spectra[ii] for ii in indices] counts = photons_in_range(spectra, wave_min, wave_max, area=area, bandpass=self.bandpass) return counts @@ -470,31 +501,21 @@ def fluxes(self, wave_min, wave_max, **kwargs): def image(self, wave_min, wave_max, **kwargs): return self.image_in_range(wave_min, wave_max, **kwargs) - @classmethod - def load(cls, filename): - """Load :class:'.Source' object from filename""" - with open(filename, "rb") as fp1: - src = pickle.load(fp1) - return src - - def dump(self, filename): - """Save to filename as a pickle""" - with open(filename, "wb") as fp1: - pickle.dump(self, fp1) - - # def collapse_spectra(self, wave_min=None, wave_max=None): - # for spec in self.spectra: - # waves = spec.waveset - # if wave_min is not None and wave_max is not None: - # mask = (waves >= wave_min) * (waves <= wave_max) - # waves = waves[mask] - # fluxes = spec(waves) - # spec = SourceSpectrum(Empirical1D, points=waves, - # lookup_table=fluxes) - - def shift(self, dx=0, dy=0, layers=None): + # @classmethod + # def load(cls, filename): + # """Load :class:'.Source' object from filename""" + # with open(filename, "rb") as fp1: + # src = pickle.load(fp1) + # return src + + # def dump(self, filename): + # """Save to filename as a pickle""" + # with open(filename, "wb") as fp1: + # pickle.dump(self, fp1) + + def shift(self, dx: float = 0, dy: float = 0, layers=None) -> None: """ - Shifts the position of one or more fields w.r.t. the optical axis + Shift the position of one or more fields w.r.t. the optical axis. 
Parameters ---------- @@ -505,62 +526,44 @@ def shift(self, dx=0, dy=0, layers=None): """ if layers is None: - layers = np.arange(len(self.fields)) + layers = range(len(self.fields)) for ii in layers: - if isinstance(self.fields[ii], Table): - x = quantity_from_table("x", self.fields[ii], u.arcsec) - x += quantify(dx, u.arcsec) - self.fields[ii]["x"] = x - - y = quantity_from_table("y", self.fields[ii], u.arcsec) - y += quantify(dy, u.arcsec) - self.fields[ii]["y"] = y - elif isinstance(self.fields[ii], (fits.ImageHDU, fits.PrimaryHDU)): - dx = quantify(dx, u.arcsec).to(u.deg) - dy = quantify(dy, u.arcsec).to(u.deg) - self.fields[ii].header["CRVAL1"] += dx.value - self.fields[ii].header["CRVAL2"] += dy.value + self.fields[ii].shift(dx, dy) def rotate(self, angle, offset=None, layers=None): pass - def add_bandpass(self, bandpass): - if not isinstance(bandpass, SpectralElement): - raise ValueError("type(bandpass) must be synphot.SpectralElement") - - self.bandpass = bandpass - def plot(self): """ - Plot the location of source components + Plot the location of source components. - Source components instantiated from 2d or 3d ImageHDUs are represented by their - spatial footprint. Source components instantiated from tables are shown as points. + Source components instantiated from 2d or 3d ImageHDUs are represented + by their spatial footprint. Source components instantiated from tables + are shown as points. """ _, axes = figure_factory() colours = "rgbcymk" * (len(self.fields) // 7 + 1) for col, field in zip(colours, self.fields): - if isinstance(field, Table): - axes.plot(field["x"], field["y"], col+".") - elif isinstance(field, (fits.ImageHDU, fits.PrimaryHDU)): - xypts = imp_utils.calc_footprint(field.header) - convf = u.Unit(field.header["CUNIT1"]).to(u.arcsec) - outline = np.array(list(close_loop(xypts))) * convf - axes.plot(outline[:, 0], outline[:, 1], col) - axes.set_xlabel("x [arcsec]") - axes.set_ylabel("y [arcsec]") + field.plot(axes, col) axes.set_aspect("equal") + axes.set_xlabel("x [arcsec]") + axes.set_ylabel("y [arcsec]") + axes.legend() + return axes def make_copy(self): new_source = Source() - new_source.meta = deepcopy(self.meta) - new_source._meta_dicts = deepcopy(self._meta_dicts) - new_source.spectra = deepcopy(self.spectra) + new_source._meta = deepcopy(self.meta) + # new_source.spectra = deepcopy(self.spectra) for field in self.fields: - if isinstance(field, (fits.ImageHDU, fits.PrimaryHDU)) \ - and field._file is not None: # and field._data_loaded is False: + # new_source.fields.append(deepcopy(field)) + # TODO: The code below refers to DataContainer?? + # FIXME: Omitting this causes "TypeError: cannot pickle '_io.BufferedReader' object" + # for fields loaded from FITS files. Should be properly fixed!! + if (isinstance(field.field, (fits.ImageHDU, fits.PrimaryHDU)) + and field.field._file is not None): # and field._data_loaded is False: new_source.fields.append(field) else: new_source.fields.append(deepcopy(field)) @@ -568,30 +571,32 @@ def make_copy(self): return new_source def append(self, source_to_add): + if not isinstance(source_to_add, Source): + raise ValueError(f"Cannot add {type(source_to_add)} object to Source object") + new_source = source_to_add.make_copy() - # If there is no field yet, then self._meta_dicts contains a - # reference to self.meta, which is empty. This ensures that both are - # updated at the same time. However, it is important that the fields - # and _meta_dicts match when appending sources. 
- if len(self.fields) == 0: - assert self._meta_dicts == [{}] - self._meta_dicts = [] - if isinstance(source_to_add, Source): - for field in new_source.fields: - if isinstance(field, Table): - field["ref"] += len(self.spectra) - self.fields.append(field) - - elif isinstance(field, (fits.ImageHDU, fits.PrimaryHDU)): - if ("SPEC_REF" in field.header and + + # FIXME: This offset should not be required, now that spectra for each + # field are stored in that field. However, there is some code + # that loops over the combined Source.spectra dict and that + # fails if the keys in the field's spectra contain duplicates. + # This need to be fixed ASAP! + specrefoffset = max(self.spectra.keys()) + 1 if self.spectra else 0 + for field in new_source.fields: + if isinstance(field, TableSourceField): + field.field["ref"] += specrefoffset + self.fields.append(field) + + elif isinstance(field, HDUSourceField): + if ("SPEC_REF" in field.header and isinstance(field.header["SPEC_REF"], int)): - field.header["SPEC_REF"] += len(self.spectra) - self.fields.append(field) - self.spectra += new_source.spectra + field.field.header["SPEC_REF"] += specrefoffset + self.fields.append(field) - self._meta_dicts += source_to_add._meta_dicts - else: - raise ValueError(f"Cannot add {type(new_source)} object to Source object") + field.spectra = {k + specrefoffset: v for k, v in + new_source.spectra.items()} + + self.meta.update(new_source.meta) def __add__(self, new_source): self_copy = self.make_copy() @@ -601,20 +606,10 @@ def __add__(self, new_source): def __radd__(self, new_source): return self.__add__(new_source) - def __repr__(self): - msg = "" - for ifld, fld in enumerate(self.fields): - if isinstance(fld, Table): - tbl_len = len(fld) - num_spec = set(fld["ref"]) - msg += f"[{ifld}]: Table with {tbl_len} rows, referencing spectra {num_spec} \n" - elif isinstance(fld, (fits.ImageHDU, fits.PrimaryHDU)): - im_size = fld.data.shape if fld.data is not None else "" - num_spec = "-" - msg += f"[{ifld}]: ImageHDU with size {im_size}" - if "SPEC_REF" in self.fields[ifld].header: - num_spec = self.fields[ifld].header["SPEC_REF"] - msg += f", referencing spectrum {num_spec}" - msg += "\n" - - return msg + def __repr__(self) -> str: + with StringIO() as str_stream: + for ifld, fld in enumerate(self.fields): + str_stream.write(f"[{ifld}]: ") + fld._write_stream(str_stream) + str_stream.write("\n") + return str_stream.getvalue() diff --git a/scopesim/source/source_fields.py b/scopesim/source/source_fields.py new file mode 100644 index 00000000..7db133cb --- /dev/null +++ b/scopesim/source/source_fields.py @@ -0,0 +1,317 @@ +# -*- coding: utf-8 -*- +""" +Contains ``SourceField`` and its subclasses. + +While the ``Source`` object serves as the high-level interface between target +descriptions and the ScopeSim optical train, the actual information about the +observed objects is stored in the ``SourceField`` classes, which constitute the +members of ``Source.fields`` collection. Any target to be understood by +ScopeSim can be characterized by either a ``Table`` of point sources, a +two-dimensional image (``ImageHDU``) plus a separate (averaged) spectrum, or a +three-dimensional datacube containing spectral information for each spatial +pixel. 
This threefold abstraction is mirrored by the three final subclasses of
+``SourceField``: ``TableSourceField`` for point source tables with a spectrum
+reference for each individual point source, ``ImageSourceField`` for a 2D image
+with an average spectrum, and finally ``CubeSourceField`` with a full 3D data
+cube. The ``ImageSourceField`` and ``CubeSourceField`` also contain ``WCS``
+coordinate information, and the wavelength axis of the ``CubeSourceField`` is
+available via the ``CubeSourceField.wave`` attribute.
+
+In previous versions of ScopeSim (pre-0.9), the ``Source.fields`` collection
+simply held the individual ``Table`` and ``ImageHDU`` (2D or 3D) objects, which
+are now stored in the ``.field`` attribute of each source field. This new
+distinction of the different cases allows much clearer separation of the logic
+required to handle various operations on those objects, such as plotting and
+shifting the source, which previously had to incorporate a number of case
+differentiations that made the ``Source`` class rather overloaded with logic.
+This now also allows for well-structured validation logic of the individual
+source field data upon creation of each ``SourceField`` subclass instance.
+
+Creation of the source field classes is usually handled by the ``Source`` class
+itself via its various constructor methods, so the user rarely interacts with
+these classes directly, except for debugging. They serve more as an internal
+abstraction layer to handle the different cases of target object descriptions,
+as described above.
+
+The following class diagram illustrates the relationship between the
+``SourceField`` subclasses:
+
+```mmd
+classDiagram
+class SourceField{+field}
+class SpectrumSourceField{+spectra}
+class HDUSourceField{+wcs}
+
+SourceField <|-- SpectrumSourceField
+SourceField <|-- HDUSourceField
+SpectrumSourceField <|-- TableSourceField
+SpectrumSourceField <|-- ImageSourceField
+HDUSourceField <|-- ImageSourceField
+HDUSourceField <|-- CubeSourceField
+```
+
+"""
+
+from copy import deepcopy
+from pathlib import Path
+from typing import TextIO, Any
+from dataclasses import dataclass, KW_ONLY, field as dataclass_field
+# renamed to dataclass_field to avoid confusion with source fields
+
+import numpy as np
+
+from astropy.table import Table, Column
+from astropy.io.registry import IORegistryError
+from astropy.io import fits
+from astropy import units as u
+from astropy.wcs import WCS, SingularMatrixError, FITSFixedWarning
+
+from synphot import SourceSpectrum
+
+
+from ..optics import image_plane_utils as imp_utils
+from ..utils import (quantify, quantity_from_table, close_loop, get_logger,
+                     convert_table_comments_to_dict)
+
+
+logger = get_logger(__name__)
+
+
+# TODO: consider making this a metaclass
+@dataclass
+class SourceField:
+    """Base class for source fields, not meant to be instantiated."""
+
+    field: Any
+    _: KW_ONLY
+    meta: dict = dataclass_field(default_factory=dict, repr=False)
+
+    def _write_stream(self, stream: TextIO) -> None:
+        raise NotImplementedError("Subclasses should implement this.")
+
+    def __getitem__(self, key):
+        # For backwards-compatibility to allow direct access of
+        # Source.fields[x][y] if possible. Maybe in the long run get rid of
+        # this and force the use of .field...
+        # warn("Direct item access for source fields may become deprecated "
+        #      "in the future. Use the .field attribute instead.",
+        #      PendingDeprecationWarning, stacklevel=2)
+        return self.field.__getitem__(key)
+
+    def __setitem__(self, key, value):
+        # For backwards-compatibility to allow direct access of
+        # Source.fields[x][y] if possible. Maybe in the long run get rid of
+        # this and force the use of .field...
+        # warn("Direct item assignment for source fields may become deprecated "
+        #      "in the future. Use the .field attribute instead.",
+        #      PendingDeprecationWarning, stacklevel=2)
+        self.field.__setitem__(key, value)
+
+    @property
+    def name(self) -> str:
+        """Name of the object (if set)."""
+        return self.meta.get("object", "")
+
+
+@dataclass
+class SpectrumSourceField(SourceField):
+    """Base class for source fields with separate spectra (no cube)."""
+
+    spectra: dict
+
+
+@dataclass
+class TableSourceField(SpectrumSourceField):
+    """Source field with table of point source(s)."""
+
+    field: Table
+
+    @classmethod
+    def from_file(cls, filename: Path | str,
+                  spectra: dict[int, SourceSpectrum],
+                  **kwargs):
+        """Load source table from file."""
+        try:
+            tbl = Table.read(filename)
+            # There used to be a header combining functionality here...
+            # hdr1 = fits.getheader(filename, 1)
+            # hdr.update(hdr1)
+            # tbl = Table(data, meta=dict(hdr))
+            # tbl.meta.update(convert_table_comments_to_dict(tbl))
+        except IORegistryError:
+            logger.debug("Table format guessing failed, retrying with ascii.")
+            tbl = Table.read(filename, format="ascii")
+
+        tbl.meta.update(convert_table_comments_to_dict(tbl))
+        return cls(tbl, spectra=spectra, meta=kwargs)
+
+    @classmethod
+    def from_arrays(cls, x, y, ref, weight,
+                    spectra: dict[int, SourceSpectrum],
+                    **kwargs):
+        """Construct source table from arrays for each column."""
+        if weight is None:
+            weight = np.ones(len(x))
+
+        x = quantify(x, u.arcsec)
+        y = quantify(y, u.arcsec)
+        tbl = Table(names=["x", "y", "ref", "weight"],
+                    data=[x, y, ref, weight])
+        tbl.meta["x_unit"] = "arcsec"
+        tbl.meta["y_unit"] = "arcsec"
+        return cls(tbl, spectra=spectra, meta=kwargs)
+
+    def __post_init__(self):
+        """Validate input."""
+        assert self.spectra, "Spectra must be non-empty for table source."
+ if "weight" not in self.field.colnames: + self.field.add_column( + Column(name="weight", data=np.ones(len(self.field))) + ) + self.meta.update(self.field.meta) + + def __len__(self) -> int: + """Return len(self).""" + return len(self.field) + + def _write_stream(self, stream: TextIO) -> None: + stream.write(f"Table with {len(self)} rows, referencing " + f"spectra {set(self.spectra)}") + + def plot(self, axes, color) -> None: + """Plot source.""" + axes.plot(self.field["x"], self.field["y"], color+".", label=self.name) + + def shift(self, dx, dy) -> None: + """Shift source by dx, dy.""" + x = quantity_from_table("x", self.field, u.arcsec) + x += quantify(dx, u.arcsec) + self.field["x"] = x + + y = quantity_from_table("y", self.field, u.arcsec) + y += quantify(dy, u.arcsec) + self.field["y"] = y + + +@dataclass +class HDUSourceField(SourceField): + """Base class for source fields with HDU.""" + + field: fits.ImageHDU + wcs: WCS | None = dataclass_field(default=None, init=False) + + def __new__(cls, *args, **kwargs): + """Override creation to create subclasses.""" + if issubclass(cls, (CubeSourceField, ImageSourceField)): + # Allow for direct subclass access + return super().__new__(cls) + + field = kwargs.get("field", args[0]) + if field.header["NAXIS"] == 3: + return super().__new__(CubeSourceField) + if field.header["NAXIS"] == 2: + return super().__new__(ImageSourceField) + + # If we get here, something went wrong + raise TypeError(f"{field.header['NAXIS'] = } must be 2 or 3.") + + @property + def header(self) -> fits.Header: + """Shortcut for `field.header`.""" + return self.field.header + + @property + def data(self) -> np.ndarray: + """Shortcut for `field.data`.""" + return self.field.data + + @data.setter + def data(self, value): + self.field.data = value + + @property + def img_size(self) -> str: + """Shortcut for `field.data.shape`.""" + if self.data is None: + return "" + return str(self.data.shape) + + def _write_stream(self, stream: TextIO) -> None: + stream.write(f"ImageHDU with size {self.img_size}, referencing " + f"spectrum {self.field.header.get('SPEC_REF', '-')}") + + def plot(self, axes, color) -> None: + """Plot source.""" + xypts = imp_utils.calc_footprint(self.header) + convf = u.Unit(self.header["CUNIT1"]).to(u.arcsec) + outline = np.array(list(close_loop(xypts))) * convf + axes.plot(*outline.T, color, label=self.name) + + def shift(self, dx, dy) -> None: + """Shift source by dx, dy.""" + dx = dx << u.arcsec << self.wcs.wcs.cunit[0] + dy = dy << u.arcsec << self.wcs.wcs.cunit[1] + self.header["CRVAL1"] += dx.value + self.header["CRVAL2"] += dy.value + # TODO: self.wcs should be updated here! + + +@dataclass +class ImageSourceField(SpectrumSourceField, HDUSourceField): + """Source field with 2D image and a single (average) spectrum.""" + + def __post_init__(self): + """Validate input.""" + assert self.spectra, "Spectra must be non-empty for 2D image source." 
+ try: + self.wcs = WCS(self.field) + except (SingularMatrixError, FITSFixedWarning): + # This occurs for BG SRC + logger.debug("Couldn't create source field WCS.") + self.wcs = None + + +@dataclass +class CubeSourceField(HDUSourceField): + """Source field with 3D data cube.""" + + from_hdul: bool = False + + def __post_init__(self): + """Validate input.""" + if self.wcs is None and not self.from_hdul: + self.wcs = WCS(self.field) + + try: + bunit = u.Unit(self.header["BUNIT"]) + except KeyError: + bunit = u.erg / u.s / u.cm**2 / u.arcsec**2 + logger.warning( + "Keyword \"BUNIT\" not found, setting to %s by default", bunit) + except ValueError as error: + logger.error("\"BUNIT\" keyword is malformed: %s", error) + raise + self.field.header["BUNIT"] = str(bunit) + + @classmethod + def from_hdulist(cls, hdulist: fits.HDUList, ext: int = 0, **kwargs): + """Load source cube from HDUL.""" + cube = fits.ImageHDU(header=hdulist[ext].header.copy(), + data=deepcopy(hdulist[ext].data)) + new_csf = cls(field=cube, meta=kwargs, from_hdul=True) + new_csf.wcs = WCS(hdulist[ext], fobj=hdulist) + return new_csf + + def shift(self, dx, dy) -> None: + """Shift source by dx, dy.""" + logger.warning( + "Source shift for cubes assumes first two axes are celestial.") + super().shift(dx, dy) + + @property + def wave(self) -> u.Quantity: + """Construct wavelength axis for cube in um.""" + swcs = self.wcs.spectral + with u.set_enabled_equivalencies(u.spectral()): + wave = swcs.pixel_to_world(np.arange(swcs.pixel_shape[0])) << u.um + return wave diff --git a/scopesim/source/source_utils.py b/scopesim/source/source_utils.py index b58a1f65..cc2dfa8b 100644 --- a/scopesim/source/source_utils.py +++ b/scopesim/source/source_utils.py @@ -1,4 +1,7 @@ +# -*- coding: utf-8 -*- +from typing import Optional, Union from collections.abc import Iterable +from pathlib import Path import numpy as np from astropy import wcs, units as u @@ -6,143 +9,187 @@ from astropy.table import Table from synphot import SourceSpectrum, Empirical1D, SpectralElement -from ..utils import find_file, quantify, get_logger +from ..utils import find_file, get_logger, convert_table_comments_to_dict logger = get_logger(__name__) -def validate_source_input(**kwargs): - if "filename" in kwargs and kwargs["filename"] is not None: - filename = kwargs["filename"] - if find_file(filename) is None: - logger.warning("filename was not found: %s", filename) - - if "image" in kwargs and kwargs["image"] is not None: - image_hdu = kwargs["image"] - if not isinstance(image_hdu, (fits.PrimaryHDU, fits.ImageHDU)): - raise ValueError("image must be fits.HDU object with a WCS." - f"{type(image_hdu) = }") - - if len(wcs.find_all_wcs(image_hdu.header)) == 0: - logger.warning("image does not contain valid WCS. %s", - wcs.WCS(image_hdu)) - - if "table" in kwargs and kwargs["table"] is not None: - tbl = kwargs["table"] - if not isinstance(tbl, Table): - raise ValueError("table must be an astropy.Table object:" - f"{type(tbl) = }") - - if not np.all([col in tbl.colnames for col in ["x", "y", "ref"]]): - raise ValueError("table must contain at least column names: " - f"'x, y, ref': {tbl.colnames}") - - return True - +def validate_source_input(**kwargs) -> None: + """ + Check validity of kwargs passed to ``Source`` object. -def convert_to_list_of_spectra(spectra, lam): - spectra_list = [] - if isinstance(spectra, SourceSpectrum): - spectra_list += [spectra] + Currently checks for "filename", "image" and "table", raising the + exceptions listed below. 
Additionally logs a warning if no WCS is found in + an image, or if a given filename cannot be found. - elif lam is None and\ - isinstance(spectra, (tuple, list)) and \ - isinstance(spectra[0], SourceSpectrum): - spectra_list += spectra + Parameters + ---------- + **kwargs : TYPE + DESCRIPTION. - elif lam is not None and len(spectra.shape) == 1 and \ - isinstance(spectra, np.ndarray) and \ - isinstance(lam, np.ndarray): - spec = SourceSpectrum(Empirical1D, points=lam, lookup_table=spectra) - spectra_list += [spec] + Raises + ------ + TypeError + Raised if an image isn't a FITS HDU or a table isn't an astropy Table. + ValueError + Raised if a table does not contain the minimum required columns. - elif ((isinstance(spectra, np.ndarray) and - len(spectra.shape) == 2) or - (isinstance(spectra, (list, tuple)) and - isinstance(spectra[0], np.ndarray))) and \ - isinstance(lam, np.ndarray): + Returns + ------- + None - for sp in spectra: - spec = SourceSpectrum(Empirical1D, points=lam, lookup_table=sp) - spectra_list += [spec] + """ + if (filename := kwargs.get("filename")) is not None: + if find_file(filename) is None: + logger.warning("filename was not found: %s", filename) - return spectra_list + if (image_hdu := kwargs.get("image")) is not None: + if not isinstance(image_hdu, (fits.PrimaryHDU, fits.ImageHDU)): + raise TypeError( + f"Image must be fits.HDU object: {type(image_hdu) = }") + if not wcs.find_all_wcs(image_hdu.header): + logger.warning( + "Image does not contain valid WCS. %s", wcs.WCS(image_hdu)) -def photons_in_range(spectra, wave_min, wave_max, area=None, bandpass=None): + if (tbl := kwargs.get("table")) is not None: + if not isinstance(tbl, Table): + raise TypeError( + f"Table must be astropy.Table object: {type(tbl) = }") + + if not {"x", "y", "ref"}.issubset(tbl.colnames): + raise ValueError( + "Table must contain at least the following column names: 'x', " + f"""'y', 'ref'; found only: '{"', '".join(tbl.colnames)}'""") + # TODO py312: The triple quotes will become redundant in 3.12 ! 
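# --- Editor's illustrative sketch, not part of the patch above. ---
# A minimal usage example of the reworked validate_source_input, inferred only
# from the checks shown in this hunk (Table type check, required columns,
# warning-only filename check). Names and behaviour below are assumptions
# based on that code, not on any additional API.
from astropy.table import Table
from scopesim.source.source_utils import validate_source_input

validate_source_input(table=Table(names=["x", "y", "ref"]))  # passes silently

try:
    validate_source_input(table={"x": [0]})  # not an astropy Table
except TypeError as err:
    print(err)

try:
    validate_source_input(table=Table(names=["x", "y"]))  # "ref" column missing
except ValueError as err:
    print(err)
# --- End of sketch; the patch continues below. ---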
+ + +def convert_to_list_of_spectra(spectra, lam) -> list[SourceSpectrum]: + """Produce SourceSpectrum instances or pass them through.""" + def _synphotify(spec): + if not isinstance(lam, np.ndarray): + raise TypeError("If spectra is/are given as array(s), lam must be " + "an array as well.") + return SourceSpectrum(Empirical1D, points=lam, lookup_table=spec) + + def _from_arrays(specarrays): + for spec in specarrays: + yield _synphotify(spec) + + def _get_list(): + if isinstance(spectra, SourceSpectrum): + yield spectra + return + + if (isinstance(spectra, Iterable) and + not isinstance(spectra, np.ndarray)): + _spectra = list(spectra) # avoid eating iterators in all() + if all(isinstance(spec, SourceSpectrum) for spec in _spectra): + yield from _spectra + elif all(isinstance(spec, np.ndarray) for spec in _spectra): + yield from _from_arrays(_spectra) + else: + raise ValueError( + "If given as an iterable, spectra must consist of all " + "synphot spectra or all arrays") + return + + if isinstance(spectra, np.ndarray): + if spectra.ndim == 1: + yield _synphotify(spectra) + elif spectra.ndim == 2: + yield from _from_arrays(spectra) + else: + raise ValueError( + "If given as an array, spectra must have either 1 (single " + "flux list) or 2 (flux of multiple spectra) dimensions, " + f"but {spectra.ndim} were found.") + return + + return list(_get_list()) + + +# FIXME: typing: This should work with the more general (and true, since +# conversion is done anyway) Quantity["length"] (or "area" resp.), but +# doing so currently causes a NameError. Not sure what's going on. +def photons_in_range( + spectra: SourceSpectrum, + wave_min: u.Quantity[u.um] | float, + wave_max: u.Quantity[u.um] | float, + area: Optional[Union[u.Quantity[u.m**2], float]] = None, + bandpass: Optional[SpectralElement] = None, +) -> Union[u.Quantity[u.ph * u.s**-1 * u.m**-2], u.Quantity[u.ph * u.s**-1]]: """ + Integrate photons from spectrum in given wavelength range. Parameters ---------- - spectra - wave_min - [um] - wave_max - [um] - area : Quantity - [m2] - bandpass : SpectralElement - + spectra : SourceSpectrum + Input spectrum. + wave_min : Union[u.Quantity["length"], float] + Minimum wavelength. If float, assumes um. + wave_max : Union[u.Quantity["length"], float] + Maximum wavelength. If float, assumes um. + area : Optional[Union[u.Quantity["area"], float]], optional + Area to multiply with. If float, assumes m**2. The default is None. + bandpass : Optional[SpectralElement], optional + Filter to take into account, if any. The default is None. Returns ------- - counts : u.Quantity array + counts : astropy.units.Quantity + Either in ph/s/m**2 or just ph/s (if area was given). """ - - if isinstance(wave_min, u.Quantity): - wave_min = wave_min.to(u.Angstrom).value - else: - wave_min *= 1E4 - - if isinstance(wave_max, u.Quantity): - wave_max = wave_max.to(u.Angstrom).value - else: - wave_max *= 1E4 + # Note: Assuming um if given as float. + wave_min = (wave_min << u.um << u.Angstrom).value + wave_max = (wave_max << u.um << u.Angstrom).value + # Note: There appear to be some float shenanigans going on here, but + # rounding produces an error in the spectrum evaluation. Not sure what's + # going on, maybe it's fine as-is. 
counts = [] for spec in spectra: waveset = spec.waveset.value mask = (waveset > wave_min) * (waveset < wave_max) - x = waveset[mask] - x = np.append(np.append(wave_min, x), wave_max) - y = spec(x).value + wave = np.array([wave_min, *waveset[mask], wave_max]) + flux = spec(wave).value - # flux [ph s-1 cm-2] == y [ph s-1 cm-2 AA-1] * x [AA] + # flux [ph s-1 cm-2] == flux [ph s-1 cm-2 AA-1] * wave [AA] if isinstance(bandpass, SpectralElement): - bp = bandpass(x) bandpass.model.bounds_error = True - counts += [np.trapz(bp * y, x)] + counts.append(np.trapz(bandpass(wave).value * flux, wave)) else: - counts += [np.trapz(y, x)] + counts.append(np.trapz(flux, wave)) # counts = flux [ph s-1 cm-2] - counts = 1E4 * np.array(counts) # to get from cm-2 to m-2 - counts *= u.ph * u.s**-1 * u.m**-2 + counts = (counts * u.ph * u.s**-1 * u.cm**-2).to(u.ph * u.s**-1 * u.m**-2) if area is not None: - counts *= quantify(area, u.m ** 2) + counts *= (area << u.m**2) return counts def scale_imagehdu(imagehdu, waverange, area=None): - # ..todo: implement this - # For the moment, all imagehdu must be accompanied by a spectrum in PHOTLAM - # - # Future functionality will include scaling here of: - # ph s-1 - # ph s-1 m-2 - # ph s-1 m-2 - # ph s-1 m-2 um-1 - # ph s-1 m-2 um-1 arcsec-2 - # J s-1 m-2 Hz-1 - # J s-1 m-2 Hz-1 arcsec-2 - # ABMAG - # ABMAG arcsec-2 - # VEGAMAG - # VEGAMAG arcsec-2 - + """ + ..todo: implement this + + For the moment, all imagehdu must be accompanied by a spectrum in PHOTLAM + + Future functionality will include scaling here of: + ph s-1 + ph s-1 m-2 + ph s-1 m-2 + ph s-1 m-2 um-1 + ph s-1 m-2 um-1 arcsec-2 + J s-1 m-2 Hz-1 + J s-1 m-2 Hz-1 arcsec-2 + ABMAG + ABMAG arcsec-2 + VEGAMAG + VEGAMAG arcsec-2 + """ if "SPEC_REF" not in imagehdu.header: raise ValueError("For this version, an ImageHDU must be associated " "with a spectrum. This will change in the future.") @@ -150,14 +197,24 @@ def scale_imagehdu(imagehdu, waverange, area=None): return imagehdu -def make_img_wcs_header(pixel_scale, image_size): +def make_img_wcs_header( + pixel_scale: float, + image_size: tuple[int, int], +) -> fits.Header: """ - Create a WCS header for an image + Create a WCS header for an image. + Parameters + ---------- pixel_scale : float - arcsecs - image_size : tuple - x, y where x, y are integers + Pixel scale in arcsecs. + image_size : tuple[int, int] + Image size (x, y). + + Returns + ------- + TYPE + DESCRIPTION. """ ra, dec = 0, 0 @@ -167,8 +224,37 @@ def make_img_wcs_header(pixel_scale, image_size): imgwcs.wcs.ctype = ["RA---TAN", "DEC--TAN"] imgwcs.wcs.cunit = [u.deg, u.deg] imgwcs.wcs.crpix = [(x + 1) / 2, (y + 1) / 2] - imgwcs.wcs.cdelt = np.array([-pixel_scale / 3600, pixel_scale / 3600]) + imgwcs.wcs.cdelt = np.array([-pixel_scale, pixel_scale]) / 3600 imgwcs.wcs.crval = [ra, dec] imgwcs.wcs.cunit = [u.deg, u.deg] return imgwcs.to_header() + + +def parse_sed_table(filename: Path | str) -> Table: + """ + Parse SED table from example cubes. + + Parameters + ---------- + filename : Path | str + Input file path. + + Returns + ------- + astropy.table.Table + Parsed table. + + """ + tbl = Table.read(filename, format="ascii") + tbl.meta.update(convert_table_comments_to_dict(tbl)) + tbl.meta.pop("comments") + new_names = {} + for col in tbl.columns: + cmt = tbl.meta[col.replace("col", "column ")].split("(", maxsplit=1) + tbl[col].unit = cmt[-1].strip(")") + new_names[col] = cmt[0].split(";", maxsplit=1)[0].strip() + # Cannot do a single loop because tbl.columns would get mutated... 
+    for old_name, new_name in new_names.items():
+        tbl[old_name].name = new_name
+    return tbl
diff --git a/scopesim/tests/mocks/py_objects/source_objects.py b/scopesim/tests/mocks/py_objects/source_objects.py
index f3b6de8d..17205c26 100644
--- a/scopesim/tests/mocks/py_objects/source_objects.py
+++ b/scopesim/tests/mocks/py_objects/source_objects.py
@@ -55,6 +55,28 @@ def _table_source_overlapping():
     return tbl_source
 
 
+def _basic_img_hdu(weight):
+    n = 51
+    im_wcs = wcs.WCS(naxis=2)
+    im_wcs.wcs.cunit = [u.arcsec, u.arcsec]
+    im_wcs.wcs.cdelt = [0.2, 0.2]
+    im_wcs.wcs.crval = [0, 0]
+    im_wcs.wcs.crpix = [(n + 1) / 2, (n + 1) / 2]
+    # im_wcs.wcs.ctype = ["RA---TAN", "DEC--TAN"]
+    im_wcs.wcs.ctype = ["LINEAR", "LINEAR"]
+
+    im = np.random.random(size=(n, n)) * 1e-9 * weight
+    im[n-1, 1] += 5 * weight
+    im[1, 1] += 5 * weight
+    im[n // 2, n // 2] += 10 * weight
+    im[n // 2, n-1] += 5 * weight
+
+    im_hdu = fits.ImageHDU(data=im, header=im_wcs.to_header())
+    im_hdu.header["SPEC_REF"] = 0
+
+    return im_hdu
+
+
 def _image_source(dx=0, dy=0, angle=0, weight=1):
     """
     Produce a source with 3 point sources on a random BG.
@@ -77,23 +99,7 @@ def _image_source(dx=0, dy=0, angle=0, weight=1):
     specs = [SourceSpectrum(Empirical1D, points=wave,
                             lookup_table=np.linspace(0, 4, n) * unit)]
 
-    n = 51
-    im_wcs = wcs.WCS(naxis=2)
-    im_wcs.wcs.cunit = [u.arcsec, u.arcsec]
-    im_wcs.wcs.cdelt = [0.2, 0.2]
-    im_wcs.wcs.crval = [0, 0]
-    im_wcs.wcs.crpix = [(n + 1) / 2, (n + 1) / 2]
-    # im_wcs.wcs.ctype = ["RA---TAN", "DEC--TAN"]
-    im_wcs.wcs.ctype = ["LINEAR", "LINEAR"]
-
-    im = np.random.random(size=(n, n)) * 1e-9 * weight
-    im[n-1, 1] += 5 * weight
-    im[1, 1] += 5 * weight
-    im[n // 2, n // 2] += 10 * weight
-    im[n // 2, n-1] += 5 * weight
-
-    im_hdu = fits.ImageHDU(data=im, header=im_wcs.to_header())
-    im_hdu.header["SPEC_REF"] = 0
+    im_hdu = _basic_img_hdu(weight)
 
     im_source = Source(image_hdu=im_hdu, spectra=specs)
 
     angle = angle * np.pi / 180
@@ -137,12 +143,33 @@ def _cube_source(**kwargs):
         source
     """
     n = 101
-    im_src = _image_source(**kwargs)
-    data = im_src.fields[0].data
+    im_hdu = _basic_img_hdu(kwargs.get("weight", 1))
+    # im_src = _image_source(**kwargs)
+    # data = im_src.fields[0].data
+    data = im_hdu.data
+
+    # taken from Source
+    for i in [1, 2]:
+        unit = u.Unit(im_hdu.header[f"CUNIT{i}"].lower())
+        val = float(im_hdu.header[f"CDELT{i}"])
+        im_hdu.header[f"CUNIT{i}"] = "deg"
+        im_hdu.header[f"CDELT{i}"] = val * unit.to(u.deg)
+
+    dx = kwargs.get("dx", 0)
+    dy = kwargs.get("dy", 0)
+    angle = kwargs.get("angle", 0)
+
+    angle = angle * np.pi / 180
+    im_hdu.header["CRVAL1"] += dx / 3600
+    im_hdu.header["CRVAL2"] += dy / 3600
+    im_hdu.header["PC1_1"] = np.cos(angle)
+    im_hdu.header["PC1_2"] = np.sin(angle)
+    im_hdu.header["PC2_1"] = -np.sin(angle)
+    im_hdu.header["PC2_2"] = np.cos(angle)
 
     # Broadcast the array onto a 3rd dimension and scale along the new axis
-    im_src.fields[0].data = data[None, :, :] * np.linspace(0, 4, n)[:, None, None]
-    im_src.spectra = []
+    im_hdu.data = data[None, :, :] * np.linspace(0, 4, n)[:, None, None]
+    # im_src.spectra = {}
 
     # FIXME: CRPIX might be wrong here, aka off-by-one!!
     # But all other code assumes it like this, so I'm keeping it for now.
@@ -151,7 +178,8 @@ def _cube_source(**kwargs):
                      "CRVAL3": 1.5, "CRPIX3": 50,
                      "SPEC_REF": None, "BUNIT": "ph s-1 m-2 um-1"}
-    im_src.fields[0].header.update(cube_hdr_dict)
+    im_hdu.header.update(cube_hdr_dict)
+    im_src = Source(cube=im_hdu)
 
     return im_src
diff --git a/scopesim/tests/tests_optics/test_FieldOfView.py b/scopesim/tests/tests_optics/test_FieldOfView.py
index 6eb45eb5..b4b26d35 100644
--- a/scopesim/tests/tests_optics/test_FieldOfView.py
+++ b/scopesim/tests/tests_optics/test_FieldOfView.py
@@ -403,7 +403,7 @@ def test_make_spectrum_from_table(self):
         spec = fov.make_spectrum()
 
         in_sum = np.sum([n * spec(fov.waveset).value
-                         for n, spec in zip([3, 1, 1], src_table.spectra)])  # sum of weights [3,1,1]
+                         for n, spec in zip([3, 1, 1], src_table.spectra.values())])  # sum of weights [3,1,1]
         out_sum = np.sum(spec(fov.waveset).value)
 
         assert in_sum == approx(out_sum)
@@ -445,7 +445,7 @@ def test_makes_spectrum_from_all_types_of_source_object(self):
         spec = fov.make_spectrum()
 
         table_sum = np.sum([n * spec(fov.waveset).value
-                            for n, spec in zip([3, 1, 1], src_table.spectra)])  # sum of weights [3,1,1]
+                            for n, spec in zip([3, 1, 1], src_table.spectra.values())])  # sum of weights [3,1,1]
         image_sum = np.sum(src_image.fields[0].data) * \
             np.sum(src_image.spectra[0](fov.waveset).value)
         cube_sum = np.sum(src_cube.fields[0].data[70:81, :, :]) * 1e-8
diff --git a/scopesim/tests/tests_source/test_source_Source.py b/scopesim/tests/tests_source/test_source_Source.py
index bc813f98..3859ee5d 100644
--- a/scopesim/tests/tests_source/test_source_Source.py
+++ b/scopesim/tests/tests_source/test_source_Source.py
@@ -16,6 +16,7 @@
 from scopesim.source import source_utils
 from scopesim.source.source import Source
+from scopesim.source.source_fields import CubeSourceField
 from scopesim.optics.image_plane import ImagePlane
 from scopesim.utils import convert_table_comments_to_dict
@@ -137,22 +138,22 @@ def test_initialises_with_table_and_2_spectrum(self, ii,
         src = Source(table=table, spectra=input_spectra)
         assert isinstance(src, Source)
         assert isinstance(src.spectra[0], SourceSpectrum)
-        assert isinstance(src.fields[0], Table)
+        assert isinstance(src.fields[0].field, Table)
         src.shift(0.1, 0.2)
 
     def test_initialises_with_image_and_1_spectrum(self, input_hdulist,
                                                    input_spectra):
-        src = Source(image_hdu=input_hdulist[0], spectra=input_spectra)
+        src = Source(image_hdu=input_hdulist[0], spectra=input_spectra[0])
         assert isinstance(src, Source)
         assert isinstance(src.spectra[0], SourceSpectrum)
-        assert isinstance(src.fields[0], fits.ImageHDU)
+        assert isinstance(src.fields[0].field, fits.ImageHDU)
         src.shift(0.1, 0.2)
 
     def test_initialises_with_image_and_flux(self, input_hdulist):
         src = Source(image_hdu=input_hdulist[0], flux=20*u.ABmag)
         assert isinstance(src, Source)
         assert isinstance(src.spectra[0], SourceSpectrum)
-        assert isinstance(src.fields[0], fits.ImageHDU)
+        assert isinstance(src.fields[0].field, fits.ImageHDU)
         src.shift(0.1, 0.2)
 
     def test_initialises_with_only_image(self, input_hdulist):
@@ -175,7 +176,7 @@ def test_initialises_with_only_imagehdu_and_arcsec2(self):
 
         assert isinstance(src, Source)
         assert isinstance(src.spectra[0], SourceSpectrum)
-        assert isinstance(src.fields[0], fits.ImageHDU)
+        assert isinstance(src.fields[0].field, fits.ImageHDU)
         src.shift(0.1, 0.2)
 
     @pytest.mark.parametrize("ii, dtype",
@@ -185,10 +186,10 @@ def test_initialises_with_only_imagehdu_and_arcsec2(self):
     def test_initialises_with_filename_and_spectrum(self, ii, dtype,
                                                     input_files, input_spectra):
         fname = input_files[ii]
-        src = Source(filename=fname, spectra=input_spectra)
+        src = Source(filename=fname, spectra=input_spectra[0])
         assert isinstance(src, Source)
         assert isinstance(src.spectra[0], SourceSpectrum)
-        assert isinstance(src.fields[0], dtype)
+        assert isinstance(src.fields[0].field, dtype)
         src.shift(0.1, 0.2)
 
     def test_initialised_with_old_style_arrays(self):
@@ -199,7 +200,7 @@
         src = Source(x=x, y=y, ref=ref, weight=weight, lam=lam, spectra=spectra)
         assert isinstance(src, Source)
         assert isinstance(src.spectra[0], SourceSpectrum)
-        assert isinstance(src.fields[0], Table)
+        assert isinstance(src.fields[0].field, Table)
         src.shift(0.1, 0.2)
 
@@ -209,18 +210,16 @@ def test_ref_column_always_references_correct_spectrum(self, table_source,
         image_source.append(table_source)
         comb_refs = image_source.fields[1]["ref"]
         tbl_refs = table_source.fields[0]["ref"]
-        assert np.all(tbl_refs.data + 1 == comb_refs.data)
+        assert all(tbl_refs.data + 1 == comb_refs.data)
         assert image_source.fields[0].header["SPEC_REF"] == 0
-        assert len(image_source.fields) == len(image_source._meta_dicts)
         image_source.shift(0.1, 0.2)
 
     def test_same_as_above_but_reversed(self, table_source, image_source):
         new_source = table_source + image_source
         comb_refs = new_source.fields[0]["ref"]
         tbl_refs = table_source.fields[0]["ref"]
-        assert np.all(tbl_refs.data == comb_refs.data)
+        assert all(tbl_refs.data == comb_refs.data)
         assert new_source.fields[1].header["SPEC_REF"] == 3
-        assert len(new_source.fields) == len(new_source._meta_dicts)
         new_source.shift(0.1, 0.2)
 
     def test_imagehdu_with_empty_spec_ref_is_handled(self, table_source,
@@ -228,7 +227,6 @@ def test_imagehdu_with_empty_spec_ref_is_handled(self, table_source,
         image_source.fields[0].header["SPEC_REF"] = ""
         new_source = table_source + image_source
         assert new_source.fields[1].header["SPEC_REF"] == ""
-        assert len(new_source.fields) == len(new_source._meta_dicts)
 
     def test_fits_image_and_array_image_are_added_correctly(self):
         img_src = so._image_source()
@@ -238,25 +236,23 @@
         fits_img_src = fits_src + img_src
 
         assert len(img_src.fields) == 1
-        assert len(img_src.fields) == len(img_src._meta_dicts)
         assert len(fits_src.fields) == 1
-        assert len(fits_src.fields) == len(fits_src._meta_dicts)
 
         assert len(img_fits_src.fields) == 2
-        assert len(img_fits_src.fields) == len(img_fits_src._meta_dicts)
         assert len(fits_img_src.fields) == 2
-        assert len(img_fits_src.fields) == len(img_fits_src._meta_dicts)
-        assert np.all(fits_img_src.fields[0].data == fits_src.fields[0].data)
+        assert (fits_img_src.fields[0].data == fits_src.fields[0].data).all()
         assert img_fits_src.fields[0] is not img_src.fields[0]
 
+    @pytest.mark.skip(reason="_meta_dicts was removed, find a better way to perform the same check...")
     def test_meta_data_is_passed_on_when_added(self, table_source, image_source):
-        table_source.meta["hello"] = "world"
-        image_source.meta["servus"] = "oida"
+        table_source.fields[0].meta["hello"] = "world"
+        image_source.fields[0].meta["servus"] = "oida"
         new_source = table_source + image_source
 
         assert len(new_source.fields) == len(new_source._meta_dicts)
         assert new_source._meta_dicts[0]["hello"] == "world"
         assert new_source._meta_dicts[1]["servus"] == "oida"
 
+    @pytest.mark.skip(reason="_meta_dicts was removed, find a better way to perform the same check...")
     def test_empty_source_is_the_additive_identity(self, image_source):
         new_source_1 = Source() + image_source
         assert len(new_source_1.fields) == len(new_source_1._meta_dicts)
@@ -309,14 +305,14 @@ def test_combines_more_that_one_field_into_image(self, image_source,
 
 
 class TestSourcePhotonsInRange:
     def test_correct_photons_are_returned_for_table_source(self, table_source):
         ph = table_source.photons_in_range(1, 2)
-        assert np.all(np.isclose(ph.value, [4., 2., 2.]))
+        assert np.allclose(ph.value, [4., 2., 2.])
 
     def test_correct_photons_are_returned_for_image_source(self, image_source):
         ph = image_source.photons_in_range(1, 2)
-        assert np.all(np.isclose(ph.value, [2.]))
+        assert np.allclose(ph.value, [2.])
 
     def test_correct_photons_are_returned_for_no_spectra(self, image_source):
-        image_source.spectra = []
+        image_source.fields[0].spectra = {}
         ph = image_source.photons_in_range(1, 2)
         assert len(ph) == 0
 
@@ -325,10 +321,10 @@ def test_photons_increase_with_area(self, area, expected, image_source):
         ph = image_source.photons_in_range(1, 2, area=area)
         assert ph[0].value == approx(expected)
 
-    def test_photons_returned_only_for_indexes(self, table_source):
-        ph = table_source.photons_in_range(1, 2, indexes=[0, 2])
+    def test_photons_returned_only_for_indices(self, table_source):
+        ph = table_source.photons_in_range(1, 2, indices=[0, 2])
         assert len(ph) == 2
-        assert np.all(np.isclose(ph.value, [4, 2]))
+        assert np.allclose(ph.value, [4, 2])
 
 
 class TestSourceShift:
@@ -387,7 +383,7 @@ def test_returns_correct_half_flux_with_bandpass(self):
                                  (np.linspace(0, 1, 11)**0.5, 100, 34.931988)])
     def test_with_bandpass_and_area_returns_correct_value(self, flux, area,
                                                           expected):
-        flux = flux * u.Unit("ph s-1 m-2 um-1")
+        flux *= u.Unit("ph s-1 m-2 um-1")
         spec = SourceSpectrum(Empirical1D,
                               points=np.linspace(0.5, 2.5, 11) * u.um,
                               lookup_table=flux)
@@ -400,6 +396,56 @@ def test_with_bandpass_and_area_returns_correct_value(self, flux, area,
         assert counts.value == approx(expected)
 
 
+class TestSpectraListConverter:
+    def test_works_for_arrays(self):
+        spec = source_utils.convert_to_list_of_spectra(
+            np.array([0, 1, 1, 0]), np.array([1, 2, 3, 4]))
+        assert isinstance(spec[0], SourceSpectrum)
+
+    def test_works_for_2d_arrays(self):
+        spec = source_utils.convert_to_list_of_spectra(
+            np.array([[0, 1, 1, 0], [0, 1, 1, 0]]),
+            np.array([1, 2, 3, 4]))
+        assert all(isinstance(sp, SourceSpectrum) for sp in spec)
+
+    def test_works_for_multiple_1d_arrays(self):
+        spec = source_utils.convert_to_list_of_spectra(
+            [np.array([0, 1, 1, 0]), np.array([0, 1, 1, 0])],
+            np.array([1, 2, 3, 4]))
+        assert all(isinstance(sp, SourceSpectrum) for sp in spec)
+
+    def test_throws_for_array_mismatch(self):
+        with pytest.raises(TypeError):
+            source_utils.convert_to_list_of_spectra(
+                np.array([0, 1, 1, 0]), [1, 2, 3, 4])
+
+    def test_throws_for_multiple_array_mismatch(self):
+        with pytest.raises(ValueError):
+            source_utils.convert_to_list_of_spectra(
+                [np.array([0, 1, 1, 0]), [0, 1, 1, 0]],
+                [np.array([1, 2, 3, 4]), [1, 2, 3, 4]])
+
+
+def test_cube_source_field():
+    size = 5
+    hdu = fits.ImageHDU(data=np.arange(size**3).reshape(3*(size,)))
+
+    hdu.header["CUNIT1"] = "arcsec"
+    hdu.header["CUNIT2"] = "arcsec"
+    hdu.header["CUNIT3"] = "um"
+    hdu.header["CTYPE3"] = "WAVE"
+    hdu.header["CRVAL1"] = 0
+    hdu.header["CRVAL2"] = 0
+    csf = CubeSourceField(hdu)
+
+    np.testing.assert_equal(csf.wave.value, np.arange(1, 6))
+    csf.shift(2, 3)
+    assert csf.header["CRVAL1"] == 2
+    assert csf.header["CRVAL2"] == 3
+
+    _, ax = plt.subplots()
+    csf.plot(ax, "red")
+
+
 #
 # class TestScaleImageHDU:
 #     def test_scaling_properly_for_si_photlam_in_header(self):
diff --git a/scopesim/tests/tests_source/test_source_templates.py b/scopesim/tests/tests_source/test_source_templates.py
index 3b8cd148..300f7118 100644
--- a/scopesim/tests/tests_source/test_source_templates.py
+++ b/scopesim/tests/tests_source/test_source_templates.py
@@ -1,13 +1,11 @@
 import pytest
 from pytest import approx
-from unittest.mock import patch
 
 from matplotlib import pyplot as plt
 
 from astropy import units as u
 from astropy.table import Table
 
-from scopesim import rc
 from scopesim import load_example_optical_train
 from scopesim.source import source_templates as src_ts
 from scopesim.source.source import Source
@@ -35,7 +33,7 @@ def test_star_field_throws_error_with_no_kwargs(self):
 
     def test_star_fields_data(self):
         src = src_ts.star_field(100, 15, 25, 60)
-        assert isinstance(src.fields[0], Table)
+        assert isinstance(src.fields[0].field, Table)
         assert all(src.fields[0]["weight"] == 10**(-0.4 * src.fields[0]["mag"]))
         src.shift(0.1, 0.2)
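Note on the API exercised by the test changes above (not part of the patch itself): the updated assertions all follow the SourceField rework, where the Table/ImageHDU payload of a Source now sits behind field.field, spectra behave as a dict keyed by the "ref" index, and photons_in_range takes "indices" instead of "indexes". Below is a minimal sketch of that usage; the spectrum, table layout and values are illustrative assumptions, not code from this patch.

    import numpy as np
    from astropy import units as u
    from astropy.table import Table
    from synphot import Empirical1D, SourceSpectrum

    from scopesim.source.source import Source

    # Flat spectrum in the same photon-flux units used by the tests above.
    spec = SourceSpectrum(Empirical1D,
                          points=np.linspace(0.5, 2.5, 11) * u.um,
                          lookup_table=np.ones(11) * u.Unit("ph s-1 m-2 um-1"))

    # One point source at the origin, referencing spectrum 0.
    tbl = Table(names=["x", "y", "ref", "weight"],
                data=[[0.0] * u.arcsec, [0.0] * u.arcsec, [0], [1.0]])

    src = Source(table=tbl, spectra=[spec])

    assert isinstance(src.fields[0].field, Table)      # payload behind the SourceField wrapper
    assert isinstance(src.spectra[0], SourceSpectrum)  # dict-style access by reference index
    print(src.photons_in_range(1, 2, indices=[0]))     # keyword renamed from "indexes"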