Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove units from MOS pipeline. #1441

Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
48 commits
Select commit Hold shift + click to select a range
27195a3
Remove units from MOS pipeline.
mairanteodoro Oct 7, 2024
75cb155
Update to point to temp RAD and datamodels installation.
mairanteodoro Oct 7, 2024
953961a
First refactoring of TweakRegStep to use stcal.
mairanteodoro Aug 29, 2024
89bd44e
Code cleanup/refactoring.
mairanteodoro Aug 30, 2024
c033702
Replace local method with stcal's.
mairanteodoro Aug 30, 2024
f015feb
Further code refactoring.
mairanteodoro Sep 4, 2024
a99761c
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Sep 4, 2024
79b667c
Utilize stcal's get_catalog method.
mairanteodoro Sep 4, 2024
42fef2c
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Sep 4, 2024
8991f9f
Add clip_accum=True to call to alignment methods.
mairanteodoro Sep 5, 2024
76eb339
Point stcal installation to main branch.
mairanteodoro Sep 10, 2024
7091433
Revert some refactoring.
mairanteodoro Sep 11, 2024
2313559
Refactor handling of invalid exposure types.
mairanteodoro Sep 16, 2024
f316920
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Sep 16, 2024
9d48114
Address comments 2.
mairanteodoro Sep 17, 2024
a97482a
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Sep 17, 2024
c365437
Style refactoring.
mairanteodoro Sep 18, 2024
4876457
remove default values from docs
braingram Sep 23, 2024
1b6729a
add changelog
braingram Sep 23, 2024
be13903
fix warning
braingram Sep 23, 2024
89e7148
[CI] rename changelog check job to be more explicit on its purpose (#…
zacharyburnett Sep 24, 2024
7ea6e0a
update pull request checklist (#1336)
zacharyburnett Sep 24, 2024
2f96fba
RCAL-895: allow updating source catalog with tweaked WCS when running…
mairanteodoro Sep 26, 2024
91e9794
fix artifactory_repos for pytest 8
braingram Oct 1, 2024
6fb921c
remove MultilineLogger
braingram Aug 22, 2024
cc62ed2
remove unused variables
braingram Aug 8, 2024
a0d224a
raise on invalid input
braingram Aug 8, 2024
a016b37
update outlier detection docs for spec update
braingram Aug 8, 2024
ac9d89e
reference spec in arguments docs
braingram Aug 8, 2024
ddb72f3
add fragments
braingram Sep 30, 2024
128b657
use outlier detection from stcal
braingram Sep 30, 2024
4f6a6e9
remove unused kernel_size from docs
braingram Sep 30, 2024
a6a180a
update docs for new memory model
braingram Sep 30, 2024
3d31901
use resample_group in resample_many_to_many
braingram Sep 30, 2024
1f159fa
unskip and fix test
braingram Sep 30, 2024
dc7dd09
remove outdated comments
braingram Sep 30, 2024
35c8773
Update docs from review.
braingram Oct 1, 2024
a6bbac5
clarify in_memory
braingram Oct 1, 2024
d290ed4
fix test docstrings
braingram Oct 1, 2024
6c960ab
fix make_output_path docstring entry, add description of default buff…
braingram Oct 1, 2024
e81ff48
expand resample_group docstring
braingram Oct 1, 2024
d99682f
docs typo
braingram Oct 1, 2024
9c2a33f
add failing unit test
braingram Oct 3, 2024
ae00da2
work around NotImplemented Quantity.tofile
braingram Oct 3, 2024
50b439a
add changelog fragment
braingram Oct 3, 2024
f996663
Merge branch 'main' into RCAL-911-remove-units-from-mosaic-level-pipe…
mairanteodoro Oct 7, 2024
4abdef1
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Oct 7, 2024
83eae28
Check-style fixes.
mairanteodoro Oct 7, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -24,9 +24,9 @@ dependencies = [
"pyparsing >=2.4.7",
"requests >=2.26",
# "rad>=0.21.0,<0.22.0",
"rad @ git+https://github.com/spacetelescope/rad.git",
"rad @ git+https://github.com/mairanteodoro/rad.git@RCAL-911-remove-units-from-mosaic-level-pipeline",
# "roman_datamodels>=0.21.0,<0.22.0",
"roman_datamodels @ git+https://github.com/spacetelescope/roman_datamodels.git",
"roman_datamodels @ git+https://github.com/mairanteodoro/roman_datamodels.git@RCAL-911-remove-units-from-mosaic-level-pipeline",
"scipy >=1.11",
# "stcal>=1.8.0,<1.9.0",
"stcal @ git+https://github.com/spacetelescope/stcal.git@main",
Expand Down
24 changes: 6 additions & 18 deletions romancal/flux/flux_step.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@

import logging

import astropy.units as u
from roman_datamodels import datamodels

from ..datamodels import ModelLibrary
Expand All @@ -14,10 +13,6 @@
__all__ = ["FluxStep"]


# Define expected Level 2 units
LV2_UNITS = u.DN / u.s


class FluxStep(RomanStep):
"""Apply flux scaling to count-rate data

Expand Down Expand Up @@ -104,26 +99,19 @@ def apply_flux_correction(model):
DATA = ("data", "err")
VARIANCES = ("var_rnoise", "var_poisson", "var_flat")

if model.data.unit == model.meta.photometry.conversion_megajanskys.unit:
log.info(
f"Input data is already in flux units of {model.meta.photometry.conversion_megajanskys.unit}."
)
log.info("Flux correction already applied.")
return

if model.data.unit != LV2_UNITS:
if model.meta.cal_step["flux"] == "COMPLETE":
message = (
f"Input data units {model.data.unit} are not in the expected units of {LV2_UNITS}"
"\nAborting flux correction"
"Input data is already in flux units of MJy/sr."
"\nFlux correction already applied."
)
[log.error(line) for line in message.splitlines()]
raise ValueError(message)
log.info(message)
return
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think we should just delete this stanza. Do we special case the "already complete" step anywhere else?


# Apply the correction.
# The end goal in units is to have MJy/sr. The scale is in MJy/sr also.
# Hence the extra factor of s/DN must be applied to cancel DN/s.
log.debug("Flux correction being applied")
c_mj = model.meta.photometry.conversion_megajanskys / model.data.unit
c_mj = model.meta.photometry.conversion_megajanskys
for data in DATA:
model[data] = model[data] * c_mj
for variance in VARIANCES:
Expand Down
94 changes: 93 additions & 1 deletion romancal/regtest/test_mos_pipeline.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,18 @@
""" Roman tests for the High Level Pipeline """

import json
import os
from pathlib import Path

import asdf
import pytest
import roman_datamodels as rdm
from astropy.units import Quantity

from romancal.associations.asn_from_list import asn_from_list
from romancal.pipeline.mosaic_pipeline import MosaicPipeline

from ..associations.association_io import json as asn_json
from .regtestdata import compare_asdf


Expand All @@ -17,6 +23,86 @@ def passfail(bool_expr):
return "Fail"


class RegtestFileModifier:
    """Rewrite regtest inputs (and the truth file) with units stripped.

    Produces ``*_no_units*`` copies of every member of the association plus
    the truth file, writes a new association pointing at the copies, and
    repoints ``rtdata`` so the pipeline and comparison run on unitless data.

    TODO: remove this entire class once the units
    have been removed from the regtest files.
    """

    def __init__(self, rtdata):
        self.rtdata = rtdata
        self.updated_asn_fname = None
        self.truth_parent = Path(rtdata.truth).parent
        self.input_parent = Path(rtdata.input).parent
        # Truth directory expressed relative to the input directory, so that
        # generated member paths remain valid from the test working dir.
        self.truth_relative_path = self.truth_parent.relative_to(self.input_parent)
        self.truth_path = self.truth_relative_path / Path(self.rtdata.truth).name

    @staticmethod
    def create_unitless_file(input_filename: str, output_filename: str) -> None:
        """Copy an ASDF file, replacing Quantity attributes with bare values.

        Strips units from the top-level ``roman`` node attributes and from
        ``roman.meta.photometry``.

        Parameters
        ----------
        input_filename : str
            Path of the ASDF file to read.
        output_filename : str
            Path of the unitless copy to write.
        """
        with asdf.config_context() as cfg:
            # The old (unit-bearing) files do not validate against the new
            # unitless schemas, so validation must be disabled both ways.
            cfg.validate_on_read = False
            cfg.validate_on_save = False
            # Use a context manager so the input file handle is closed
            # (the previous version leaked the open file).
            with asdf.open(input_filename) as af:
                for attr in af.tree["roman"]:
                    item = getattr(af.tree["roman"], attr)
                    if isinstance(item, Quantity):
                        setattr(af.tree["roman"], attr, item.value)

                for attr in af.tree["roman"].meta.photometry:
                    item = getattr(af.tree["roman"].meta.photometry, attr)
                    if isinstance(item, Quantity):
                        setattr(af.tree["roman"].meta.photometry, attr, item.value)

                af.write_to(output_filename)

    def create_new_asn_file(self, output_filename_list: list):
        """Write a new association JSON listing the unitless member files.

        Parameters
        ----------
        output_filename_list : list
            Filenames of the unitless copies to use as association members.
        """
        updated_asn = asn_from_list(
            output_filename_list,
            product_name=f"{self.rtdata.asn['products'][0]['name']}_no_units",
        )
        updated_asn["target"] = "none"

        current_asn_fname = Path(self.rtdata.input)
        self.updated_asn_fname = (
            f"{current_asn_fname.stem}_no_units{current_asn_fname.suffix}"
        )

        _, serialized_updated_asn = asn_json.dump(updated_asn)
        with open(self.updated_asn_fname, "w") as f:
            json.dump(
                json.loads(serialized_updated_asn), f, indent=4, separators=(",", ": ")
            )

    def update_rtdata(self):
        """Repoint ``rtdata.input``/``rtdata.output`` at the unitless files."""
        rtdata_root_path = Path(self.rtdata.input).parent
        self.rtdata.input = f"{rtdata_root_path}/{Path(self.updated_asn_fname)}"
        # e.g. r0099101001001001001_F158_visit_no_units_i2d.asdf
        self.rtdata.output = f"{rtdata_root_path}/{Path(self.rtdata.output.split('_i2d')[0]).stem}_no_units_i2d{Path(self.rtdata.output).suffix}"

    def prepare_regtest_input_files(self):
        """Create unitless copies of all inputs and wire them into rtdata.

        Converts every association member plus the truth file, writes the
        updated association, and updates ``rtdata`` paths accordingly.
        """
        input_filenames = [
            x["expname"] for x in self.rtdata.asn["products"][0]["members"]
        ]
        # Include the truth file so it is converted alongside the members.
        input_filenames.append(str(self.truth_path))
        output_filename_list = []
        for input_filename in input_filenames:
            fname = Path(input_filename)
            if str(fname).startswith(str(self.truth_relative_path)):
                # Truth file: rename and repoint rtdata.truth, but do not
                # add it to the association member list.
                output_filename = Path(
                    f"{str(fname).split('_i2d.asdf')[0]}_no_units_i2d{fname.suffix}"
                )
                self.rtdata.truth = str(self.truth_parent / output_filename.name)
            else:
                output_filename = f"{fname.stem}_no_units{fname.suffix}"
                output_filename_list.append(output_filename)
            self.create_unitless_file(input_filename, output_filename)

        self.create_new_asn_file(output_filename_list)
        self.update_rtdata()


@pytest.mark.bigdata
@pytest.mark.soctests
def test_level3_mos_pipeline(rtdata, ignore_asdf_paths):
Expand All @@ -26,12 +112,18 @@ def test_level3_mos_pipeline(rtdata, ignore_asdf_paths):
# Test Pipeline
output = "r0099101001001001001_F158_visit_i2d.asdf"
rtdata.output = output

rtdata.get_truth(f"truth/WFI/image/{output}")

fixer = RegtestFileModifier(rtdata)
fixer.prepare_regtest_input_files()

args = [
"roman_mos",
rtdata.input,
]
MosaicPipeline.from_cmdline(args)
rtdata.get_truth(f"truth/WFI/image/{output}")

diff = compare_asdf(rtdata.output, rtdata.truth, **ignore_asdf_paths)
assert diff.identical, diff.report()

Expand Down
2 changes: 1 addition & 1 deletion romancal/resample/gwcs_drizzle.py
Original file line number Diff line number Diff line change
Expand Up @@ -431,7 +431,7 @@ def dodrizzle(
log.info(f"Drizzling {insci.shape} --> {outsci.shape}")

_vers, nmiss, nskip = cdrizzle.tdriz(
insci.astype(np.float32).value,
insci.astype(np.float32),
inwht.astype(np.float32),
pixmap,
outsci,
Expand Down
35 changes: 15 additions & 20 deletions romancal/resample/resample.py
Original file line number Diff line number Diff line change
Expand Up @@ -386,18 +386,15 @@ def resample_many_to_one(self):
exptime_tot = self.resample_exposure_time(output_model)

# TODO: fix unit here
output_model.err = u.Quantity(
np.sqrt(
np.nansum(
[
output_model.var_rnoise,
output_model.var_poisson,
output_model.var_flat,
],
axis=0,
)
),
unit=output_model.err.unit,
output_model.err = np.sqrt(
np.nansum(
[
output_model.var_rnoise,
output_model.var_poisson,
output_model.var_flat,
],
axis=0,
)
)

self.update_exposure_times(output_model, exptime_tot)
Expand All @@ -416,7 +413,7 @@ def resample_variance_array(self, name, output_model):
This modifies ``output_model`` in-place.
"""
output_wcs = self.output_wcs
inverse_variance_sum = np.full_like(output_model.data.value, np.nan)
inverse_variance_sum = np.full_like(output_model.data, np.nan)

log.info(f"Resampling {name}")
with self.input_models:
Expand Down Expand Up @@ -482,9 +479,7 @@ def resample_variance_array(self, name, output_model):
# We now have a sum of the inverse resampled variances. We need the
# inverse of that to get back to units of variance.
# TODO: fix unit here
output_variance = u.Quantity(
np.reciprocal(inverse_variance_sum), unit=u.MJy**2 / u.sr**2
)
output_variance = np.reciprocal(inverse_variance_sum)

setattr(output_model, name, output_variance)

Expand Down Expand Up @@ -537,7 +532,7 @@ def resample_exposure_time(self, output_model):
ymax=ymax,
)

exptime_tot += resampled_exptime.value
exptime_tot += resampled_exptime
self.input_models.shelve(model, i, modify=False)

return exptime_tot
Expand Down Expand Up @@ -723,11 +718,11 @@ def drizzle_arrays(
log.info(f"Drizzling {insci.shape} --> {outsci.shape}")

_vers, _nmiss, _nskip = cdrizzle.tdriz(
insci.astype(np.float32).value,
insci.astype(np.float32),
inwht,
pixmap,
outsci.value,
outwht.value,
outsci,
outwht,
outcon,
uniqid=uniqid,
xmin=xmin,
Expand Down
2 changes: 1 addition & 1 deletion romancal/resample/resample_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -151,7 +151,7 @@ def build_driz_weight(
and model.var_rnoise.shape == model.data.shape
):
with np.errstate(divide="ignore", invalid="ignore"):
inv_variance = model.var_rnoise.value**-1
inv_variance = model.var_rnoise**-1
inv_variance[~np.isfinite(inv_variance)] = 1
else:
warnings.warn(
Expand Down
8 changes: 3 additions & 5 deletions romancal/skymatch/skymatch.py
Original file line number Diff line number Diff line change
Expand Up @@ -460,8 +460,7 @@ def _overlap_matrix(images, apply_sky=True):
# TODO: to improve performance, the nested loops could be parallelized
# since _calc_sky() here can be called independently from previous steps.
ns = len(images)
data_unit = images[0].image.unit
A = np.zeros((ns, ns), dtype=float) * data_unit
A = np.zeros((ns, ns), dtype=float)
W = np.zeros((ns, ns), dtype=float)
for i in range(ns):
for j in range(i + 1, ns):
Expand All @@ -482,7 +481,6 @@ def _overlap_matrix(images, apply_sky=True):
def _find_optimum_sky_deltas(images, apply_sky=True):
ns = len(images)
A, W = _overlap_matrix(images, apply_sky=apply_sky)
data_unit = images[0].image.unit

def is_valid(i, j):
return W[i, j] > 0 and W[j, i] > 0
Expand Down Expand Up @@ -514,7 +512,7 @@ def is_valid(i, j):
if is_valid(i, j):
K[ieq, i] = Wm[i, j]
K[ieq, j] = -Wm[i, j]
F[ieq] = Wm[i, j] * (A[j, i] - A[i, j]).value
F[ieq] = Wm[i, j] * (A[j, i] - A[i, j])
invalid[i] = False
invalid[j] = False
ieq += 1
Expand All @@ -538,4 +536,4 @@ def is_valid(i, j):

deltas = np.dot(invK, F)
deltas[np.asarray(invalid, dtype=bool)] = np.nan
return deltas * data_unit
return deltas
4 changes: 1 addition & 3 deletions romancal/skymatch/skymatch_step.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,9 +173,7 @@ def _imodel2skyim(self, image_model):

def _set_sky_background(self, sky_image, step_status):
image = sky_image.meta["image_model"]
sky = sky_image.sky
if sky == 0 or sky is None:
sky = 0 * image.data.unit
sky = sky_image.sky if sky_image.sky is not None else 0

image.meta.background.method = str(self.skymethod)
image.meta.background.subtracted = self.subtract
Expand Down
6 changes: 3 additions & 3 deletions romancal/skymatch/skystatistics.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,14 +126,14 @@ def calc_sky(self, data):
in `skyvalue`.

"""
imstat = ImageStats(image=data.value, fields=self._fields, **(self._kwargs))
imstat = ImageStats(image=data, fields=self._fields, **(self._kwargs))
stat = self._skystat(imstat) # dict or scalar

# re-attach units:
if hasattr(stat, "__len__"):
self.skyval = {k: value * data.unit for k, value in stat.items()}
self.skyval = {k: value for k, value in stat.items()}
else:
self.skyval = stat * data.unit
self.skyval = stat

self.npix = imstat.npix
return self.skyval, self.npix
Expand Down
4 changes: 0 additions & 4 deletions romancal/source_catalog/detection.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
import warnings

from astropy.convolution import convolve
from astropy.units import Quantity
from astropy.utils.exceptions import AstropyUserWarning
from photutils.segmentation import SourceFinder, make_2dgaussian_kernel
from photutils.utils.exceptions import NoDetectionsWarning
Expand Down Expand Up @@ -40,9 +39,6 @@ def convolve_data(data, kernel_fwhm, size=None, mask=None):
convolved_data : `numpy.ndarray`
The convolved data array.
"""
if not isinstance(data, Quantity):
raise ValueError("Input model must be a Quantity array.")

size = math.ceil(kernel_fwhm * 3)
size = size + 1 if size % 2 == 0 else size # make size be odd
kernel = make_2dgaussian_kernel(kernel_fwhm, size=size) # normalized to 1
Expand Down
Loading
Loading