Feature/reduce recording #297

Open — wants to merge 26 commits into master from feature/reduce_recording
Commits (26) — changes from all commits
2dd4c67  initial work on reduce recording for interactive save/restore (Aug 23, 2021)
38c559c  updates for wavecal/fit1d interactive record/replay (Aug 24, 2021)
886ce15  code commenting for record/replay functionality (Aug 24, 2021)
386ea45  Apertures interactive record/load functionality (Aug 25, 2021)
5cde230  more fixes to record/restore logic and side effects in the turbo tabs (Aug 25, 2021)
ea0530e  added mask encoding/decoding simple version to compress the json a bit (Aug 26, 2021)
5e6a07b  Stop replaying saved values if a user modifies one of the interactive… (Aug 30, 2021)
67f2b4a  detect changes during replay, alert user and continue without the fur… (Aug 31, 2021)
e47b5c2  record/replay via .fits file output, different unique suffix for outp… (Sep 1, 2021)
e21e994  manually checking for --replay so I can support a .fits argument (Sep 1, 2021)
cc17e34  switching to hasattr for record field check (Sep 1, 2021)
09a2a57  layered in None check for record (Sep 1, 2021)
c2399fa  consolidating if, trying to trigger Jenkins tests (Sep 2, 2021)
194f5a9  merging in latest from master (Oct 28, 2021)
c4acf75  Merge branch 'master' into feature/reduce_recording (Dec 17, 2021)
4ca2a8f  merging latest from master (Feb 12, 2022)
f70d789  Merge branch 'master' into feature/reduce_recording (Mar 30, 2022)
ff3b1a9  adding docs for record/replay (Mar 31, 2022)
8264576  Merge branch 'master' into feature/reduce_recording (Apr 11, 2022)
bd76980  properly strip datalab extension that contain a number like -QL-2D. … (KathleenLabrie, May 2, 2022)
e878ec3  Merge branch 'master' of github.com:GeminiDRSoftware/DRAGONS (May 20, 2022)
ae3ef6b  Revert "properly strip datalab extension that contain a number like -… (May 20, 2022)
4cd13e2  Merge branch 'master' of github.com:GeminiDRSoftware/DRAGONS (May 23, 2022)
33a4107  Merge branch 'master' of github.com:GeminiDRSoftware/DRAGONS (May 23, 2022)
f807c36  Merge branch 'master' of github.com:GeminiDRSoftware/DRAGONS (May 31, 2022)
a83a8e2  Merge branch 'master' into feature/reduce_recording (May 31, 2022)
14 changes: 14 additions & 0 deletions astrodata/core.py
@@ -444,6 +444,20 @@ def wcs(self):
    def wcs(self, value):
        self.nddata.wcs = value

    @property
    def record(self):
        """Returns the record of reduction that produced this file, or None."""
        if self.is_single:
            return self.nddata.record
        else:
            raise ValueError("Cannot return record for an AstroData object "
                             "that is not a single slice")

    @record.setter
    @assign_only_single_slice
    def record(self, value):
        self.nddata.record = value

    def __iter__(self):
        if self.is_single:
            yield self
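The new record property pairs with the RECORD extension handling added to astrodata/fits.py below. A minimal usage sketch — the file name and record payload here are hypothetical, and astrodata.open is assumed from the existing astrodata API:

import astrodata

# 'reduced.fits' stands in for an output written by a recorded reduction.
ad = astrodata.open('reduced.fits')

# The getter is only valid on a single slice; called on a multi-extension
# object it raises ValueError, per the guard above.
for ext in ad:
    print(ext.record)  # the saved reduction record, or None

# The setter is likewise restricted to a single slice.
ad[0].record = '{"primitive": "findApertures"}'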
172 changes: 172 additions & 0 deletions astrodata/fits.py
@@ -448,6 +448,7 @@ def associated_extensions(ver):
            'uncertainty': None,
            'mask': None,
            'wcs': None,
            'record': None,
            'other': [],
        }

@@ -461,6 +462,8 @@ def associated_extensions(ver):
                parts['uncertainty'] = extra_unit
            elif name == 'WCS':
                parts['wcs'] = extra_unit
            elif name == 'RECORD':
                parts['record'] = extra_unit
            else:
                parts['other'].append(extra_unit)

@@ -510,6 +513,11 @@ def associated_extensions(ver):
            # In case WCS info is in the PHU
            nd.wcs = fitswcs_to_gwcs(hdulist[0].header)

            if parts['record'] is not None:
                nd.record = asdftablehdu_to_record(parts['record'])
            else:
                nd.record = None

            ad.append(nd, name=DEFAULT_EXTENSION)

            # This is used in the writer to keep track of the extensions that
@@ -598,6 +606,9 @@ def ad_to_hdulist(ad):
        if isinstance(wcs, gWCS):
            hdul.append(wcs_to_asdftablehdu(ext.wcs, extver=ver))

        if hasattr(ext, "record") and ext.record is not None:
            hdul.append(record_to_asdftablehdu(ext.record, extver=ver))

        for name, other in ext.meta.get('other', {}).items():
            if isinstance(other, Table):
                hdu = table_to_bintablehdu(other, extname=name)
@@ -851,3 +862,164 @@ def asdftablehdu_to_wcs(hdu):
            return

    return wcs


def data_to_asdftablehdu(name, table_name, col_name, data, extver=None):
    """
    Serialize a data object as a FITS TableHDU (ASCII) extension.

    The ASCII table is actually a mini ASDF file. The constituent AstroPy
    models must have associated ASDF "tags" that specify how to serialize
    them.

    In the event that serialization as pure ASCII fails (this should not
    happen), a binary table representation will be used as a fallback.

    Parameters
    ----------
    name : str
        Name of the data within the ASDF tree. This is something like
        'wcs' or 'record' and should be unique.
    table_name : str
        Name of the FITS table extension holding the column with the data.
    col_name : str
        Name of the column holding the data.
    data : str
        Blob of data to save, in some string-encoded form.
    extver : int, optional
        EXTVER to assign to the resulting extension.
    """

    # Create a small ASDF file in memory containing the data object
    # representation because there's no public API for generating only the
    # relevant YAML subsection and an ASDF file handles the "tags" properly.
    try:
        af = asdf.AsdfFile({name: data})
    except jsonschema.exceptions.ValidationError:
        # (The original traceback also gets printed here)
        raise TypeError("Cannot serialize model(s) for '{}' extension {}"
                        .format(table_name, extver or ''))

    # ASDF can only dump YAML to a binary file object, so do that and read
    # the contents back from it for storage in a FITS extension:
    with BytesIO() as fd:
        with af:
            # Generate the YAML, dumping any binary arrays as text:
            af.write_to(fd, all_array_storage='inline')
        fd.seek(0)
        databuf = fd.read()

    # Convert the bytes to readable lines of text for storage (falling back
    # to saving as binary in the unexpected event that this is not possible):
    try:
        databuf = databuf.decode('ascii').splitlines()
    except UnicodeDecodeError:
        # This should not happen, but if the ASDF contains binary data in
        # spite of the 'inline' option above, we have to dump the bytes to
        # a non-human-readable binary table rather than an ASCII one:
        LOGGER.warning("Could not convert {} ASDF to ASCII; saving table "
                       "as binary".format(extver or ''))
        hduclass = BinTableHDU
        fmt = 'B'
        databuf = np.frombuffer(databuf, dtype=np.uint8)
    else:
        hduclass = TableHDU
        fmt = 'A{}'.format(max(len(line) for line in databuf))

    # Construct the FITS table extension:
    col = Column(name=col_name, format=fmt, array=databuf,
                 ascii=hduclass is TableHDU)
    return hduclass.from_columns([col], name=table_name, ver=extver)


def asdftablehdu_to_data(hdu, name, col_name):
    """
    Recreate a previously stored data object from its serialization in a
    FITS table extension.

    Returns None (issuing a warning) if the extension cannot be parsed, so
    the rest of the file can still be read.

    Parameters
    ----------
    hdu : :class:`~.TableHDU` or :class:`~.BinTableHDU`
        HDU to extract the data from.
    name : str
        Name the data is stored under in the ASDF tree.
    col_name : str
        Name of the column holding the data.
    """

    ver = hdu.header.get('EXTVER', -1)

    if isinstance(hdu, (TableHDU, BinTableHDU)):
        try:
            colarr = hdu.data[col_name]
        except KeyError:
            LOGGER.warning("Ignoring extension {} with no '{}' table "
                           "column".format(ver, col_name))
            return

        # If this table column contains text strings as expected, join the
        # rows as separate lines of a string buffer and encode the resulting
        # YAML as bytes that ASDF can parse. If AstroData has produced
        # another format, it will be a binary dump due to the unexpected
        # presence of non-ASCII data, in which case we just extract
        # unmodified bytes from the table.
        if colarr.dtype.kind in ('U', 'S'):
            sep = os.linesep
            # Just in case io.fits ever produces 'S' on Py 3 (not the
            # default): join lines as str & avoid a TypeError with unicode
            # linesep; could also use astype('U') but it assumes an encoding
            # implicitly.
            if colarr.dtype.kind == 'S' and not isinstance(sep, bytes):
                colarr = np.char.decode(np.char.rstrip(colarr),
                                        encoding='ascii')
            databuf = sep.join(colarr).encode('ascii')
        else:
            databuf = colarr.tobytes()

        # Convert the stored text to a bytes file object that ASDF can open:
        with BytesIO(databuf) as fd:

            # Try to extract the named entry from the YAML:
            try:
                af = asdf.open(fd)
            except Exception:
                LOGGER.warning("Ignoring {} extension {}: failed to parse "
                               "ASDF.\nError was as follows:\n{}"
                               .format(name, ver, traceback.format_exc()))
                return
            else:
                with af:
                    try:
                        record = af.tree[name]
                    except KeyError:
                        LOGGER.warning("Ignoring extension {}: missing "
                                       "'{}' dict entry.".format(ver, name))
                        return

    else:
        LOGGER.warning("Ignoring non-FITS-table '{}' extension {}"
                       .format(name.upper(), ver))
        return

    return record


def record_to_asdftablehdu(record, extver=None):
    """
    Serialize a reduce record object as a FITS TableHDU (ASCII) extension.

    The ASCII table is actually a mini ASDF file. In the event that
    serialization as pure ASCII fails (this should not happen), a binary
    table representation will be used as a fallback.
    """
    return data_to_asdftablehdu('record', 'RECORD', 'record', record, extver)


def asdftablehdu_to_record(hdu):
    """
    Recreate a Reduce Record object from its serialization in a FITS table
    extension.

    Returns None (issuing a warning) if the extension cannot be parsed, so
    the rest of the file can still be read.
    """
    return asdftablehdu_to_data(hdu, 'record', 'record')
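The two wrappers above are symmetric, so a record written to a FITS table can be read straight back. A small round-trip sketch — the record string is a made-up example, and the import path assumes these helpers stay in astrodata.fits:

from astrodata.fits import record_to_asdftablehdu, asdftablehdu_to_record

# A made-up record payload; in practice this is whatever the interactive
# visualizer serialized.
record = '{"primitive": "findApertures", "aperture_inputs": {"max_apertures": 3}}'

hdu = record_to_asdftablehdu(record, extver=1)
assert hdu.name == 'RECORD'             # stored as an ASCII TableHDU

restored = asdftablehdu_to_record(hdu)  # parses the embedded mini-ASDF
assert restored == record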
8 changes: 8 additions & 0 deletions astrodata/tests/test_nddata.py
@@ -128,6 +128,14 @@ def test_wcs_slicing():
    assert nd[20, -10:].wcs(0) == (40, 20)


def test_record():
    nd = NDAstroData(np.zeros((4, 4)))
    record_data = {"test": "foo"}
    nd.record = record_data
    # should read out the same
    assert nd.record == record_data


def test_access_to_other_planes(testnd):
    assert hasattr(testnd, 'OBJMASK')
    assert testnd.OBJMASK.shape == testnd.data.shape
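The unit test above only exercises the in-memory attribute. A possible follow-on test, sketched here (not part of this diff; it assumes the astrodata.create/write/open helpers used elsewhere in the test suite), would check that the record survives a full write/read round trip through the RECORD extension:

import numpy as np
import astrodata
from astropy.io import fits
from astrodata.nddata import NDAstroData


def test_record_roundtrip(tmp_path):
    ad = astrodata.create(fits.PrimaryHDU())
    ad.append(NDAstroData(np.zeros((4, 4))))
    ad[0].record = {"test": "foo"}
    testfile = str(tmp_path / "roundtrip.fits")
    ad.write(testfile)
    # the record should come back intact from the RECORD extension
    assert astrodata.open(testfile)[0].record == {"test": "foo"}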
79 changes: 73 additions & 6 deletions geminidr/interactive/fit/aperture.py
@@ -809,6 +809,11 @@ def __init__(self, model, filename_info='', ui_params=None):
        self.model = model
        self.fig = None
        self.help_text = DETAILED_HELP
        self.inputs = dict()

        # moving this here so widgets are initialized in case
        # we are reloading state from saved json via --record/--replay
        self.params = self.parameters_view()

        # Customize the max_separation behavior away from the defaults. In particular,
        # we depend on extracting some information from the model which was not readily
@@ -880,7 +885,7 @@ def fn():
        self.make_modal(find_button, 'Recalculating Apertures...')
        self.make_modal(reset_button, 'Recalculating Apertures...')

        retval = column(
            Div(text="Parameters to compute the profile:",
                css_classes=['param_section']),
            *widgets[0:5],
@@ -891,6 +896,16 @@
            width_policy="min",
        )

        # save our input widgets for record/load if needed
        for widget in (maxaper, minsky, use_snr,
                       threshold, percentile, sizing, sections):
            self.inputs[widget.attr] = widget

        # Moving this here so it happens before any load of saved state
        self.model.recalc_apertures()

        return retval

    def visualize(self, doc):
        """
        Build the visualization in bokeh in the given browser document.
@@ -904,7 +919,7 @@

        bokeh_data_color = interactive_conf().bokeh_data_color

        params = self.params  # self.parameters_view()

        ymax = 100  # we will update this when we have a profile
        aperture_view = ApertureView(self.model, self.model.profile_shape, ymax)
@@ -944,12 +959,13 @@ def handle_clear(okc):
            else row(clear_button, renumber_button),
        ])

        # moved to constructor, this would overwrite the results of a load()
        # self.model.recalc_apertures()

        col = column(children=[aperture_view.fig, helptext],
                     sizing_mode='scale_width')

        for btn in (self.submit_button, self.abort_button, self.reset_all_button):
            btn.align = 'end'
            btn.height = 35
            btn.height_policy = "fixed"
@@ -958,7 +974,8 @@ def handle_clear(okc):
            btn.width_policy = "fixed"

        toolbar = row(Spacer(width=250),
                      column(self.get_filename_div(),
                             row(self.reset_all_button, self.abort_button, self.submit_button)),
                      Spacer(width=10),
                      align="end", css_classes=['top-row'])

@@ -998,6 +1015,57 @@ def result(self):
            return [[], []]
        return np.array(locations), limits

    def record(self):
        """
        Record the state of the interactive UI.

        This enhances the record from the base class with additional state
        information specific to the find-apertures visualizer: the values
        of the aperture-finding inputs and the location, start, and end of
        each current aperture.

        Returns
        -------
        dict : Dictionary representing the state of the inputs
        """
        retval = super().record()
        aperture_inputs = dict()
        for k in self.inputs:
            aperture_inputs[k] = getattr(self.model, k)
        retval["aperture_inputs"] = aperture_inputs
        apertures = dict()
        for aperture_id, aperture_model in self.model.aperture_models.items():
            aperture = dict()
            aperture['location'] = aperture_model.source.data['location'][0]
            aperture['start'] = aperture_model.source.data['start'][0]
            aperture['end'] = aperture_model.source.data['end'][0]
            apertures[aperture_id] = aperture
        retval['apertures'] = apertures
        return retval

    def load(self, record):
        """
        Load the state of the interactive UI.

        This reads in the saved state of a previous run and applies it
        to the visualizer.

        Parameters
        ----------
        record : dict
            Dictionary with recorded state of the visualizer
        """
        super().load(record)
        for k, v in record["aperture_inputs"].items():
            if k != 'section':
                setattr(self.model, k, v)
                self.inputs[k].reset()
        # drop the current apertures, then recreate the saved ones
        ap_ids = list(self.model.aperture_models.keys())
        for aperture_id in ap_ids:
            self.model.delete_aperture(aperture_id)
        for aperture_id, aperture in record["apertures"].items():
            self.model.add_aperture(aperture["location"], aperture["start"],
                                    aperture["end"])


def interactive_find_source_apertures(ext, ui_params=None, **kwargs):
    """
@@ -1009,7 +1077,6 @@ def interactive_find_source_apertures(ext, ui_params=None, **kwargs):
    also interact directly with the found apertures as desired. When the user
    hits the `Submit` button, this method will return the results of the find
    to the caller.
    """
    model = FindSourceAperturesModel(ext, **kwargs)
    fsav = FindSourceAperturesVisualizer(model, ui_params=ui_params,
                                         filename_info=ext.filename)
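Taken together, record() and load() give the visualizer a save/restore cycle. A sketch of the intended flow, following the construction pattern in interactive_find_source_apertures above — the keyword values passed to FindSourceAperturesModel are illustrative only:

# In a --record run: capture the UI state when the user submits.
model = FindSourceAperturesModel(ext, max_apertures=None, use_snr=True)
fsav = FindSourceAperturesVisualizer(model, ui_params=ui_params,
                                     filename_info=ext.filename)
saved = fsav.record()   # includes 'aperture_inputs' and 'apertures'

# In a later --replay run: build an equivalent visualizer and restore.
fsav2 = FindSourceAperturesVisualizer(
    FindSourceAperturesModel(ext, max_apertures=None, use_snr=True),
    ui_params=ui_params, filename_info=ext.filename)
fsav2.load(saved)       # resets the input widgets, then re-adds apertures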