Commit

Merge pull request spacetelescope#1493 from bhilbert4/dark-mon-use-django-db

Switch dark monitor to use django models
mfixstsci authored Aug 14, 2024
2 parents 2cb0a06 + dde72b9 commit 025af4e
Showing 6 changed files with 282 additions and 247 deletions.
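The heart of the change is in the first file: every raw SQLAlchemy call (session queries, engine-level inserts) is replaced by the equivalent Django ORM call on the per-instrument monitor models. A minimal before/after sketch, assuming a configured Django environment and using only field names that appear in the diff below (values are illustrative):

    import datetime

    # One of the per-instrument query-history models referenced in this diff
    from jwql.website.apps.jwql.monitor_models.dark_current import NIRCamDarkQueryHistory

    # Old (SQLAlchemy), removed by this commit:
    #   with engine.begin() as connection:
    #       connection.execute(NIRCamDarkQueryHistory.__table__.insert(), new_entry)

    # New (Django ORM): build a dict of column values, instantiate the model, save.
    new_entry = {'aperture': 'NRCA1_FULL',
                 'readpattern': 'RAPID',
                 'end_time_mjd': 60536.0,
                 'files_found': 3,
                 'run_monitor': True,
                 'entry_date': datetime.datetime.now(datetime.timezone.utc)}
    entry = NIRCamDarkQueryHistory(**new_entry)
    entry.save()

    # Queries go through the model manager instead of a session object.
    records = NIRCamDarkQueryHistory.objects.filter(aperture__iexact='NRCA1_FULL')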
116 changes: 50 additions & 66 deletions jwql/instrument_monitors/common_monitors/dark_monitor.py
@@ -81,7 +81,7 @@
import os

from astropy.io import ascii, fits
from astropy.modeling import models
from astropy.modeling.models import Gaussian1D
from astropy.stats import sigma_clipped_stats
from astropy.time import Time
from bokeh.models import ColorBar, ColumnDataSource, HoverTool, Legend
@@ -92,22 +92,26 @@
from sqlalchemy import func
from sqlalchemy.sql.expression import and_

from jwql.database.database_interface import session, engine
from jwql.database.database_interface import NIRCamDarkQueryHistory, NIRCamDarkPixelStats, NIRCamDarkDarkCurrent
from jwql.database.database_interface import NIRISSDarkQueryHistory, NIRISSDarkPixelStats, NIRISSDarkDarkCurrent
from jwql.database.database_interface import MIRIDarkQueryHistory, MIRIDarkPixelStats, MIRIDarkDarkCurrent
from jwql.database.database_interface import NIRSpecDarkQueryHistory, NIRSpecDarkPixelStats, NIRSpecDarkDarkCurrent
from jwql.database.database_interface import FGSDarkQueryHistory, FGSDarkPixelStats, FGSDarkDarkCurrent
from jwql.instrument_monitors import pipeline_tools
from jwql.shared_tasks.shared_tasks import only_one, run_pipeline, run_parallel_pipeline
from jwql.utils import calculations, instrument_properties, mast_utils, monitor_utils
from jwql.utils.constants import ASIC_TEMPLATES, DARK_MONITOR_BETWEEN_EPOCH_THRESHOLD_TIME, DARK_MONITOR_MAX_BADPOINTS_TO_PLOT
from jwql.utils.constants import JWST_INSTRUMENT_NAMES, FULL_FRAME_APERTURES, JWST_INSTRUMENT_NAMES_MIXEDCASE
from jwql.utils.constants import JWST_DATAPRODUCTS, MINIMUM_DARK_CURRENT_GROUPS, RAPID_READPATTERNS
from jwql.utils.constants import JWST_DATAPRODUCTS, MINIMUM_DARK_CURRENT_GROUPS, ON_GITHUB_ACTIONS, ON_READTHEDOCS, RAPID_READPATTERNS
from jwql.utils.logging_functions import log_info, log_fail
from jwql.utils.permissions import set_permissions
from jwql.utils.utils import copy_files, ensure_dir_exists, get_config, filesystem_path, save_png

if not ON_GITHUB_ACTIONS and not ON_READTHEDOCS:
# Need to set up django apps before we can access the models
import django # noqa: E402 (module level import not at top of file)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jwql.website.jwql_proj.settings")
django.setup()

# Import * is okay here because this module specifically only contains database models
# for this monitor
from jwql.website.apps.jwql.monitor_models.dark_current import * # noqa: E402 (module level import not at top of file)

THRESHOLDS_FILE = os.path.join(os.path.split(__file__)[0], 'dark_monitor_file_thresholds.txt')
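The conditional block above is what lets the monitor run outside the web app: the DJANGO_SETTINGS_MODULE environment variable points Django at the project settings, and django.setup() must be called before any model import touches the ORM. A standalone-script sketch of the same pattern (the model name is assumed to be among those exported by monitor_models.dark_current):

    import os
    import django

    # Configure Django first; model imports fail without an initialized app registry.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jwql.website.jwql_proj.settings")
    django.setup()

    # Safe to import and query the monitor models only after setup().
    from jwql.website.apps.jwql.monitor_models.dark_current import NIRCamDarkPixelStats
    print(NIRCamDarkPixelStats.objects.count())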


@@ -230,9 +234,9 @@ def add_bad_pix(self, coordinates, pixel_type, files, mean_filename, baseline_fi
'obs_end_time': observation_end_time,
'mean_dark_image_file': os.path.basename(mean_filename),
'baseline_file': os.path.basename(baseline_filename),
'entry_date': datetime.datetime.now()}
with engine.begin() as connection:
connection.execute(self.pixel_table.__table__.insert(), entry)
'entry_date': datetime.datetime.now(datetime.timezone.utc)}
entry = self.pixel_table(**entry)
entry.save()

def create_mean_slope_figure(self, image, num_files, hotxy=None, deadxy=None, noisyxy=None, baseline_file=None,
min_time='', max_time=''):
@@ -412,14 +416,15 @@ def exclude_existing_badpix(self, badpix, pixel_type):
raise ValueError('Unrecognized bad pixel type: {}'.format(pixel_type))

logging.info("\t\tRunning database query")
db_entries = session.query(self.pixel_table) \
.filter(self.pixel_table.type == pixel_type) \
.filter(self.pixel_table.detector == self.detector) \
.all()

filters = {"type__iexact": pixel_type,
"detector__iexact": self.detector
}
records = self.pixel_table.objects.filter(**filters).all()

already_found = []
if len(db_entries) != 0:
for _row in db_entries:
if records is not None:
for _row in records:
x_coords = _row.x_coord
y_coords = _row.y_coord
for x, y in zip(x_coords, y_coords):
@@ -442,7 +447,6 @@ def exclude_existing_badpix(self, badpix, pixel_type):

logging.info("\t\tKeeping {} {} pixels".format(len(new_pixels_x), pixel_type))

session.close()
return (new_pixels_x, new_pixels_y)

def exclude_too_few_groups(self, result_list):
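The keyword-lookup dictionary used in exclude_existing_badpix above is standard Django: a trailing __iexact makes the match a case-insensitive exact comparison, replacing the old SQLAlchemy equality filters, and filter(**filters) never returns None (it returns an empty queryset). A hedged sketch outside the class, with an illustrative detector value:

    from jwql.website.apps.jwql.monitor_models.dark_current import NIRCamDarkPixelStats

    filters = {"type__iexact": "hot",        # bad-pixel type, case-insensitive match
               "detector__iexact": "NRCA1"}  # illustrative detector value
    for row in NIRCamDarkPixelStats.objects.filter(**filters):
        print(row.x_coord, row.y_coord, row.mean_dark_image_file)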
@@ -521,29 +525,15 @@ def get_baseline_filename(self):
filename : str
Name of fits file containing the baseline image
"""

subq = session.query(self.pixel_table.detector,
func.max(self.pixel_table.entry_date).label('maxdate')
).group_by(self.pixel_table.detector).subquery('t2')

query = session.query(self.pixel_table).join(
subq,
and_(
self.pixel_table.detector == self.detector,
self.pixel_table.entry_date == subq.c.maxdate
)
)

count = query.count()
if not count:
filename = None
else:
filename = query.all()[0].baseline_file
record = self.pixel_table.objects.filter(detector__iexact=self.detector).order_by("-obs_end_time").first()
if record is not None:
filename = record.baseline_file
# Specify the full path
filename = os.path.join(get_config()['outputs'], 'dark_monitor', 'mean_slope_images', filename)
logging.info('Baseline filename: {}'.format(filename))
else:
filename = None

session.close()
return filename

def identify_tables(self):
@@ -552,9 +542,9 @@ def identify_tables(self):
"""

mixed_case_name = JWST_INSTRUMENT_NAMES_MIXEDCASE[self.instrument]
self.query_table = eval('{}DarkQueryHistory'.format(mixed_case_name))
self.pixel_table = eval('{}DarkPixelStats'.format(mixed_case_name))
self.stats_table = eval('{}DarkDarkCurrent'.format(mixed_case_name))
self.query_table = eval(f'{mixed_case_name}DarkQueryHistory')
self.pixel_table = eval(f'{mixed_case_name}DarkPixelStats')
self.stats_table = eval(f'{mixed_case_name}DarkDarkCurrent')

def most_recent_search(self):
"""Query the query history database and return the information
@@ -567,23 +557,18 @@
Date (in MJD) of the ending range of the previous MAST query
where the dark monitor was run.
"""
query = session.query(self.query_table).filter(self.query_table.aperture == self.aperture,
self.query_table.readpattern == self.readpatt). \
filter(self.query_table.run_monitor == True) # noqa: E348 (comparison to true)

dates = np.zeros(0)
for instance in query:
dates = np.append(dates, instance.end_time_mjd)
filters = {"aperture__iexact": self.aperture,
"readpattern__iexact": self.readpatt,
"run_monitor": True}
record = self.query_table.objects.filter(**filters).order_by("-end_time_mjd").first()

query_count = len(dates)
if query_count == 0:
if record is None:
query_result = 59607.0 # a.k.a. Jan 28, 2022 == First JWST images (MIRI)
logging.info(('\tNo query history for {} with {}. Beginning search date will be set to {}.'
.format(self.aperture, self.readpatt, query_result)))
else:
query_result = np.max(dates)
query_result = record.end_time_mjd

session.close()
return query_result

def noise_check(self, new_noise_image, baseline_noise_image, threshold=1.5):
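get_baseline_filename and most_recent_search now share the same latest-record idiom: filter, order_by on a descending column, then first(), which returns None when nothing matches. This replaces the SQLAlchemy subquery on func.max and the manual np.max over query results. A sketch using the fields shown above:

    from jwql.website.apps.jwql.monitor_models.dark_current import NIRCamDarkQueryHistory

    record = (NIRCamDarkQueryHistory.objects
              .filter(aperture__iexact="NRCA1_FULL", run_monitor=True)
              .order_by("-end_time_mjd")
              .first())
    query_start = 59607.0 if record is None else record.end_time_mjd  # fallback: first JWST images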
@@ -895,12 +880,12 @@ def process(self, file_list):
'double_gauss_width2': double_gauss_params[key][5],
'double_gauss_chisq': double_gauss_chisquared[key],
'mean_dark_image_file': os.path.basename(mean_slope_file),
'hist_dark_values': bins[key],
'hist_amplitudes': histogram[key],
'entry_date': datetime.datetime.now()
'hist_dark_values': list(bins[key]),
'hist_amplitudes': list(histogram[key]),
'entry_date': datetime.datetime.now(datetime.timezone.utc)
}
with engine.begin() as connection:
connection.execute(self.stats_table.__table__.insert(), dark_db_entry)
entry = self.stats_table(**dark_db_entry)
entry.save()

def read_baseline_slope_image(self, filename):
"""Read in a baseline mean slope image and associated standard
@@ -951,7 +936,7 @@ def run(self):
self.query_end = Time.now().mjd

# Loop over all instruments
for instrument in ['miri', 'nircam']: # JWST_INSTRUMENT_NAMES:
for instrument in JWST_INSTRUMENT_NAMES:
self.instrument = instrument
logging.info(f'\n\nWorking on {instrument}')

@@ -981,6 +966,7 @@

# Locate the record of the most recent MAST search
self.query_start = self.most_recent_search()

logging.info(f'\tQuery times: {self.query_start} {self.query_end}')

# Query MAST using the aperture and the time of the
@@ -1124,11 +1110,10 @@ def run(self):
'end_time_mjd': batch_end_time,
'files_found': len(dark_files),
'run_monitor': monitor_run,
'entry_date': datetime.datetime.now()}
'entry_date': datetime.datetime.now(datetime.timezone.utc)}

with engine.begin() as connection:
connection.execute(
self.query_table.__table__.insert(), new_entry)
entry = self.query_table(**new_entry)
entry.save()
logging.info('\tUpdated the query history table')
logging.info('NEW ENTRY: ')
logging.info(new_entry)
@@ -1146,11 +1131,10 @@
'end_time_mjd': self.query_end,
'files_found': len(new_entries),
'run_monitor': monitor_run,
'entry_date': datetime.datetime.now()}
'entry_date': datetime.datetime.now(datetime.timezone.utc)}

with engine.begin() as connection:
connection.execute(
self.query_table.__table__.insert(), new_entry)
entry = self.query_table(**new_entry)
entry.save()
logging.info('\tUpdated the query history table')
logging.info('NEW ENTRY: ')
logging.info(new_entry)
@@ -1546,7 +1530,7 @@ def stats_by_amp(self, image, amps):
amplitude, peak, width = calculations.gaussian1d_fit(bin_centers, hist, initial_params)
gaussian_params[key] = [amplitude, peak, width]

gauss_fit_model = models.Gaussian1D(amplitude=amplitude[0], mean=peak[0], stddev=width[0])
gauss_fit_model = Gaussian1D(amplitude=amplitude[0], mean=peak[0], stddev=width[0])
gauss_fit = gauss_fit_model(bin_centers)

positive = hist > 0
2 changes: 1 addition & 1 deletion jwql/utils/instrument_properties.py
@@ -268,7 +268,7 @@ def get_obstime(filename):
time = h[0].header['TIME-OBS']
year, month, day = [int(element) for element in date.split('-')]
hour, minute, second = [float(element) for element in time.split(':')]
return datetime.datetime(year, month, day, int(hour), int(minute), int(second))
return datetime.datetime(year, month, day, int(hour), int(minute), int(second), tzinfo=datetime.timezone.utc)


def mean_time(times):
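Every timestamp written by the monitor is now timezone-aware UTC, including the get_obstime return above, presumably to match Django's timezone handling (with USE_TZ enabled, saving a naive datetime triggers a warning; this is an assumption, since the project settings are not part of this diff). The pattern:

    import datetime

    naive = datetime.datetime.now()                       # no tzinfo attached
    aware = datetime.datetime.now(datetime.timezone.utc)  # explicit UTC, used throughout this commit
    obs_time = datetime.datetime(2024, 8, 14, 12, 0, 0, tzinfo=datetime.timezone.utc)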
40 changes: 40 additions & 0 deletions jwql/website/apps/jwql/models.py
@@ -208,3 +208,43 @@ class Meta:
def __str__(self):
"""Container for all anomalies associated with each RootFileInfo object """
return self.root_file_info.root_name


def get_model_column_names(model_name):
"""Return all column names for the input ``model_name`` as a list
Parameters
----------
model_name : django.db.models.base.ModelBase
e.g. model_name = eval('NIRCamDarkDarkCurrent')
Returns
-------
colnames : list
List of column names
"""
return [f.name for f in model_name._meta.get_fields()]


def get_unique_values_per_column(model_name, column_name):
"""Return a list of the unique values present in the column ``column_name`` in
the ``model_name`` model.
Parameters
----------
model_name : django.db.models.base.ModelBase
e.g. model_name = eval('NIRCamDarkDarkCurrent')
column_name : str
Column name to examine
Returns
-------
values : list
List of unique values in ``column_name``
"""
query_set = model_name.objects.values(column_name).distinct()
values = []
for row in query_set:
values.append(row[column_name])
return values
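A short usage sketch for the two new helpers (the model import path and column name are illustrative; any monitor model should work):

    from jwql.website.apps.jwql.models import get_model_column_names, get_unique_values_per_column
    from jwql.website.apps.jwql.monitor_models.dark_current import NIRCamDarkDarkCurrent

    columns = get_model_column_names(NIRCamDarkDarkCurrent)
    apertures = get_unique_values_per_column(NIRCamDarkDarkCurrent, "aperture")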
