Commit
update UPDATED
weaverba137 committed Sep 17, 2024
1 parent 9d050b6 commit 1896576
Showing 2 changed files with 166 additions and 92 deletions.
120 changes: 81 additions & 39 deletions bin/desi_tsnr_afterburner
@@ -276,19 +276,35 @@ def update_table(table1,table2,keys) :
def compute_summary_tables(summary_rows, preexisting_tsnr2_expid_table,
preexisting_tsnr2_frame_table, specprod_dir,
add_badexp=True, nights=None):
""" Compute summary tables.
Args:
summary_rows: list of dictionaries
preexisting_tsnr2_expid_table: None or astropy.table.Table, update this table if not None
preexisting_tsnr2_frame_table: None or astropy.table.Table, update this table if not None
specprod_dir: str, production directory
Options:
add_badexp (bool): add known bad exposures that weren't processed
nights: None or list, list of nights to compute bad exposures summary tables for
Returns tsnr2_expid_table , tsnr2_frame_table
"""Compute exposure and frame summary tables.
Parameters
----------
summary_rows : :class:`list`
A list of :class:`dict` describing columns. In nightly operations, this
is an empty list.
preexisting_tsnr2_expid_table : :class:`~astropy.table.Table`
Update this exposure table if the input is not ``None``. In nightly operations,
this will always be set.
preexisting_tsnr2_frame_table : :class:`~astropy.table.Table`
Update this frame table if the input is not ``None``. In nightly operations,
this will always be set.
specprod_dir : :class:`str`
Production directory.
add_badexp : :class:`bool`, optional
Add known bad exposures that weren't processed. Default is ``True``.
nights : :class:`list`, optional
List of nights for which to compute bad exposures summary.
Returns
-------
:class:`tuple`
A tuple containing the updated exposure and frames tables.
Raises
------
KeyError
If the columns cannot be placed in the proper order.
"""
log = get_logger()

@@ -509,7 +525,6 @@ def compute_summary_tables(summary_rows, preexisting_tsnr2_expid_table,
else:
exp_summary['EFFTIME_SPEC'][ii] = exp_summary['BGS_EFFTIME_BRIGHT'][ii]


if preexisting_tsnr2_expid_table is not None :
log.debug("Update to preexisting")

@@ -552,12 +567,13 @@ def compute_summary_tables(summary_rows, preexisting_tsnr2_expid_table,
'SKY_MAG_R_SPEC','SKY_MAG_Z_SPEC','EFFTIME_GFA','EFFTIME_DARK_GFA','EFFTIME_BRIGHT_GFA',
'EFFTIME_BACKUP_GFA']

if not np.all(np.in1d(exp_summary.dtype.names,neworder)) :
missing=~np.in1d(exp_summary.dtype.names,neworder)
log.critical("missing keys {} in new order list".format(np.array(exp_summary.dtype.names)[missing]))
log.error("new order list:",sorted(neworder))
log.error("current table:",sorted(exp_summary.dtype.names))
sys.exit(12)
if not np.all(np.in1d(exp_summary.dtype.names, neworder)):
missing = ~np.in1d(exp_summary.dtype.names, neworder)
msg = "Missing keys, '{}' in new order list!".format(np.array(exp_summary.dtype.names)[missing])
log.critical(msg)
log.critical("new order list:",sorted(neworder))
log.critical("current table:",sorted(exp_summary.dtype.names))
raise KeyError(msg)
newtable=Table()
newtable.meta=exp_summary.meta
for k in neworder :
@@ -569,6 +585,7 @@ def compute_summary_tables(summary_rows, preexisting_tsnr2_expid_table,

return exp_summary, cam_summary
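The column check above now raises a KeyError instead of calling sys.exit(12) directly, and one_night() later in this diff converts that exception back into the exit status 12. A minimal sketch of that pattern, using illustrative names (EXPECTED_ORDER, reorder_summary, run_night) rather than the real column list or functions:

# Sketch of the raise-then-translate pattern introduced in this commit.
# The names below are illustrative, not the real desi_tsnr_afterburner ones.
import sys

import numpy as np
from astropy.table import Table

EXPECTED_ORDER = ["EXPID", "NIGHT", "EFFTIME_SPEC"]


def reorder_summary(summary):
    """Return a copy of summary with columns in EXPECTED_ORDER, or raise KeyError."""
    names = np.array(summary.colnames)
    # np.isin is the current NumPy spelling of the np.in1d test used above.
    missing = ~np.isin(names, EXPECTED_ORDER)
    if missing.any():
        raise KeyError("missing keys {} in new order list".format(names[missing]))
    newtable = Table()
    newtable.meta = summary.meta
    for k in EXPECTED_ORDER:
        newtable[k] = summary[k]
    return newtable


def run_night(summary):
    """Translate the exception into an exit status, as one_night() now does."""
    try:
        summary = reorder_summary(summary)
    except KeyError:
        return 12  # same status the old sys.exit(12) call produced
    return 0


if __name__ == "__main__":
    t = Table({"EXPID": [1], "NIGHT": [20240917], "EFFTIME_SPEC": [950.0]})
    sys.exit(run_night(t))

Raising instead of exiting lets the per-night caller decide how to shut down, which matters once the work is spread over multiple processes or MPI ranks.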


def write_summary_tables(tsnr2_expid_table,tsnr2_frame_table,output_fits_filename,output_csv_filename=None) :
"""Writes a summary fits file.
@@ -926,6 +943,24 @@ def main():
summary_rows = list()

def one_night(count, night, rows, tmpfilename):
"""Prepare multiprocessing steps for analyzing one night.
Parameters
----------
count : :class:`int`
A MPI rank number. This may be unused.
night : :class:`str`
The night to analyze.
rows : :class:`list`
A list that will be passed to :func:`compute_summary_tables`.
tmpfilename : :class:`str`
A temporary filename.
Returns
-------
:class:`int`
An integer suitable for passing to :func:`sys.exit`.
"""
dirnames = sorted(glob.glob('{}/exposures/{}/*'.format(args.prod,night)))
night_expids=[]
for dirname in dirnames :
@@ -937,7 +972,7 @@ def main():
if expids is not None :
night_expids = np.intersect1d(expids,night_expids)
if night_expids.size == 0 :
return
return 0
log.info("{} {}".format(night,night_expids))

func_args = []
@@ -961,14 +996,20 @@ def main():
rows.append(entry)

# write result after every night.
if len(rows)>0 or args.add_badexp:
tsnr2_expid_table,tsnr2_frame_table = compute_summary_tables(rows,
preexisting_tsnr2_expid_table,
preexisting_tsnr2_frame_table,
args.prod, nights=[night],
add_badexp=args.add_badexp)
write_summary_tables(tsnr2_expid_table,tsnr2_frame_table,output_fits_filename=tmpfilename)
log.info("wrote {} entries in tmp file {}".format(len(rows),tmpfilename))
if len(rows) > 0 or args.add_badexp:
try:
tsnr2_expid_table, tsnr2_frame_table = compute_summary_tables(rows,
preexisting_tsnr2_expid_table,
preexisting_tsnr2_frame_table,
args.prod, nights=[night],
add_badexp=args.add_badexp)
except KeyError:
# Error messages have already been logged at this point.
return 12
write_summary_tables(tsnr2_expid_table, tsnr2_frame_table, output_fits_filename=tmpfilename)
log.info("wrote {} entries in tmp file {}".format(len(rows), tmpfilename))

return 0

if comm is not None:
comm.barrier()
@@ -995,18 +1036,24 @@ def main():
for count,night in enumerate(ranknights):
tmpfilename=args.outfile.replace(".fits","_tmp_{:d}.fits".format(rank))

one_night(count, night, rank_summary_rows, tmpfilename)
one_night_status = one_night(count, night, rank_summary_rows, tmpfilename)
#
# Not sure of the best way to exit in this case.
#
else:
log.warning('Rank {:d} has no nights to process'.format(rank))


comm.barrier()

else:
for count,night in enumerate(nights):
tmpfilename=args.outfile.replace(".fits","_tmp.fits")
for count, night in enumerate(nights):
tmpfilename=args.outfile.replace(".fits", "_tmp.fits")

one_night(count, night, summary_rows, tmpfilename)
one_night_status = one_night(count, night, summary_rows, tmpfilename)

if one_night_status != 0:
return one_night_status
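In the MPI branch above, one_night_status is captured but, as the inline comment notes, it is not yet clear how best to exit. One possible approach, not what this commit does, is to reduce the worst status across ranks so that every rank exits together; a sketch assuming mpi4py:

# One possible way to propagate a per-rank failure under MPI (a sketch,
# not the commit's behavior): reduce the worst status over all ranks.
from mpi4py import MPI


def collective_exit_status(comm, local_status):
    """Return the maximum status across ranks; every rank gets the same value."""
    if comm is None:
        return local_status
    # Every rank must reach this call, including ranks with no nights to process.
    return comm.allreduce(local_status, op=MPI.MAX)

# Usage sketch inside main(), after the per-rank night loop:
#     status = collective_exit_status(comm, one_night_status)
#     if status != 0:
#         sys.exit(status)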

if (comm is None) | (rank == 0):
log.info('Gathering TSNR tmp tables (over ranks: {}, {})'.format(multinode, size))
@@ -1046,11 +1093,6 @@ def main():
if multinode:
write_summary_tables(tsnr2_expid_table,tsnr2_frame_table,output_fits_filename=args.outfile.replace('.fits', '_tmp.fits'))

# tsnr2_expid_table,tsnr2_frame_table = compute_summary_tables(summary_rows,
# preexisting_tsnr2_expid_table,
# preexisting_tsnr2_frame_table,args.prod,
# add_badexp=add_badexp)

if args.skymags is not None :
skymags_table = Table.read(args.skymags)
add_skymags_columns(tsnr2_expid_table,skymags_table)
@@ -1175,7 +1217,7 @@ def main():
new_tile_table = compute_tile_completeness_table(exposure_table[selection],args.prod,auxiliary_table_filenames=args.aux)
if os.path.isfile(args.tile_completeness) :
previous_table = Table.read(args.tile_completeness)
new_tile_table = merge_tile_completeness_table(previous_table,new_tile_table)
new_tile_table = merge_tile_completeness_table(previous_table, new_tile_table)
new_tile_table.write(args.tile_completeness,overwrite=True)
log.info("wrote {}".format(args.tile_completeness))

138 changes: 85 additions & 53 deletions py/desispec/tilecompleteness.py
@@ -7,14 +7,17 @@
"""

import os
import numpy as np
import datetime
import glob
from astropy.table import Table, vstack

from desispec.io import read_table
import pytz
import numpy as np
from astropy.table import Table, vstack

from desiutil.log import get_logger

from desispec.io import read_table
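The new datetime and pytz imports support the UPDATED timestamp introduced in merge_tile_completeness_table below; the stamp is built exactly as in that function:

# Current time in the US/Pacific zone, formatted with a numeric UTC offset,
# as used for the UPDATED column later in this file.
import datetime

import pytz

timestamp = datetime.datetime.now(tz=pytz.timezone('US/Pacific')).strftime("%Y-%m-%dT%H:%M:%S%z")
print(timestamp)  # e.g. '2024-09-17T12:34:56-0700'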


def read_gfa_data(gfa_proc_dir):
"""Find the most recent GFA summary file in `gfa_proc_dir`.
@@ -252,7 +255,7 @@ def compute_tile_completeness_table(exposure_table,specprod_dir,auxiliary_table_
return res

def reorder_columns(table) :
neworder=['TILEID','SURVEY','PROGRAM','FAPRGRM','FAFLAVOR','NEXP','EXPTIME','TILERA','TILEDEC','EFFTIME_ETC','EFFTIME_SPEC','EFFTIME_GFA','GOALTIME','OBSSTATUS','LRG_EFFTIME_DARK','ELG_EFFTIME_DARK','BGS_EFFTIME_BRIGHT','LYA_EFFTIME_DARK','GOALTYPE','MINTFRAC','LASTNIGHT']
neworder=['TILEID','SURVEY','PROGRAM','FAPRGRM','FAFLAVOR','NEXP','EXPTIME','TILERA','TILEDEC','EFFTIME_ETC','EFFTIME_SPEC','EFFTIME_GFA','GOALTIME','OBSSTATUS','LRG_EFFTIME_DARK','ELG_EFFTIME_DARK','BGS_EFFTIME_BRIGHT','LYA_EFFTIME_DARK','GOALTYPE','MINTFRAC','LASTNIGHT','UPDATED']

if not np.all(np.in1d(neworder,table.dtype.names)) or not np.all(np.in1d(table.dtype.names,neworder)) :
log = get_logger()
@@ -289,83 +292,112 @@ def is_same_table_rows(table1,index1,table2,index2) :
return True


def merge_tile_completeness_table(previous_table,new_table) :
""" Merges tile summary tables.
Args:
previous_table: astropy.table.Table
new_table: astropy.table.Table
Returns: astropy.table.Table with merged entries.
"""
log = get_logger()

# first check columns and add in previous if missing
for k in new_table.dtype.names :
if not k in previous_table.dtype.names :
log.info("New column {}".format(k))
previous_table[k] = np.zeros(len(previous_table),dtype=new_table[k].dtype)

# check whether there is any difference for the new ones
t2i={t:i for i,t in enumerate(previous_table["TILEID"])}

def merge_tile_completeness_table(previous_table, new_table):
"""Merges tile summary tables.

The ``UPDATED`` column contains a timestamp that will be
set to the current time for any entries in `new_table`.

Parameters
----------
previous_table : :class:`~astropy.table.Table`
The previous version of the tile completeness table.
new_table : :class:`~astropy.table.Table`
The new tile completeness data, usually from one new night.

Returns
-------
:class:`~astropy.table.Table`
The table with merged entries.
"""
log = get_logger()
#
# First check columns and add in previous if missing.
#
for column in new_table.colnames:
if column not in previous_table.colnames:
log.info("Adding new column to previous data: '%s'.", column)
previous_table[column] = np.zeros(len(previous_table), dtype=new_table[column].dtype)
#
# Check whether there is any difference in new_table
#
t2i={t: i for i, t in enumerate(previous_table["TILEID"])}

nadd=0
nmod=0
nforcekeep=0

# keep all tiles that are not in the new table
keep_from_previous = list(np.where(~np.in1d(previous_table["TILEID"],new_table["TILEID"]))[0])
#
# Keep all tiles that are not in new_table.
#
keep_from_previous = list(np.where(~np.in1d(previous_table["TILEID"], new_table["TILEID"]))[0])
nsame = len(keep_from_previous)

add_from_new = []
for j,t in enumerate(new_table["TILEID"]) :
if t not in t2i :
for j, t in enumerate(new_table["TILEID"]):
if t not in t2i:
nadd += 1
add_from_new.append(j)
continue
i=t2i[t]

if is_same_table_rows(previous_table,i,new_table,j) :
i = t2i[t]
if is_same_table_rows(previous_table, i, new_table, j):
nsame += 1
keep_from_previous.append(i)
continue

# do some sanity check
any_change=False
for k in ["SURVEY","GOALTYPE"] :
if new_table[k][j] == "unknown" and previous_table[k][i] != "unknown" :
log.warning("IGNORE change for tile {} of {}: {} -> {}".format(t,k,previous_table[k][i],new_table[k][j]))
new_table[k][j] = previous_table[k][i]
any_change=True
#
# Perform some sanity checks.
#
any_change = False
for column in ("SURVEY", "GOALTYPE"):
if new_table[column][j] == "unknown" and previous_table[column][i] != "unknown":
log.warning("IGNORE change in column %s for tile %d: '%s' -> '%s'.",
column, t, str(previous_table[column][i]), str(new_table[column][j]))
new_table[column][j] = previous_table[column][i]
any_change = True

survey = new_table["SURVEY"][j]
if survey in ["cmx","sv1","sv2","sv3"]:
for k in ["GOALTIME","OBSSTATUS"] :
if new_table[k][j] != previous_table[k][i] :
log.warning("IGNORE change for tile {} of {}: {} -> {}".format(t,k,previous_table[k][i],new_table[k][j]))
new_table[k][j] = previous_table[k][i]
any_change=True

if any_change : # recheck if still different
if is_same_table_rows(previous_table,i,new_table,j) :
if survey in ("cmx", "sv1", "sv2", "sv3"):
for column in ("GOALTIME", "OBSSTATUS"):
if new_table[column][j] != previous_table[column][i]:
log.warning("IGNORE change in column %s for tile %d: '%s' -> '%s'.",
column, t, str(previous_table[column][i]), str(new_table[column][j]))
new_table[column][j] = previous_table[column][i]
any_change = True
#
# Redo check to see if there are still any differences.
#
if any_change:
if is_same_table_rows(previous_table, i, new_table, j):
nsame += 1
keep_from_previous.append(i)
continue

nmod += 1
add_from_new.append(j)

log.info("{} tiles unchanged".format(nsame))
log.info("{} tiles modified".format(nmod))
log.info("{} tiles added".format(nadd))

if len(add_from_new)>0 :
res = vstack( [ previous_table[keep_from_previous] , new_table[add_from_new] ] )
else :
log.info("%d tiles unchanged.", nsame)
log.info("%d tiles modified.", nmod)
log.info("%d tiles added.", nadd)
#
# Stack the tables.
#
if len(add_from_new) > 0:
#
# Set the UPDATED column.
#
timestamp = datetime.datetime.now(tz=pytz.timezone('US/Pacific')).strftime("%Y-%m-%dT%H:%M:%S%z")
new_table['UPDATED'][add_from_new] = timestamp
res = vstack([previous_table[keep_from_previous],
new_table[add_from_new]])
else:
res = previous_table

#
# Set column order.
#
res = reorder_columns(res)
# reorder rows
#
# Reorder rows.
#
ii = np.argsort(res["LASTNIGHT"])
res = res[ii]

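As a toy illustration of the merge just shown (not a call to the real function, whose table schema is much larger): tiles that only exist in the previous table keep their old UPDATED stamp, while new or modified tiles are restamped and the result is re-sorted by LASTNIGHT. The real function additionally keeps previous rows whose new version is identical.

# Toy version of the merge idiom; columns reduced to a minimum for illustration.
import numpy as np
from astropy.table import Table, vstack

previous = Table({'TILEID': [1, 2],
                  'LASTNIGHT': [20240101, 20240102],
                  'UPDATED': ['2024-01-02T00:00:00-0800', '2024-01-03T00:00:00-0800']})
new = Table({'TILEID': [2, 3],
             'LASTNIGHT': [20240915, 20240916],
             'UPDATED': ['', '']})

keep = ~np.isin(previous['TILEID'], new['TILEID'])   # tiles only in the previous table
new['UPDATED'] = '2024-09-17T10:00:00-0700'          # stamp the new or modified tiles
merged = vstack([previous[keep], new])
merged = merged[np.argsort(merged['LASTNIGHT'])]     # reorder rows by LASTNIGHT
print(merged)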
