Merge branch 'main' into gains-for-2amp
julienguy committed Jun 4, 2024
2 parents 6cdaa4f + dec037f commit b417b19
Showing 4 changed files with 126 additions and 62 deletions.
31 changes: 1 addition & 30 deletions bin/desi_tsnr_afterburner
@@ -36,7 +36,7 @@ from desiutil.log import get_logger
from desispec.tsnr import calc_tsnr2,tsnr2_to_efftime
from astropy.table import Table, vstack
from desiutil.depend import getdep
from desispec.tilecompleteness import compute_tile_completeness_table,merge_tile_completeness_table
from desispec.tilecompleteness import read_gfa_data, compute_tile_completeness_table, merge_tile_completeness_table
from desispec.skymag import compute_skymag
from desispec.efftime import compute_efftime
from desispec.parallel import stdouterr_redirected, use_mpi
@@ -163,35 +163,6 @@ def update_targ_info(entry, targ_in):
    return entry


def read_gfa_data(gfa_proc_dir) :
    """
    Read the directory with the offline GFA data reduction (like /global/cfs/cdirs/desi/survey/GFA/),
    find the latest version of the table files for the various surveys, return the merged table.
    See documentation here https://desi.lbl.gov/trac/wiki/SurveyValidation/SV1/conditions/summary_files

    Args:
        gfa_proc_dir: str, directory path

    returns astropy.table.Table
    """
    log = get_logger()
    tables=[]
    for survey in ["SV1","SV2","SV3"] :
        filenames=sorted(glob.glob("{}/offline_matched_coadd_ccds_{}-thru_????????.fits".format(gfa_proc_dir,survey)))
        if len(filenames)==0 : continue
        filename=filenames[-1]
        log.info(f"Reading {filename}")
        table=read_table(filename,2)  # HDU2 is average over frames during spectro exposure and median across CCDs
        tables.append(table)
    if len(tables)==0 :
        log=get_logger()
        mess="did not find any file offline_matched_coadd_ccds_*-thru_????????.fits in {}".format(gfa_proc_dir)
        log.critical(mess)
        raise RuntimeError(mess)
    table=vstack(tables)
    log.info(f'{len(table)} GFA table entries')
    return table


def compute_tsnr_values(cframe_filename,cframe_hdulist,night,expid,camera,specprod_dir, alpha_only=False) :
    """
    Computes TSNR values
12 changes: 12 additions & 0 deletions doc/changes.rst
@@ -2,9 +2,21 @@
desispec Change Log
===================

0.64.0 (unreleased)
-------------------

0.64.x and beyond reserved for post-Jura changes.

* Refactor the ``read_gfa_data`` function to support gfa_proc writing main-thruNIGHT
  files instead of sv3-thruNIGHT files (PR `#2252`_).

.. _`#2252`: https://github.com/desihub/desispec/pull/2252

0.63.7 (unreleased)
-------------------

0.63.x tags reserved for Jura prods.

* No changes yet.

0.63.6 (2024-05-30)
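For context on the changelog entry above: the refactored selection keys on the trailing night in the filename rather than on the survey label, so a main-thruNIGHT summary naturally supersedes older sv3-thruNIGHT files. Below is a minimal illustrative sketch of that selection; the file names are made up, and the parsing mirrors the new read_gfa_data shown further down.

import os

# Hypothetical file names; only the trailing YYYYMMDD matters for the selection.
filenames = [
    "offline_matched_coadd_ccds_SV3-thru_20210102.fits",
    "offline_matched_coadd_ccds_main-thru_20210103.fits",
]
# Map night -> filename, then pick the latest night regardless of survey label.
night_filename = {os.path.basename(f).split('_')[-1].split('.')[0]: f for f in filenames}
latest = night_filename[sorted(night_filename)[-1]]
print(latest)  # offline_matched_coadd_ccds_main-thru_20210103.fits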
52 changes: 52 additions & 0 deletions py/desispec/test/test_tilecompleteness.py
@@ -0,0 +1,52 @@
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""Test desispec.tilecompleteness.
"""
import os
import unittest
from unittest.mock import patch, call
from ..tilecompleteness import read_gfa_data


class TestTileCompleteness(unittest.TestCase):
    """Test desispec.tilecompleteness.
    """

    @patch('desispec.tilecompleteness.vstack')
    @patch('desispec.tilecompleteness.read_table')
    @patch('desispec.tilecompleteness.glob')
    @patch('desispec.tilecompleteness.get_logger')
    def test_read_gfa_data(self, mock_log, mock_glob, mock_table, mock_vstack):
        """Test identification of the most recent GFA file.
        """
        gfa_proc_dir = '/global/cfs/cdirs/desi/survey/GFA'
        mock_glob.glob.side_effect = [[os.path.join(gfa_proc_dir, 'offline_matched_coadd_ccds_SV1-thru_20201231.fits'),],
                                      [os.path.join(gfa_proc_dir, 'offline_matched_coadd_ccds_SV1-thru_20201231.fits'),
                                       os.path.join(gfa_proc_dir, 'offline_matched_coadd_ccds_SV2-thru_20210101.fits'),
                                       os.path.join(gfa_proc_dir, 'offline_matched_coadd_ccds_SV3-thru_20210102.fits'),
                                       os.path.join(gfa_proc_dir, 'offline_matched_coadd_ccds_main-thru_20210103.fits'),
                                       os.path.join(gfa_proc_dir, 'offline_matched_coadd_ccds_foobar-thru_20210104.fits'),]]
        mock_table.side_effect = [[(1, 2, 3, 4),], [(1, 2, 3, 4), (1, 2, 3, 4), (1, 2, 3, 4),]]
        mock_vstack.return_value = [(1, 2, 3, 4), (1, 2, 3, 4), (1, 2, 3, 4), (1, 2, 3, 4)]
        table = read_gfa_data(gfa_proc_dir)
        self.assertListEqual(table, [(1, 2, 3, 4), (1, 2, 3, 4), (1, 2, 3, 4), (1, 2, 3, 4)])
        mock_table.assert_has_calls([call(os.path.join(gfa_proc_dir, 'offline_matched_coadd_ccds_SV1-thru_20201231.fits'), 2),
                                     call(os.path.join(gfa_proc_dir, 'offline_matched_coadd_ccds_foobar-thru_20210104.fits'), 2),])
        mock_log().info.assert_has_calls([call("Reading SV1 file: %s", os.path.join(gfa_proc_dir, 'offline_matched_coadd_ccds_SV1-thru_20201231.fits')),
                                          call('%d GFA SV1 table entries', 1),
                                          call("Reading most recent file: %s", os.path.join(gfa_proc_dir, 'offline_matched_coadd_ccds_foobar-thru_20210104.fits')),
                                          call('%d GFA table entries', 3),
                                          call('%d GFA merged table entries', 4)])

    @patch('desispec.tilecompleteness.glob')
    @patch('desispec.tilecompleteness.get_logger')
    def test_read_gfa_data_no_files(self, mock_log, mock_glob):
        """Test the case where no GFA summary files are found.
        """
        gfa_proc_dir = '/global/cfs/cdirs/desi/survey/GFA'
        mock_glob.glob.return_value = []
        with self.assertRaises(RuntimeError) as e:
            table = read_gfa_data(gfa_proc_dir)
        self.assertEqual(str(e.exception), "did not find any file offline_matched_coadd_ccds_*-thru_????????.fits in %s" % gfa_proc_dir)
        mock_log().warning.assert_called_once_with("Could not find a SV1 file, skipping!")
        mock_log().critical.assert_called_once_with("did not find any file offline_matched_coadd_ccds_*-thru_????????.fits in %s", gfa_proc_dir)
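
A note on the mocking pattern used in these tests: names are patched where the module under test looks them up (desispec.tilecompleteness), not where they are originally defined. A minimal sketch of the same pattern outside a test class, assuming desispec is installed and importable; the directory path is purely illustrative.

from unittest.mock import patch

from desispec.tilecompleteness import read_gfa_data

with patch('desispec.tilecompleteness.glob') as mock_glob:
    mock_glob.glob.return_value = []       # every glob.glob() inside the module now sees an empty directory
    try:
        read_gfa_data('/no/such/gfa/dir')  # hypothetical path
    except RuntimeError as err:
        print(err)  # "did not find any file offline_matched_coadd_ccds_*-thru_????????.fits in /no/such/gfa/dir"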
93 changes: 61 additions & 32 deletions py/desispec/tilecompleteness.py
@@ -6,17 +6,74 @@
and tiles completion.
"""

import os,sys
import os
import numpy as np
import yaml
import glob
from astropy.table import Table,vstack
from astropy.table import Table, vstack

from desispec.io.util import checkgzip
from desispec.io import read_table

from desiutil.log import get_logger


def read_gfa_data(gfa_proc_dir):
    """Find the most recent GFA summary file in `gfa_proc_dir`.

    In addition, this function will read an "SV1" file to get earlier EXPIDs.
    The "SV1" file will be concatenated with the most recent summary to produce
    the returned table. There is some duplication of rows in this process, but
    because survey phases did overlap, it's not really possible to separate the
    summaries into distinct phases anyway.

    See also documentation here: https://desi.lbl.gov/trac/wiki/SurveyValidation/SV1/conditions/summary_files.

    Parameters
    ----------
    gfa_proc_dir : :class:`str`
        The GFA directory. Usually this will be ``/global/cfs/cdirs/desi/survey/GFA/``.

    Returns
    -------
    :class:`~astropy.table.Table`
        The summary data read from HDU2 of the files.
    """
    log = get_logger()
    #
    # First read the SV1 file, there should be only one at this point.
    #
    filenames = glob.glob(os.path.join(gfa_proc_dir, 'offline_matched_coadd_ccds_SV1-thru_????????.fits'))
    if len(filenames) == 1:
        log.info("Reading SV1 file: %s", filenames[0])
        tables = [read_table(filenames[0], 2)]
        log.info('%d GFA SV1 table entries', len(tables[0]))
    else:
        log.warning("Could not find a SV1 file, skipping!")
        tables = []
    #
    # Find the most recent file, independently of label.
    #
    filenames = glob.glob(os.path.join(gfa_proc_dir, 'offline_matched_coadd_ccds_*-thru_????????.fits'))
    if len(filenames) == 0:
        mess = "did not find any file offline_matched_coadd_ccds_*-thru_????????.fits in %s"
        log.critical(mess, gfa_proc_dir)
        raise RuntimeError(mess % gfa_proc_dir)
    #
    # Sort the filenames by night, independently of any other label.
    #
    night_filename = dict([(os.path.basename(f).split('_')[-1].split('.')[0], f) for f in filenames])
    last_night = sorted(night_filename.keys())[-1]
    filename = night_filename[last_night]
    log.info("Reading most recent file: %s", filename)
    tables.append(read_table(filename, 2))  # HDU2 is average over frames during spectro exposure and median across CCDs.
    log.info('%d GFA table entries', len(tables[-1]))  # index -1 so this also works when no SV1 file was found
    #
    # Merge the tables.
    #
    table = vstack(tables)
    log.info('%d GFA merged table entries', len(table))
    return table


def compute_tile_completeness_table(exposure_table,specprod_dir,auxiliary_table_filenames,min_number_of_petals=8) :
    """ Computes a summary table of the observed tiles

@@ -296,31 +353,3 @@ def merge_tile_completeness_table(previous_table,new_table) :
    res = res[ii]

    return res

def number_of_good_redrock(tileid,night,specprod_dir,warn=True) :

    log=get_logger()
    nok=0
    for spectro in range(10) :

        # coadd_filename = os.path.join(specprod_dir,"tiles/cumulative/{}/{}/coadd-{}-{}-thru{}.fits".format(tileid,night,spectro,tileid,night))
        coadd_filename, exists = findfile('coadd', night=night, tile=tileid,
                                          spectrograph=spectro, groupname='cumulative',
                                          specprod_dir=specprod_dir, return_exists=True)
        if not exists:
            if warn: log.warning("missing {}".format(coadd_filename))
            continue

        # redrock_filename = os.path.join(specprod_dir,"tiles/cumulative/{}/{}/redrock-{}-{}-thru{}.fits".format(tileid,night,spectro,tileid,night))
        redrock_filename, exists = findfile('redrock', night=night, tile=tileid,
                                            spectrograph=spectro, groupname='cumulative',
                                            specprod_dir=specprod_dir, return_exists=True)
        if not exists:
            if warn : log.warning("missing {}".format(redrock_filename))
            continue

        # do more tests

        nok+=1

    return nok
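
Finally, a quick usage sketch of the refactored function. The path is the usual GFA reduction directory mentioned in the docstring above; adjust it for your environment, and note the column inspection is only illustrative.

from desispec.tilecompleteness import read_gfa_data

gfa = read_gfa_data("/global/cfs/cdirs/desi/survey/GFA/")  # returns an astropy Table
print(len(gfa))          # merged rows: SV1 summary plus the most recent summary file
print(gfa.colnames[:5])  # first few column names, just to peek at the contents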
