Skip to content

Commit

Permalink
add option to use rtp_ant_class.csv files instead of diffs
Browse files Browse the repository at this point in the history
  • Loading branch information
jsdillon committed Nov 25, 2024
1 parent ea9fd7c commit e73a837
Showing 1 changed file with 76 additions and 26 deletions.
102 changes: 76 additions & 26 deletions notebooks/file_calibration.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
"source": [
"# Single File Calibration\n",
"\n",
"**by Josh Dillon, Aaron Parsons, Tyler Cox, and Zachary Martinot**, last updated July 15, 2024\n",
"**by Josh Dillon, Aaron Parsons, Tyler Cox, and Zachary Martinot**, last updated November 25, 2024\n",
"\n",
"This notebook is designed to infer as much information about the array from a single file, including pushing the calibration and RFI mitigation as far as possible. Calibration includes redundant-baseline calibration, RFI-based calibration of delay slopes, model-based calibration of overall amplitudes, and a full per-frequency phase gradient absolute calibration if abscal model files are available.\n",
"\n",
Expand Down Expand Up @@ -106,11 +106,15 @@
"SAVE_RESULTS = os.environ.get(\"SAVE_RESULTS\", \"TRUE\").upper() == \"TRUE\"\n",
"SAVE_OMNIVIS_FILE = os.environ.get(\"SAVE_OMNIVIS_FILE\", \"FALSE\").upper() == \"TRUE\"\n",
"\n",
"\n",
"# get infile names\n",
"SUM_FILE = os.environ.get(\"SUM_FILE\", None)\n",
"# SUM_FILE = '/lustre/aoc/projects/hera/h6c-analysis/IDR2/2459867/zen.2459867.46002.sum.uvh5' # If sum_file is not defined in the environment variables, define it here.\n",
"DIFF_FILE = SUM_FILE.replace('sum', 'diff')\n",
"USE_DIFF = os.environ.get(\"USE_DIFF\", \"TRUE\").upper() == \"TRUE\"\n",
"if USE_DIFF:\n",
" DIFF_FILE = SUM_FILE.replace('sum', 'diff')\n",
"else:\n",
" DIFF_FILE = None\n",
" RTP_ANTCLASS = SUM_FILE.replace('.uvh5', '.rtp_ant_class.csv')\n",
"\n",
"# get outfilenames\n",
"AM_FILE = (SUM_FILE.replace('.uvh5', '.ant_metrics.hdf5') if SAVE_RESULTS else None)\n",
Expand Down Expand Up @@ -259,8 +263,9 @@
"read_start = time.time()\n",
"hd = io.HERADataFastReader(SUM_FILE)\n",
"data, _, _ = hd.read(read_flags=False, read_nsamples=False)\n",
"hd_diff = io.HERADataFastReader(DIFF_FILE)\n",
"diff_data, _, _ = hd_diff.read(read_flags=False, read_nsamples=False, dtype=np.complex64, fix_autos_func=np.real)\n",
"if USE_DIFF:\n",
" hd_diff = io.HERADataFastReader(DIFF_FILE)\n",
" diff_data, _, _ = hd_diff.read(read_flags=False, read_nsamples=False, dtype=np.complex64, fix_autos_func=np.real)\n",
"print(f'Finished loading data in {(time.time() - read_start) / 60:.2f} minutes.')"
]
},
Expand Down Expand Up @@ -300,6 +305,44 @@
"## Classify good, suspect, and bad antpols"
]
},
{
"cell_type": "markdown",
"id": "ae4f2573-c931-472d-a0dd-f1642e3d6767",
"metadata": {},
"source": [
"### Load diff-dependent classifications from the RTP ant_class CSV when diff files are not available"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d96b60d2-2efd-46da-b10a-2c4c0373986b",
"metadata": {},
"outputs": [],
"source": [
"if not USE_DIFF:\n",
" # Rebuild the diff-based antenna classifications from the per-antenna CSV written by\n",
" # RTP, since the diff-dependent checkers below cannot be run without diff files.\n",
" def read_antenna_classification(df, category):\n",
" # Construct an ant_class.AntennaClassification for one CSV category.\n",
" # df: DataFrame from rtp_ant_class.csv with an 'Antenna' column plus, per category,\n",
" # a data column (e.g. 'Dead?') and a class column (e.g. 'Dead? Class').\n",
" ac = ant_class.AntennaClassification()\n",
" # NOTE(review): writes the private ._data attribute directly -- assumes this is\n",
" # where AntennaClassification keeps per-antenna metric values; confirm in ant_class.\n",
" ac._data = {}\n",
" for antname, class_data, antclass in zip(df['Antenna'], df[category], df[f'{category} Class']):\n",
" try: \n",
" # CSV values arrive as strings; convert numeric metrics back to floats.\n",
" class_data = float(class_data)\n",
" except:\n",
" # NOTE(review): bare except keeps non-numeric entries as strings; a narrower\n",
" # except (TypeError, ValueError) would avoid masking unrelated errors.\n",
" pass\n",
" # Keep only string labels or finite numbers (drops NaN/blank CSV rows).\n",
" if isinstance(class_data, str) or np.isfinite(class_data):\n",
" # Antenna names like '101e' split into an (int, antpol) tuple key.\n",
" ant = (int(antname[:-1]), utils._comply_antpol(antname[-1]))\n",
" ac[ant] = antclass\n",
" ac._data[ant] = class_data\n",
" return ac\n",
"\n",
" # Recreate the classification objects that the diff-based cells below would produce.\n",
" df = pd.read_csv(RTP_ANTCLASS)\n",
" am_totally_dead = read_antenna_classification(df, 'Dead?')\n",
" am_corr = read_antenna_classification(df, 'Low Correlation')\n",
" am_xpol = read_antenna_classification(df, 'Cross-Polarized')\n",
" zeros_class = read_antenna_classification(df, 'Even/Odd Zeros')\n",
" xengine_diff_class = read_antenna_classification(df, 'Bad Diff X-Engines')"
]
},
{
"cell_type": "markdown",
"id": "f0450fb4",
Expand All @@ -317,11 +360,12 @@
"metadata": {},
"outputs": [],
"source": [
"am = ant_metrics.AntennaMetrics(SUM_FILE, DIFF_FILE, sum_data=data, diff_data=diff_data)\n",
"am.iterative_antenna_metrics_and_flagging(crossCut=am_xpol_bad[1], deadCut=am_corr_bad[1])\n",
"am.all_metrics = {} # this saves time and disk by getting rid of per-iteration information we never use\n",
"if SAVE_RESULTS:\n",
" am.save_antenna_metrics(AM_FILE, overwrite=True)"
"if USE_DIFF:\n",
" am = ant_metrics.AntennaMetrics(SUM_FILE, DIFF_FILE, sum_data=data, diff_data=diff_data)\n",
" am.iterative_antenna_metrics_and_flagging(crossCut=am_xpol_bad[1], deadCut=am_corr_bad[1])\n",
" am.all_metrics = {} # this saves time and disk by getting rid of per-iteration information we never use\n",
" if SAVE_RESULTS:\n",
" am.save_antenna_metrics(AM_FILE, overwrite=True)"
]
},
{
Expand All @@ -333,14 +377,15 @@
},
"outputs": [],
"source": [
"# Turn ant metrics into classifications\n",
"totally_dead_ants = [ant for ant, i in am.xants.items() if i == -1]\n",
"am_totally_dead = ant_class.AntennaClassification(good=[ant for ant in ants if ant not in totally_dead_ants], bad=totally_dead_ants)\n",
"am_corr = ant_class.antenna_bounds_checker(am.final_metrics['corr'], bad=[am_corr_bad], suspect=[am_corr_suspect], good=[(0, 1)])\n",
"am_xpol = ant_class.antenna_bounds_checker(am.final_metrics['corrXPol'], bad=[am_xpol_bad], suspect=[am_xpol_suspect], good=[(-1, 1)])\n",
"ant_metrics_class = am_totally_dead + am_corr + am_xpol\n",
"if np.all([ant_metrics_class[utils.split_bl(bl)[0]] == 'bad' for bl in auto_bls]):\n",
" raise ValueError('All antennas are flagged for ant_metrics.')"
"if USE_DIFF:\n",
" # Turn ant metrics into classifications\n",
" totally_dead_ants = [ant for ant, i in am.xants.items() if i == -1]\n",
" am_totally_dead = ant_class.AntennaClassification(good=[ant for ant in ants if ant not in totally_dead_ants], bad=totally_dead_ants)\n",
" am_corr = ant_class.antenna_bounds_checker(am.final_metrics['corr'], bad=[am_corr_bad], suspect=[am_corr_suspect], good=[(0, 1)])\n",
" am_xpol = ant_class.antenna_bounds_checker(am.final_metrics['corrXPol'], bad=[am_xpol_bad], suspect=[am_xpol_suspect], good=[(-1, 1)])\n",
" # FIXME(review): ant_metrics_class is used here but is only assigned after this\n",
" # if-block, so this check raises NameError when USE_DIFF is True. Move the summation\n",
" # above the check (as in the pre-change version of this cell).\n",
" if np.all([ant_metrics_class[utils.split_bl(bl)[0]] == 'bad' for bl in auto_bls]):\n",
" raise ValueError('All antennas are flagged for ant_metrics.')\n",
"ant_metrics_class = am_totally_dead + am_corr + am_xpol "
]
},
{
Expand Down Expand Up @@ -379,7 +424,8 @@
"metadata": {},
"outputs": [],
"source": [
"zeros_class = ant_class.even_odd_zeros_checker(data, diff_data, good=good_zeros_per_eo_spectrum, suspect=suspect_zeros_per_eo_spectrum)\n",
"if USE_DIFF:\n",
" zeros_class = ant_class.even_odd_zeros_checker(data, diff_data, good=good_zeros_per_eo_spectrum, suspect=suspect_zeros_per_eo_spectrum)\n",
"if np.all([zeros_class[utils.split_bl(bl)[0]] == 'bad' for bl in auto_bls]):\n",
" raise ValueError('All antennas are flagged for too many even/odd zeros.')"
]
Expand Down Expand Up @@ -458,12 +504,14 @@
"metadata": {},
"outputs": [],
"source": [
"xengine_diff_class = ant_class.non_noiselike_diff_by_xengine_checker(data, diff_data, flag_waterfall=array_flags, \n",
" antenna_class=overall_class, \n",
" xengine_chans=96, bad_xengine_zcut=bad_xengine_zcut)\n",
"overall_class += xengine_diff_class\n",
"if np.all([overall_class[utils.split_bl(bl)[0]] == 'bad' for bl in auto_bls]):\n",
" raise ValueError('All antennas are flagged after flagging non-noiselike diffs.')"
"if USE_DIFF:\n",
" # Diffs are available, so recompute the x-engine diff classification directly;\n",
" # otherwise the CSV-derived xengine_diff_class loaded earlier is used instead.\n",
" xengine_diff_class = ant_class.non_noiselike_diff_by_xengine_checker(data, diff_data, flag_waterfall=array_flags, \n",
" antenna_class=overall_class, \n",
" xengine_chans=96, bad_xengine_zcut=bad_xengine_zcut)\n",
" \n",
" # NOTE(review): this check now runs before overall_class absorbs xengine_diff_class\n",
" # (the += was moved below the if-block), so the error message 'after flagging\n",
" # non-noiselike diffs' no longer matches the state being checked -- confirm ordering.\n",
" if np.all([overall_class[utils.split_bl(bl)[0]] == 'bad' for bl in auto_bls]):\n",
" raise ValueError('All antennas are flagged after flagging non-noiselike diffs.')\n",
"overall_class += xengine_diff_class"
]
},
{
Expand Down Expand Up @@ -738,7 +786,9 @@
"outputs": [],
"source": [
"# delete diffs to save memory\n",
"del diff_data, hd_diff, cache\n",
"if USE_DIFF:\n",
" del diff_data, hd_diff\n",
"del cache\n",
"malloc_trim()"
]
},
Expand Down

0 comments on commit e73a837

Please sign in to comment.