Commit

add per-baseline signal loss plot and baselines excluded due to too high signal loss

plus some other parameter tweaks, TODOs added, etc.
jsdillon committed Oct 29, 2024
1 parent bd76c9f commit 8c2a233
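
The substance of this commit is a per-band cut on fringe-rate-filter signal loss: each redundantly-averaged baseline's estimated loss is plotted in a new Figure 2, and any baseline pair whose loss in a given spectral window exceeds MAX_FRF_SIGNAL_LOSS is excluded from the SNR histograms and the spherical average. A condensed sketch of that cut is below; it reuses only names that appear in the diff (frf_losses, MAX_FRF_SIGNAL_LOSS, uvp, uvp_tavg_copy) and assumes those objects are already defined earlier in the notebook, so it is illustrative rather than a standalone script.

```python
# Sketch of the signal-loss cut added in this commit (cf. the spherical-averaging hunk below).
# frf_losses[blp][spw] is the fractional signal loss from the fringe-rate and cross-talk
# filters for baseline pair blp in spectral window spw; baselines whose loss exceeds
# MAX_FRF_SIGNAL_LOSS (0.1 by default) are dropped from that band before averaging.
for spw in uvp.spw_array:
    blps_to_use = [blp for blp in uvp_tavg_copy.get_blpairs()
                   if frf_losses[blp][spw] <= MAX_FRF_SIGNAL_LOSS]
    uvp_tavg_this_spw = uvp_tavg_copy.select(spws=[spw], blpairs=blps_to_use, inplace=False)
```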
Showing 1 changed file with 112 additions and 17 deletions.
129 changes: 112 additions & 17 deletions notebooks/power_spectrum_summary.ipynb
@@ -7,17 +7,18 @@
"source": [
"# Power Spectrum Summary\n",
"\n",
"**by Josh Dillon**, last updated October 17, 2024\n",
"**by Josh Dillon**, last updated October 28, 2024\n",
"\n",
"The purpose of this notebook is to pull together results from power spectra from single, redundantly-averaged baselines (typically cross-power spectra from interleaved sets of times) as produced by the [Single Baseline Filtering and Power Spectrum Estimation\n",
"notebook](https://github.com/HERA-Team/hera_notebook_templates/blob/master/notebooks/single_baseline_postprocessing_and_pspec.ipynb). It is supposed to be roughly comparable to [a similar notebook from H1C](https://github.com/HERA-Team/H1C_IDR3_Power_Spectra/blob/main/SPOILERS/All_Epochs_Power_Spectra/H1C_IDR3_Power_Spectra.ipynb).\n",
"\n",
"# [• Figure 1: P(k) Averaged Over Baseline vs. LST](#Figure-1:-P(k)-Averaged-Over-Baseline-vs.-LST)\n",
"# [• Figure 2: Per-Baseline, Time-Averaged High Delay Average SNR](#Figure-2:-Per-Baseline,-Time-Averaged-High-Delay-Average-SNR)\n",
"# [• Figure 3: Histograms of Time-Averaged High-Delay SNRs](#Figure-3:-Histograms-of-Time-Averaged-High-Delay-SNRs)\n",
"# [• Figure 4: Time-Averaged Cylindrical P(k)](#Figure-4:-Time-Averaged-Cylindrical-P(k))\n",
"# [• Figure 5: Time-Averaged Cylindrical SNR](#Figure-5:-Time-Averaged-Cylindrical-SNR)\n",
"# [• Figure 6: Spherically-Averaged $\\Delta^2$](#Figure-6:-Spherically-Averaged-%24%5CDelta%5E2%24)\n",
"# [• Figure 2: Per-Baseline Signal Loss Corrections](#Figure-2:-Per-Baseline-Signal-Loss-Corrections)\n",
"# [• Figure 3: Per-Baseline, Time-Averaged High Delay Average SNR](#Figure-3:-Per-Baseline,-Time-Averaged-High-Delay-Average-SNR)\n",
"# [• Figure 4: Histograms of Time-Averaged High-Delay SNRs](#Figure-4:-Histograms-of-Time-Averaged-High-Delay-SNRs)\n",
"# [• Figure 5: Time-Averaged Cylindrical P(k)](#Figure-5:-Time-Averaged-Cylindrical-P(k))\n",
"# [• Figure 6: Time-Averaged Cylindrical SNR](#Figure-6:-Time-Averaged-Cylindrical-SNR)\n",
"# [• Figure 7: Spherically-Averaged $\\Delta^2$](#Figure-7:-Spherically-Averaged-%24%5CDelta%5E2%24)\n",
"# [• Table 1: Power Spectra, Error Bars, and Upper Limits](#Table-1:-Power-Spectra,-Error-Bars,-and-Upper-Limits)"
]
},
@@ -84,15 +85,19 @@
"# COMBINED_PSPEC_FILE = '/lustre/aoc/projects/hera/jsdillon/H6C/PSPEC/pspec_out/all_baselines_interleaved_IDR2.3_500ns_14band.pspec.h5'\n",
"COHERENT_AVG_CORRECTION_FACTOR_FILE = os.environ.get(\"COHERENT_AVG_CORRECTION_FACTOR_FILE\", '') \n",
"# COHERENT_AVG_CORRECTION_FACTOR_FILE = '/lustre/aoc/projects/hera/jsdillon/H6C/PSPEC/pspec_out/all_baselines_interleaved_IDR2.3_500ns_14band.coherent_avg_correction_factor.p'\n",
"FRF_SIGNAL_LOSS_FILE = os.environ.get(\"FRF_SIGNAL_LOSS_FILE\", '') \n",
"# FRF_SIGNAL_LOSS_FILE = '/lustre/aoc/projects/hera/jsdillon/H6C/PSPEC/pspec_out/all_baselines_interleaved_IDR2.3_500ns_14band.frf_signal_loss.p'\n",
"RELOAD_PSPEC = os.environ.get(\"RELOAD_PSPEC\", \"TRUE\").upper() == \"TRUE\"\n",
"RELOAD_PSPEC = False\n",
"# RELOAD_PSPEC = False\n",
"\n",
"RESULTS_FOLDER = os.environ.get(\"RESULTS_FOLDER\", '') \n",
"# RESULTS_FOLDER = '/lustre/aoc/projects/hera/jsdillon/H6C/PSPEC/pspec_out/500ns_14band_results/'\n",
"\n",
"BANDS_TO_USE = [int(band) for band in os.environ.get(\"BANDS_TO_USE\", \"1,2,3,5,6,9,10,13\").split(\",\")] # 1 indexed\n",
"\n",
"WEDGE_BUFFER_NS = float(os.environ.get(\"WEDGE_BUFFER_NS\", 300))\n",
"WEDGE_BUFFER_NS = float(os.environ.get(\"WEDGE_BUFFER_NS\", 500))\n",
"\n",
"MAX_FRF_SIGNAL_LOSS = float(os.environ.get(\"MAX_FRF_SIGNAL_LOSS\", .1))\n",
"\n",
"LST_RANGE_HOURS = [float(lst) for lst in os.environ.get(\"LST_RANGE_HOURS\", \"1.5,5.5\").split(\",\")]\n",
"if LST_RANGE_HOURS[0] > LST_RANGE_HOURS[-1]:\n",
@@ -187,6 +192,8 @@
" # write dpss_coherent_avg_corrections to a pickle\n",
" with open(COHERENT_AVG_CORRECTION_FACTOR_FILE, 'wb') as f:\n",
" pickle.dump(dpss_coherent_avg_corrections, f)\n",
" with open(FRF_SIGNAL_LOSS_FILE, 'wb') as f:\n",
" pickle.dump(frf_losses, f)\n",
" \n",
" uvp = hp.uvpspec.recursive_combine_uvpspec(list(uvp_by_blp.values()))\n",
" psc = hp.PSpecContainer(COMBINED_PSPEC_FILE, mode='rw', keep_open=False)\n",
@@ -196,7 +203,9 @@
" psc = hp.PSpecContainer(COMBINED_PSPEC_FILE, mode='rw', keep_open=False)\n",
" uvp = psc.get_pspec('stokespol', 'all_baselines')\n",
" with open(COHERENT_AVG_CORRECTION_FACTOR_FILE, 'rb') as f:\n",
" dpss_coherent_avg_corrections = pickle.load(f) "
" dpss_coherent_avg_corrections = pickle.load(f)\n",
" with open(FRF_SIGNAL_LOSS_FILE, 'rb') as f:\n",
" frf_losses = pickle.load(f)"
]
},
{
@@ -395,6 +404,71 @@
" # TODO: Jianrong to figure out whether P_SN should be treated differently here"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "132c965a-d596-407a-a9b0-eb38fcdc8aee",
"metadata": {},
"outputs": [],
"source": [
"def plot_baseline_signal_loss():\n",
" # Show signal loss correction applied to each baseline as a function of spectral window, highlighting those with too much signal loss \n",
" fig, axes = plt.subplots(int(np.ceil(uvp.Nspws / 2)),2, figsize=(12, 12), sharex=True, sharey=True, gridspec_kw={'wspace': 0, 'hspace':0})\n",
" \n",
" for spw, ax in enumerate(axes.ravel()):\n",
" if spw == len(uvp.spw_array):\n",
" break\n",
" \n",
" bl_vecs = {}\n",
" for key in uvp_tavg.get_all_keys():\n",
" _spw, blp, pp = key\n",
" if _spw != spw: \n",
" continue\n",
" if pp[0] != 'pI':\n",
" continue\n",
" blv = blp_to_blvec_dict[blp]\n",
" if blv[1] < 0:\n",
" bl_vecs[blp] = -blv\n",
" else:\n",
" bl_vecs[blp] = blv\n",
" \n",
" if spw % 2 == 0:\n",
" ax.set_ylabel('NS Baseline Component (m)')\n",
" if spw >= int(np.ceil(uvp.Nspws / 2)) - 2:\n",
" ax.set_xlabel('EW Baseline Component (m)')\n",
" \n",
" blps = list(bl_vecs.keys())\n",
" im = ax.scatter([bl_vecs[blp][0] for blp in blps], [bl_vecs[blp][1] for blp in blps], c=[frf_losses[blp][spw] for blp in blps], \n",
" edgecolors=['r' if (frf_losses[blp][spw] > MAX_FRF_SIGNAL_LOSS) else 'none' for blp in blps], linewidths=.5,\n",
" s=20, cmap='viridis', vmin=0, vmax=min(2 * MAX_FRF_SIGNAL_LOSS, 1))\n",
" \n",
" ax.text(ax.get_xlim()[0] + 10, ax.get_ylim()[-1] - 10, f'Band {spw + 1}\\nz = {zs[spw]:.1f}', ha='left', va='top',\n",
" bbox=dict(facecolor='w', edgecolor='black', alpha=.75, boxstyle='round', ls='-'))\n",
" \n",
" plt.tight_layout()\n",
" plt.colorbar(im, ax=axes, pad=.02, aspect=40, extend='max', location='top', label=f'Signal Loss due to Fringe Rate and Cross Talk Filters')\n",
" plt.savefig(os.path.join(RESULTS_FOLDER, 'per_baseline_signal_loss_all_bands.pdf'), bbox_inches='tight')"
]
},
{
"cell_type": "markdown",
"id": "96a2a02e-1fd6-4264-ab03-0f19da6cb778",
"metadata": {},
"source": [
"# Figure 2: Per-Baseline Signal Loss Corrections\n",
"Baselines outlined in red have too much signal loss and are excluded from the final power spectrum."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "30f64341-c302-4c6c-ba61-2242eec0ee1c",
"metadata": {},
"outputs": [],
"source": [
"plot_baseline_signal_loss()"
]
},
{
"cell_type": "code",
"execution_count": null,
@@ -436,7 +510,9 @@
" ax.set_xlabel('EW Baseline Component (m)')\n",
" \n",
" blps = list(bl_vecs.keys())\n",
" \n",
" im = ax.scatter([bl_vecs[blp][0] for blp in blps], [bl_vecs[blp][1] for blp in blps], c=[snr_avgs[blp] for blp in blps], \n",
" edgecolors=['k' if (frf_losses[blp][spw] >= MAX_FRF_SIGNAL_LOSS) else 'none' for blp in blps], linewidths=.5,\n",
" s=20, cmap='bwr', vmin=-2, vmax=2)\n",
" \n",
" ax.text(ax.get_xlim()[0] + 10, ax.get_ylim()[-1] - 10, f'Band {spw + 1}\\nz = {zs[spw]:.1f}', ha='left', va='top',\n",
@@ -452,7 +528,8 @@
"id": "2f556bdf-9281-41bb-bd8c-46ab1cfc7e89",
"metadata": {},
"source": [
"# Figure 2: Per-Baseline, Time-Averaged High Delay Average SNR"
"# Figure 3: Per-Baseline, Time-Averaged High Delay Average SNR\n",
"Baselines outlined in black have too much signal loss and are excluded from the final power spectrum."
]
},
{
@@ -490,6 +567,8 @@
" continue\n",
" if pp[0] != 'pI':\n",
" continue\n",
" if frf_losses[blp][spw] > MAX_FRF_SIGNAL_LOSS:\n",
" continue\n",
" SNRs = uvp_tavg.get_data(key)[0, dlys_to_use] / uvp_tavg.get_stats('P_N', key)[0, dlys_to_use].real\n",
" to_hist.extend(SNRs)\n",
" \n",
@@ -524,7 +603,7 @@
"id": "045db541-798c-4174-b898-6d28d21ed43e",
"metadata": {},
"source": [
"# Figure 3: Histograms of Time-Averaged High-Delay SNRs"
"# Figure 4: Histograms of Time-Averaged High-Delay SNRs"
]
},
{
@@ -547,6 +626,7 @@
"def plot_cylindrical_Pk(SNR=False):\n",
" # TODO: update to show k_|| and k_perp\n",
" # TODO: update to show wedge buffer\n",
" # TODO: exclude high signal loss baselines\n",
" fig, axes = plt.subplots(int(np.ceil(uvp.Nspws / 2)),2, figsize=(12, 12), \n",
" sharex=True, sharey=True, gridspec_kw={'wspace': 0, 'hspace':0})\n",
"\n",
@@ -590,7 +670,7 @@
"id": "f1727342-717e-4acc-b0c0-c156b757bc0a",
"metadata": {},
"source": [
"# Figure 4: Time-Averaged Cylindrical P(k)"
"# Figure 5: Time-Averaged Cylindrical P(k)"
]
},
{
@@ -608,7 +688,7 @@
"id": "64786842-f69f-48cc-888d-ef78e76d3e01",
"metadata": {},
"source": [
"# Figure 5: Time-Averaged Cylindrical SNR"
"# Figure 6: Time-Averaged Cylindrical SNR"
]
},
{
@@ -621,6 +701,17 @@
"plot_cylindrical_Pk(SNR=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "085d7c11-241e-4e1b-9b27-ec1082203920",
"metadata": {},
"outputs": [],
"source": [
"# TODO: \n",
"# provide 2D Delta^2 with error covariance and window functions"
]
},
{
"cell_type": "markdown",
"id": "b477cde3-f92a-48b5-84c3-71b64a870baf",
@@ -657,7 +748,8 @@
"for spw in tqdm(uvp.spw_array):\n",
" dk = dk_multiplier * np.median(np.diff(uvp_tavg_copy.get_kparas(spw)))\n",
" kbins = np.arange(k_start_multiplier * dk, 2.5, dk) # even spacing \n",
" uvp_tavg_this_spw = uvp_tavg_copy.select(spws=[spw], inplace=False)\n",
" blps_to_use = [blp for blp in uvp_tavg_copy.get_blpairs() if frf_losses[blp][spw] <= MAX_FRF_SIGNAL_LOSS]\n",
" uvp_tavg_this_spw = uvp_tavg_copy.select(spws=[spw], blpairs=blps_to_use, inplace=False)\n",
" sph_avgs.append(hp.grouping.spherical_average(uvp_tavg_this_spw, kbins, dk, error_weights='P_N'))\n",
"\n",
"delta_sqs = [sph_avg.convert_to_deltasq(inplace=False) for sph_avg in sph_avgs]"
@@ -702,7 +794,10 @@
" ax.errorbar(k, Dsq, yerr=(2 * psn_error), marker='o', ms=6, ls='', c='deeppink', label='1$\\\\sigma$ Noise Level')\n",
" ax.plot(k, pn_error, c='k', ls='--', lw=3, label='PRELIMINARY Power Spectrum with 2$\\\\sigma$ Error Bars')\n",
" ax.set_yscale('log')\n",
" ax.set_ylim([1, ax.get_ylim()[1]])\n",
" if spw >= uvp.Nspws / 2:\n",
" ax.set_ylim([1, 2e6])\n",
" else:\n",
" ax.set_ylim([1, 1e9])\n",
" ax.grid()\n",
" ax.text(ax.get_xlim()[-1] - .1, ax.get_ylim()[0]*3, f'Band {spw+1}\\nz = {z:.1f}', ha='right', va='bottom',\n",
" bbox=dict(facecolor='w', edgecolor='black', alpha=.75, boxstyle='round', ls='-'))\n",
Expand All @@ -724,7 +819,7 @@
"id": "039ff996-0299-4ac1-a8be-d99d237f3a45",
"metadata": {},
"source": [
"# Figure 6: Spherically-Averaged $\\Delta^2$"
"# Figure 7: Spherically-Averaged $\\Delta^2$"
]
},
{
Expand Down Expand Up @@ -854,7 +949,7 @@
{
"cell_type": "code",
"execution_count": null,
"id": "d34b9dc8-117f-49b9-8814-fa4e8424aff5",
"id": "b768a18e-4d4a-4989-8add-60315cbbf442",
"metadata": {},
"outputs": [],
"source": []
