From 01f2a9d1b64daf0f6a866f08645c1a8810782e40 Mon Sep 17 00:00:00 2001
From: Michael Wilensky
Date: Tue, 8 Oct 2024 13:14:27 -0600
Subject: [PATCH] Use updated hera_cal

---
 ...le_baseline_postprocessing_and_pspec.ipynb | 57 +++++++++++++------
 1 file changed, 41 insertions(+), 16 deletions(-)

diff --git a/notebooks/single_baseline_postprocessing_and_pspec.ipynb b/notebooks/single_baseline_postprocessing_and_pspec.ipynb
index fd17bd9..22acaae 100644
--- a/notebooks/single_baseline_postprocessing_and_pspec.ipynb
+++ b/notebooks/single_baseline_postprocessing_and_pspec.ipynb
@@ -1309,13 +1309,13 @@     "\n",
     "\n",
     "\n",
     "    \n",
-    "    frop = FRF_cov_calc.get_frop(times, filter_cent_use=[np.mean(fr_ranges[band]) / 1000], \n",
-    "                                 filter_half_wid_use=[np.diff(fr_ranges[band]) / 2 / 1000], \n",
-    "                                 freqs=data.freqs[band_slice], t_avg=t_avg,\n",
-    "                                 cutoff=FR_EIGENVAL_CUTOFF, weights=weights,\n",
-    "                                 rephase=rephase, wgt_tavg_by_nsample=wgt_tavg_by_nsample,\n",
-    "                                 nsamples=nsamples, bl_vec=bl_vec, dlst=dlst,\n",
-    "                                 coherent_avg=coherent_avg)\n",
+    "    frop = frf.get_frop_for_noise(times, filter_cent_use=[np.mean(fr_ranges[band]) / 1000], \n",
+    "                                  filter_half_wid_use=[np.diff(fr_ranges[band]) / 2 / 1000], \n",
+    "                                  freqs=data.freqs[band_slice], t_avg=t_avg,\n",
+    "                                  cutoff=FR_EIGENVAL_CUTOFF, weights=weights,\n",
+    "                                  rephase=rephase, wgt_tavg_by_nsample=wgt_tavg_by_nsample,\n",
+    "                                  nsamples=nsamples, bl_vec=bl_vec, dlst=dlst,\n",
+    "                                  coherent_avg=coherent_avg)\n",
     "    return frop\n",
     "\n",
     "def get_alt_frf_dat(pol=\"nn\", stream_ind=0, band_ind=0, t_avg=AVERAGING_TIME, \n",
@@ -1616,17 +1616,44 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "id": "af7a6180",
+   "id": "ef75f273",
    "metadata": {},
    "outputs": [],
    "source": [
     "if USE_CORR_MATRIX:\n",
+    "    deint_vars = []\n",
+    "    deint_frops = []\n",
     "    deint_covs = []\n",
     "\n",
     "    for stream_ind in range(NINTERLEAVE):\n",
+    "        var_dict = {}\n",
+    "        frop_dict = {}\n",
     "        cov_dict = {}\n",
+    "\n",
     "        for band_ind, band in enumerate(bands):\n",
-    "            cov_dict[band] = get_pI_cov(band_ind, stream_ind)\n",
+    "            var_dict[band] = {}\n",
+    "            frop_dict[band] = {}\n",
+    "            cov_dict[band] = {}\n",
+    "            for pol in (\"ee\", \"nn\"):\n",
+    "                cross_antpairpol = ANTPAIR + (pol,)\n",
+    "                freq_slice = band_slices[band_ind]\n",
+    "                var_dict[band][pol] = frf.prep_var_for_frop(deint_filt_data[stream_ind],\n",
+    "                                                            deint_nsamples[stream_ind],\n",
+    "                                                            deint_wgts[stream_ind],\n",
+    "                                                            cross_antpairpol,\n",
+    "                                                            freq_slice,\n",
+    "                                                            auto_ant=0)\n",
+    "                frop_dict[band][pol] = get_frop_wrapper(pol=pol, stream_ind=stream_ind, band_ind=band_ind,\n",
+    "                                                        nsamples=deint_nsamples[stream_ind][cross_antpairpol][:, freq_slice],\n",
+    "                                                        dlst=dlst, bl_vec=bl_vec[cross_antpairpol])\n",
+    "                \n",
+    "                \n",
+    "                cov_dict[band][pol] = frf.get_FRF_cov(frop_dict[band][pol], var_dict[band][pol])\n",
+    "            cov_dict[band][\"pI\"] = cov_dict[band][\"ee\"] + cov_dict[band][\"nn\"]\n",
+    "            if hd.pol_convention == \"avg\":\n",
+    "                cov_dict[band][\"pI\"] *= 0.25\n",
+    "        deint_vars.append(var_dict)\n",
+    "        deint_frops.append(frop_dict)\n",
     "        deint_covs.append(cov_dict)"
    ]
   },
@@ -1639,7 +1666,7 @@
    "source": [
     "if USE_CORR_MATRIX:\n",
     "\n",
-    "    example_cov = deint_covs[0][bands[0]]"
+    "    example_cov = deint_covs[0][bands[0]][\"pI\"]"
    ]
   },
   {
@@ -1671,7 +1698,7 @@
    "source": [
     "if USE_CORR_MATRIX:\n",
     "\n",
-    "    corr = get_corr(example_cov)\n",
+    "    corr = frf.get_corr(example_cov)\n",
     "\n",
     "\n",
     "    plt.matshow(np.abs(corr[cov_freq_ind]), norm=matplotlib.colors.LogNorm(), extent=co_lst_extent)\n",
@@ -1722,16 +1749,14 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "if USE_CORR_MATRIX:\n",
+    "corr_factor = frf.get_correction_factor_from_cov(example_cov)\n",
     "\n",
-    "    plt.plot(data.freqs[band_slices[0]] * 1e-6, get_correction_factor_from_cov(example_cov))\n",
-    "    plt.xlabel(\"Frequency (MHz)\")\n",
-    "    plt.ylabel(\"Correction factor\")"
+    "print(f\"Correction factor for example covariance (including edge times): {corr_factor}\")"
    ]
   },
   {
    "cell_type": "markdown",
-   "id": "035e9a51",
+   "id": "64bcc708",
    "metadata": {},
    "source": [
     "# Calculate the covariance matrices"