Merge pull request #14 from actris-cloudnet/scan-time
Fix scan time issues
tukiains authored Mar 22, 2024
2 parents ed4eef1 + fac7270 commit 33cd433
Showing 3 changed files with 31 additions and 30 deletions.
29 changes: 14 additions & 15 deletions mwrpy/level1/write_lev1_nc.py
@@ -140,18 +140,16 @@ def prepare_data(
rpg_bin.data["status"][ind_brt, :] = hkd_sanity_check(
rpg_hkd.data["status"][ind_hkd], params
)
if params["scan_time"] != Fill_Value_Int:
file_list_bls = get_file_list(path_to_files, "BLS")
if len(file_list_bls) > 0:
rpg_bls = RpgBin(file_list_bls)
_add_bls(rpg_bin, rpg_bls, rpg_hkd, params)
else:
file_list_blb = get_file_list(path_to_files, "BLB")
if len(file_list_blb) > 0 and np.any(
rpg_hkd.data["status"][:] & 2**18 > 0
):
rpg_blb = RpgBin(file_list_blb)
_add_blb(rpg_bin, rpg_blb, rpg_hkd, params)

file_list_bls = get_file_list(path_to_files, "BLS")
if len(file_list_bls) > 0:
rpg_bls = RpgBin(file_list_bls)
_add_bls(rpg_bin, rpg_bls, rpg_hkd, params)
else:
file_list_blb = get_file_list(path_to_files, "BLB")
if len(file_list_blb) > 0 and np.any(rpg_hkd.data["status"][:] & 2**18 > 0):
rpg_blb = RpgBin(file_list_blb)
_add_blb(rpg_bin, rpg_blb, rpg_hkd, params)

if params["azi_cor"] != Fill_Value_Float:
_azi_correction(rpg_bin.data, params)
@@ -405,13 +403,14 @@ def _add_blb(brt: RpgBin, blb: RpgBin, hkd: RpgBin, params: dict) -> None:
if bool(key) is True
]
)
scan_time = np.median(seqs[1:-1, 2])

for time_ind, time_blb in enumerate(blb.data["time"]):
seqi = np.where(
np.abs(hkd.data["time"][seqs[:, 1] + seqs[:, 2] - 1] - time_blb) < 60
)[0]
if len(seqi) != 1:
time_blb = time_blb + int(params["scan_time"])
time_blb = time_blb + int(scan_time)
seqi = np.where(
np.abs(hkd.data["time"][seqs[:, 1] + seqs[:, 2] - 1] - time_blb) < 60
)[0]
@@ -476,15 +475,15 @@ def _add_blb(brt: RpgBin, blb: RpgBin, hkd: RpgBin, params: dict) -> None:
if len(time_bnds_add) == 0:
time_bnds_add = add_time_bounds(
time_add,
int(np.floor(params["scan_time"] / (blb.header["_n_ang"]))),
int(np.floor(scan_time / (blb.header["_n_ang"]))),
)
else:
time_bnds_add = np.concatenate(
(
time_bnds_add,
add_time_bounds(
time_add,
int(np.floor(params["scan_time"] / (blb.header["_n_ang"]))),
int(np.floor(scan_time / (blb.header["_n_ang"]))),
),
)
)
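In short, the Level 1 change removes the dependence on the configured scan_time: _add_blb now estimates the scan duration directly from the housekeeping (HKD) sequence table as the median length of the interior scan sequences (scan_time = np.median(seqs[1:-1, 2])), so the surrounding BLS/BLB handling no longer needs the if params["scan_time"] != Fill_Value_Int guard. A minimal sketch of that idea, assuming each row of seqs describes one boundary-layer scan and its third column is the number of HKD samples in the sequence (effectively seconds at the nominal 1 Hz housekeeping rate); the helper below is illustrative, not part of mwrpy:

    import numpy as np

    def estimate_scan_time(seqs: np.ndarray) -> float:
        # Illustrative helper (not part of mwrpy).
        # The first and last sequences may be cut off at the file edges,
        # so they are excluded before taking the median.
        if len(seqs) < 3:
            raise ValueError("need at least three scan sequences")
        return float(np.median(seqs[1:-1, 2]))
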
29 changes: 17 additions & 12 deletions mwrpy/level2/write_lev2_nc.py
@@ -14,7 +14,6 @@
from mwrpy.level2.lev2_meta_nc import get_data_attributes
from mwrpy.level2.lwp_offset import correct_lwp_offset
from mwrpy.utils import (
add_time_bounds,
get_ret_info,
interpol_2d,
interpolate_2d,
@@ -67,7 +66,7 @@ def lev2_to_nc(
with nc.Dataset(lev1_file) as lev1:
params["altitude"] = np.median(lev1.variables["altitude"][:])

rpg_dat, coeff, index = get_products(
rpg_dat, coeff, index, scan_time = get_products(
site,
lev1,
data_type,
@@ -76,7 +75,7 @@
temp_file=temp_file,
hum_file=hum_file,
)
_combine_lev1(lev1, rpg_dat, index, data_type, params)
_combine_lev1(lev1, rpg_dat, index, data_type, scan_time)
_del_att(global_attributes)
hatpro = rpg_mwr.Rpg(rpg_dat)
hatpro.data = get_data_attributes(hatpro.data, data_type, coeff)
@@ -91,15 +90,15 @@ def get_products(
coeff_files: list | None,
temp_file: str | None = None,
hum_file: str | None = None,
) -> tuple[dict, dict, np.ndarray]:
) -> tuple[dict, dict, np.ndarray, np.ndarray]:
"""Derive specified Level 2 products."""

if "elevation_angle" in lev1.variables:
elevation_angle = lev1["elevation_angle"][:]
else:
elevation_angle = 90 - lev1["zenith_angle"][:]

rpg_dat, coeff, index = {}, {}, np.empty(0)
rpg_dat, coeff, index, scan_time = {}, {}, np.empty(0), np.empty(0)

if data_type in ("2I01", "2I02"):
product = "lwp" if data_type == "2I01" else "iwv"
@@ -351,10 +350,11 @@ get_products(
& (lev1["pointing_flag"][:] == 1)
& (np.arange(len(lev1["time"])) + len(coeff["AG"]) < len(lev1["time"]))
)[0]
ibl, tb = (
ibl, tb, scan_time = (
np.empty([0, len(coeff["AG"])], np.int32),
np.ones((len(freq_ind), len(coeff["AG"]), 0), np.float32)
* Fill_Value_Float,
np.empty(0, np.int32),
)

for ix0v in ix0:
@@ -367,6 +367,11 @@
atol=0.5,
)
):
scan_time = np.append(
scan_time,
[np.array(lev1["time"][ix1v - 1] - lev1["time"][ix0v])],
axis=0,
)
ibl = np.append(ibl, [np.array(range(ix0v, ix1v))], axis=0)
tb = np.concatenate(
(
@@ -513,9 +518,9 @@ def get_products(
rpg_dat,
np.arange(len(tem_dat.variables["time"][:])),
data_type,
params,
scan_time,
)
return rpg_dat, coeff, index
return rpg_dat, coeff, index, scan_time


def _get_qf(
@@ -540,7 +545,7 @@ def _combine_lev1(
rpg_dat: dict,
index: np.ndarray,
data_type: str,
params: dict,
scan_time: np.ndarray,
) -> None:
"""add level1 data"""
lev1_vars = [
@@ -558,9 +563,9 @@
if ivars not in lev1.variables:
continue
if (ivars == "time_bnds") & (data_type == "2P02"):
rpg_dat[ivars] = add_time_bounds(
lev1["time"][index], params["scan_time"]
)
rpg_dat[ivars] = np.ndarray((len(index), 2))
rpg_dat[ivars][:, 0] = lev1["time"][index] - scan_time
rpg_dat[ivars][:, 1] = lev1["time"][index]
elif (ivars == "time_bnds") & (data_type in ("2P04", "2P07", "2P08")):
rpg_dat[ivars] = np.ones(lev1[ivars].shape, np.int32) * Fill_Value_Int
else:
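On the Level 2 side, get_products now measures each boundary-layer scan's duration (lev1["time"][ix1v - 1] - lev1["time"][ix0v]) and returns it as scan_time, and _combine_lev1 uses those per-scan durations to build time_bnds for the 2P02 product instead of calling the removed add_time_bounds(..., params["scan_time"]). A rough sketch of the resulting bounds, assuming time and scan_time are equal-length 1-D arrays in seconds (the helper name is illustrative, not part of mwrpy):

    import numpy as np

    def scan_time_bounds(time: np.ndarray, scan_time: np.ndarray) -> np.ndarray:
        # Illustrative helper (not part of mwrpy): (n, 2) bounds where each
        # scan starts scan_time seconds before its timestamp and ends at the
        # timestamp, mirroring the 2P02 branch of _combine_lev1.
        bounds = np.empty((len(time), 2))
        bounds[:, 0] = time - scan_time  # scan start
        bounds[:, 1] = time              # scan end
        return bounds
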
3 changes: 0 additions & 3 deletions mwrpy/site_config/hatpro.yaml
@@ -95,9 +95,6 @@ params:
# integration time of measurements in seconds
int_time: 1

# integration time for BL scans in seconds
scan_time: 50.

# Azimuth angle is transformed to geographical coordinates (E=90 and W=270), currently only for RPG scanners.
# If you do not want to transform the coordinates set azi_cor to -999.
azi_cor: -999.
