diff --git a/heracles/fields.py b/heracles/fields.py
index 07f867c..543150c 100644
--- a/heracles/fields.py
+++ b/heracles/fields.py
@@ -305,10 +305,14 @@ async def __call__(
         del vis
 
         # compute bias of positions, including weight variance
-        bias = ngal / (4 * np.pi) * mapper.area**2 * (w2mean / nbar**2)
+        musq = 1.0
+        dens = (nbar / mapper.area) ** 2 / (ngal / (4 * np.pi * fsky)) / w2mean
+        bias = fsky * musq / dens
 
         # set metadata of array
-        update_metadata(pos, catalog, nbar=nbar, bias=bias)
+        update_metadata(
+            pos, catalog, nbar=nbar, musq=musq, dens=dens, fsky=fsky, bias=bias
+        )
 
         # return the position map
         return pos
@@ -338,7 +342,7 @@ async def __call__(
 
         # total weighted variance from online algorithm
         ngal = 0
-        wmean, var = 0.0, 0.0
+        wmean, w2mean, var = 0.0, 0.0, 0.0
 
         # go through pages in catalogue and map values
         async for page in aiter_pages(catalog, progress):
@@ -354,6 +358,7 @@ async def __call__(
 
             ngal += page.size
             wmean += (w - wmean).sum() / ngal
+            w2mean += (w**2 - w2mean).sum() / ngal
             var += (v**2 - var).sum() / ngal
 
             del lon, lat, v, w
@@ -371,10 +376,15 @@ async def __call__(
         val /= wbar
 
         # compute bias from variance (per object)
-        bias = 4 * np.pi * fsky**2 * (var / wmean**2) / ngal
+        musq = var / w2mean
+        deff = w2mean / wmean**2
+        dens = ngal / (4 * np.pi * fsky) / deff
+        bias = fsky * musq / dens
 
         # set metadata of array
-        update_metadata(val, catalog, wbar=wbar, bias=bias)
+        update_metadata(
+            val, catalog, wbar=wbar, musq=musq, dens=dens, fsky=fsky, bias=bias
+        )
 
         # return the value map
         return val
@@ -409,7 +419,7 @@ async def __call__(
 
         # total weighted variance from online algorithm
         ngal = 0
-        wmean, var = 0.0, 0.0
+        wmean, w2mean, var = 0.0, 0.0, 0.0
 
         # go through pages in catalogue and get the shear values,
         async for page in aiter_pages(catalog, progress):
@@ -425,6 +435,7 @@ async def __call__(
 
             ngal += page.size
             wmean += (w - wmean).sum() / ngal
+            w2mean += (w**2 - w2mean).sum() / ngal
             var += (re**2 + im**2 - var).sum() / ngal
 
             del lon, lat, re, im, w
@@ -441,10 +452,15 @@ async def __call__(
         val /= wbar
 
         # bias from measured variance, for E/B decomposition
-        bias = 2 * np.pi * fsky**2 * (var / wmean**2) / ngal
+        musq = var / w2mean
+        deff = w2mean / wmean**2
+        dens = ngal / (4 * np.pi * fsky) / deff
+        bias = (1 / 2) * fsky * musq / dens
 
         # set metadata of array
-        update_metadata(val, catalog, wbar=wbar, bias=bias)
+        update_metadata(
+            val, catalog, wbar=wbar, musq=musq, dens=dens, fsky=fsky, bias=bias
+        )
 
         # return the shear map
         return val
@@ -541,10 +557,15 @@ async def __call__(
         wht /= wbar
 
         # bias from weights
-        bias = 4 * np.pi * fsky**2 * (w2mean / wmean**2) / ngal
+        musq = 1.0
+        deff = w2mean / wmean**2
+        dens = ngal / (4 * np.pi * fsky) / deff
+        bias = fsky * musq / dens
 
         # set metadata of array
-        update_metadata(wht, catalog, wbar=wbar, bias=bias)
+        update_metadata(
+            wht, catalog, wbar=wbar, musq=musq, dens=dens, fsky=fsky, bias=bias
+        )
 
         # return the weight map
         return wht
diff --git a/tests/test_fields.py b/tests/test_fields.py
index d8fff0a..e4909b8 100644
--- a/tests/test_fields.py
+++ b/tests/test_fields.py
@@ -204,6 +204,9 @@ def test_positions(mapper, catalog, vmap):
         "nside": mapper.nside,
         "lmax": mapper.lmax,
         "deconv": mapper.deconvolve,
+        "musq": 1.0,
+        "dens": pytest.approx(npix / np.pi),
+        "fsky": 1.0,
         "bias": pytest.approx(bias / nbar**2),
     }
     np.testing.assert_array_equal(m, 0)
@@ -223,6 +226,9 @@ def test_positions(mapper, catalog, vmap):
         "nside": mapper.nside,
         "lmax": mapper.lmax,
         "deconv": mapper.deconvolve,
+        "musq": 1.0,
+        "dens": pytest.approx(npix / np.pi),
+        "fsky": 1.0,
         "bias": pytest.approx(bias / nbar**2),
     }
     np.testing.assert_array_equal(m, 1.0)
@@ -246,6 +252,9 @@ def test_positions(mapper, catalog, vmap):
         "nside": mapper.nside,
         "lmax": mapper.lmax,
         "deconv": mapper.deconvolve,
+        "musq": 1.0,
+        "dens": pytest.approx(npix / (np.pi * catalog.fsky)),
+        "fsky": catalog.fsky,
         "bias": pytest.approx(bias / nbar**2),
     }
 
@@ -264,6 +273,9 @@ def test_positions(mapper, catalog, vmap):
         "nside": mapper.nside,
         "lmax": mapper.lmax,
         "deconv": mapper.deconvolve,
+        "musq": 1.0,
+        "dens": pytest.approx(npix / (np.pi * catalog.fsky)),
+        "fsky": catalog.fsky,
         "bias": pytest.approx(bias / nbar**2),
     }
 
@@ -287,9 +299,17 @@ def test_scalar_field(mapper, catalog):
 
     w = next(iter(catalog))["w"]
     v = next(iter(catalog))["g1"]
+    v1 = w.sum()
     v2 = ((w * v) ** 2).sum()
+    v3 = (w**2).sum()
     w = w.reshape(w.size // 4, 4).sum(axis=-1)
     wbar = w.mean()
+    wmean = v1 / (4.0 * npix)
+    w2mean = v3 / (4.0 * npix)
+    var = v2 / (4.0 * npix)
+    musq = var / w2mean
+    deff = w2mean / wmean**2
+    dens = npix / np.pi / deff
     bias = (4 * np.pi / npix / npix) * v2
 
     assert m.shape == (npix,)
@@ -302,6 +322,9 @@ def test_scalar_field(mapper, catalog):
         "nside": mapper.nside,
         "lmax": mapper.lmax,
         "deconv": mapper.deconvolve,
+        "musq": pytest.approx(musq),
+        "dens": pytest.approx(dens),
+        "fsky": 1.0,
         "bias": pytest.approx(bias / wbar**2),
     }
     np.testing.assert_array_almost_equal(m, 0)
@@ -318,9 +341,17 @@ def test_complex_field(mapper, catalog):
     w = next(iter(catalog))["w"]
     re = next(iter(catalog))["g1"]
     im = next(iter(catalog))["g2"]
+    v1 = w.sum()
     v2 = ((w * re) ** 2 + (w * im) ** 2).sum()
+    v3 = (w**2).sum()
     w = w.reshape(w.size // 4, 4).sum(axis=-1)
     wbar = w.mean()
+    wmean = v1 / (4.0 * npix)
+    w2mean = v3 / (4.0 * npix)
+    var = v2 / (4.0 * npix)
+    musq = var / w2mean
+    deff = w2mean / wmean**2
+    dens = npix / np.pi / deff
     bias = (4 * np.pi / npix / npix) * v2 / 2
 
     assert m.shape == (2, npix)
@@ -333,6 +364,9 @@ def test_complex_field(mapper, catalog):
         "nside": mapper.nside,
         "lmax": mapper.lmax,
         "deconv": mapper.deconvolve,
+        "fsky": 1.0,
+        "dens": pytest.approx(dens),
+        "musq": pytest.approx(musq),
         "bias": pytest.approx(bias / wbar**2),
     }
     np.testing.assert_array_almost_equal(m, 0)
@@ -351,6 +385,11 @@ def test_weights(mapper, catalog):
     w = w.reshape(w.size // 4, 4).sum(axis=-1)
     wbar = w.mean()
     bias = (4 * np.pi / npix / npix) * v2
+    v1 = w.sum()
+    wmean = v1 / (4.0 * npix)
+    w2mean = v2 / (4.0 * npix)
+    deff = w2mean / wmean**2
+    dens = npix / np.pi / deff
 
     assert m.shape == (12 * mapper.nside**2,)
     assert m.dtype.metadata == {
@@ -362,6 +401,9 @@ def test_weights(mapper, catalog):
         "nside": mapper.nside,
         "lmax": mapper.lmax,
         "deconv": mapper.deconvolve,
+        "musq": 1.0,
+        "dens": pytest.approx(dens),
+        "fsky": 1.0,
         "bias": pytest.approx(bias / wbar**2),
     }
     np.testing.assert_array_almost_equal(m, w / wbar)
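For reference, a minimal standalone sketch of what the refactoring does in the scalar-field case (not part of the patch; `old_bias` and `new_bias` are hypothetical helper names used only for illustration). The previous one-liner `4 * np.pi * fsky**2 * (var / wmean**2) / ngal` is split into a mean square value per object (`musq`), an effective weight factor (`deff`), and an effective number density (`dens`), with `bias = fsky * musq / dens` evaluating to the same number:

```python
import numpy as np


def old_bias(ngal, fsky, wmean, var):
    # previous one-line expression for the scalar-field noise bias
    return 4 * np.pi * fsky**2 * (var / wmean**2) / ngal


def new_bias(ngal, fsky, wmean, w2mean, var):
    # refactored expression from the diff above, split into named factors
    musq = var / w2mean                      # mean square value per object
    deff = w2mean / wmean**2                 # effective weight factor
    dens = ngal / (4 * np.pi * fsky) / deff  # effective number density [1/sr]
    return fsky * musq / dens


if __name__ == "__main__":
    rng = np.random.default_rng(42)
    w = rng.uniform(0.5, 1.5, size=10_000)   # per-object weights
    g = rng.normal(0.0, 0.3, size=w.size)    # per-object field values
    ngal, fsky = w.size, 0.25
    wmean, w2mean = w.mean(), (w**2).mean()
    var = ((w * g) ** 2).mean()              # mean square weighted value
    assert np.isclose(
        old_bias(ngal, fsky, wmean, var),
        new_bias(ngal, fsky, wmean, w2mean, var),
    )
```

The spin-2 case differs only by the extra factor `1 / 2`, and the position and weight maps fix `musq = 1.0`; exposing `musq`, `dens`, and `fsky` in the metadata alongside `bias` presumably lets downstream code inspect or rescale the individual factors rather than only the combined bias value.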