-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
6a7bf63
commit d87c3f8
Showing
4 changed files
with
226 additions
and
1 deletion.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -153,4 +153,7 @@ _html/ | |
**/*.pth | ||
|
||
# Run results | ||
results/ | ||
|
||
# Test data | ||
data/ |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,79 @@ | ||
import kbmod as kb
import numpy as np
import os

from random_selections import generate_random_selections

"""
Script for generating the false positive stamps for
kbmod-ml on hyak. Assumes that you have a number of kbmod results
run with bad angles off the ecliptic. The given paths in
this script are the ones that have been used previously,
however you can generate your own base dataset by running
a KBMOD search with the following generator_config:
```
generator_config = {
    "name": "EclipticCenteredSearch",
    "velocities": [92.0, 526.0, 257],
    "angles": [-np.pi / 4, -np.pi / 2, 128],
    "angle_units": "radian",
    "velocity_units": "pix / d",
    "given_ecliptic": None,
}
```
and then replacing the `wu_paths` and `res_paths` with your
`WorkUnit`s and result files respectively.
"""

if __name__ == "__main__":
    # Directory holding the sharded WorkUnit inputs on hyak.
    wu_dir = "/mmfs1/gscratch/dirac/wbeebe/kbmod/kbmod_wf/kbmod_new_ic/staging/stagingTNOs_3/results"
    wu_paths = [
        "uris_20x20_420au_2019-04-02_and_2019-05-07_slice0_patch5817_lim5000.collection.wu.42.repro",
        "uris_20x20_420au_2019-04-02_and_2019-05-07_slice1_patch5818_lim5000.collection.wu.42.repro",
        "uris_20x20_420au_2019-04-02_and_2019-05-07_slice2_patch5828_lim5000.collection.wu.42.repro",
        "uris_20x20_420au_2019-04-02_and_2019-05-07_slice4_patch5827_lim5000.collection.wu.42.repro",
        "uris_20x20_420au_2019-04-03_and_2019-05-05_slice0_patch7782_lim5000.collection.wu.42.repro",
        "uris_20x20_420au_2019-04-03_and_2019-05-05_slice1_patch7794_lim5000.collection.wu.42.repro",
        "uris_20x20_420au_2019-04-03_and_2019-05-05_slice3_patch7793_lim5000.collection.wu.42.repro",
        "uris_20x20_420au_2019-04-03_and_2019-05-05_slice4_patch7770_lim5000.collection.wu.42.repro",
        "uris_20x20_420au_2019-04-03_and_2019-05-05_slice5_patch7795_lim5000.collection.wu.42.repro",
        "uris_20x20_420au_2019-04-03_and_2019-05-05_slice6_patch7781_lim5000.collection.wu.42.repro",
    ]

    # Result tables, one per WorkUnit above (kept in lockstep by zip below).
    res_dir = "/mmfs1/home/maxwest/dirac/zp_corr"
    res_paths = [
        "slice0/full_result.ecsv",
        "slice1/full_result.ecsv",
        "slice2/full_result.ecsv",
        "slice4/full_result.ecsv",
        "slice0_03/full_result.ecsv",
        "slice1_03/full_result.ecsv",
        "slice3_03/full_result.ecsv",
        "slice4_03/full_result.ecsv",
        "slice5_03/full_result.ecsv",
        "slice6_03/full_result.ecsv",
    ]

    for w, r in zip(wu_paths, res_paths):
        wu = kb.work_unit.WorkUnit.from_sharded_fits(w, wu_dir)
        res = kb.results.Results.read_table(os.path.join(res_dir, r))
        vis = kb.analysis.visualizer.Visualizer(wu.im_stack, res)
        vis.generate_all_stamps(radius=12)

        # Extract the raw pixel arrays out of each result row's stamp objects.
        extract_images = lambda stamps: [s.image for s in stamps]
        y = np.apply_along_axis(extract_images, 1, vis.results["all_stamps"].data)

        coadds = []
        for row in y:
            # Drop all-zero (empty/masked) stamps before coadding.
            row = row[np.sum(row, axis=(-1, -2)) != 0]

            # BUG FIX: generate_random_selections returns a (coadds, visits)
            # 2-tuple; the original code iterated over the tuple itself,
            # appending the two *lists* rather than the individual coadd sets.
            c, _ = generate_random_selections(row, 50)
            coadds.extend(c)

        coadds = np.array(coadds)
        np.save(f"/mmfs1/home/maxwest/dirac/false_positive_stamps/{w}_stamps.npy", coadds)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,88 @@ | ||
import numpy as np | ||
|
||
from kbmod.search import ( | ||
RawImage, | ||
LayeredImage, | ||
ImageStack, | ||
PSF, | ||
get_coadded_stamps, | ||
Trajectory, | ||
StampParameters, | ||
StampType | ||
) | ||
|
||
def generate_random_selections(rows, n, variances=None, visits=None):
    """Generate ``n`` randomized coadd sets from a stack of stamps.

    Each output is built from a random subset (of random size) of the
    input stamps, warped by a random one-pixel center jitter, an
    optional mirror, and a random multiple-of-90-degree rotation, and
    then coadded with the median, mean, and sum stamp types.

    Parameters
    ----------
    rows : numpy.ndarray
        Stack of science stamps, one per observation. The crop below
        assumes the trajectory center is at pixel (12, 12) — i.e.
        25x25 (radius-12) stamps; TODO confirm (the original comment
        said "50x50").
    n : int
        Number of randomized coadd sets to generate.
    variances : numpy.ndarray, optional
        Per-observation variance stamps; warped identically to the
        science stamps when provided, otherwise a zero variance plane
        is used for each layered image.
    visits : numpy.ndarray, optional
        Per-observation visit identifiers; the subset used for each
        coadd set is returned alongside the coadds.

    Returns
    -------
    tuple of (list, list)
        ``(coadds, viss)`` where ``coadds[i]`` is the list of
        [median, mean, sum] coadd images for draw ``i`` and
        ``viss[i]`` is the visit subset used (``None`` when
        ``visits`` is not given).
    """
    # Each coadd draws at least `minimum_n` stamps, unless fewer are
    # available, in which case every draw uses all of them.
    minimum_n = 25
    if len(rows) <= minimum_n:
        n_in_range = [len(rows)]
    else:
        n_in_range = range(minimum_n, len(rows))

    # possible warping options
    center_pix = (12, 12)
    x_offset_options = [-1, 0, 1]
    y_offset_options = [-1, 0, 1]
    # None is the "no mirror" sentinel; otherwise the numpy axis to flip.
    mirror_axes = [None, 0, 1, 2]

    # make a randomized warping decision for each of the n stamp sets
    # ahead of time.
    n_in = np.random.choice(n_in_range, n)
    x_offsets = np.random.choice(x_offset_options, n)
    y_offsets = np.random.choice(y_offset_options, n)
    rots = np.random.choice(4, n)
    flips = np.random.choice(mirror_axes, n)

    # grab all the stamps for this run.
    coadds = []
    viss = []
    for i in range(n):
        inds = np.random.choice(len(rows), n_in[i])
        sub_stamps = rows[inds]

        x = center_pix[0] + x_offsets[i]
        y = center_pix[1] + y_offsets[i]

        # perform warping: crop a 21x21 box around the jittered center,
        # optionally mirror, then rotate in the image plane.
        # BUG FIX: the "no flip" sentinel is None, not 0. The original
        # `flips[i] != 0` test called np.flip(a, None) — flipping over
        # EVERY axis — when no mirror was selected, and skipped the
        # (valid) axis-0 mirror entirely.
        sub_stamps = sub_stamps[:, x - 10:x + 11, y - 10:y + 11]
        if flips[i] is not None:
            sub_stamps = np.flip(sub_stamps, flips[i])
        sub_stamps = np.rot90(sub_stamps, k=rots[i], axes=(1, 2))

        # if we have the variances, get the stamps and
        # do the same warping as above.
        if variances is not None:
            # BUG FIX: the original warped `sub_stamps` into `sub_vars`
            # (copy/paste), so the variance planes were never actually used.
            sub_vars = variances[inds]
            sub_vars = sub_vars[:, x - 10:x + 11, y - 10:y + 11]
            if flips[i] is not None:
                sub_vars = np.flip(sub_vars, flips[i])
            sub_vars = np.rot90(sub_vars, k=rots[i], axes=(1, 2))
        else:
            sub_vars = [None] * len(inds)

        # BUG FIX: `sub_vis` was appended unconditionally below but was
        # only assigned when `visits` was provided (NameError otherwise).
        sub_vis = visits[inds] if visits is not None else None

        # Wrap each (science, variance) pair in a LayeredImage with an
        # empty mask and a unit-width PSF so KBMOD can coadd them.
        lis = []
        for s, v in zip(sub_stamps, sub_vars):
            rivar = RawImage(v) if v is not None else RawImage(21, 21, 0.0)
            li = LayeredImage(RawImage(s), rivar, RawImage(21, 21, 0.0), PSF(1.0))
            lis.append(li)

        im_stack = ImageStack(lis)
        # FIX: don't clobber the loop index `i` (original did `i = img_count()`).
        num_imgs = im_stack.img_count()
        params = StampParameters()
        params.radius = 10
        params.do_filtering = False
        s_types = [StampType.STAMP_MEDIAN, StampType.STAMP_MEAN, StampType.STAMP_SUM]

        # Coadd on a stationary trajectory at the stamp center with all
        # images marked valid.
        stamps = []
        for st in s_types:
            params.stamp_type = st
            coadd = get_coadded_stamps(im_stack, [Trajectory(10, 10)], [[True] * num_imgs], params, False)
            stamps.append(coadd[0].image)
        coadds.append(stamps)
        viss.append(sub_vis)
    return coadds, viss
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,55 @@ | ||
from astropy.table import Table
import os
import glob
import numpy as np

from random_selections import generate_random_selections

"""
Script for generating the true positive stamps for kbmod-ml
on hyak. Based on Steven's fake cutouts.
"""

if __name__ == "__main__":
    # NOTE(review): this glob matches entries literally named "npy" under
    # each orbit directory; the loads below then append "/image.npy" etc.,
    # so "npy" is presumably a subdirectory of per-orbit .npy files —
    # confirm against the layout on hyak before changing.
    files = glob.glob("/gscratch/dirac/DEEP/collab/fakes_cutouts/data/*/npy")
    sub_files = files

    coadds = []
    coadd_ids = []
    orbit_ids = []
    visits = []
    i = 1
    coadd_id = -1
    for f in sub_files:
        i_path = f + "/image.npy"
        v_path = f + "/variance.npy"
        vi_path = f + "/visits.npy"

        if os.path.exists(i_path) and os.path.exists(v_path):
            image = np.load(i_path)
            var = np.load(v_path)
            vis = np.load(vi_path)
            # The orbit id is the name of the directory containing `f`.
            orb_id = int(f.split("/")[-2])

            if len(image) > 0:
                # BUG FIX: generate_random_selections is declared as
                # (rows, n, variances=None, visits=None); the original
                # positional call passed `var` as `n` and `vis` as
                # `variances`, and never passed the visits at all.
                c, v = generate_random_selections(image, 10, variances=var, visits=vis)
                for a, b in zip(c, v):
                    coadd_id += 1
                    coadds.append(a)
                    visits.append(b)
                    coadd_ids.append(coadd_id)
                    orbit_ids.append(orb_id)

        # Progress heartbeat every 1000 directories.
        if i % 1000 == 0:
            print(f"completed chunk {i / 1000}")
        i += 1

    coadds = np.array(coadds)
    np.save("/mmfs1/home/maxwest/dirac/true_positive_stamps_v2/tp_stamps.npy", coadds)

    # the true positive set has extra metadata.
    t = Table()
    t["coadd_id"] = coadd_ids
    t["orbit_id"] = orbit_ids
    # BUG FIX: the column name was misspelled "vists".
    t["visits"] = visits
    t.write("/mmfs1/home/maxwest/dirac/true_positive_stamps_v2/xmatch_meta.ecsv", format="ascii.ecsv")