diff --git a/Sourcery/CHANGES.md b/Sourcery/CHANGES.md
index d926684..37b7751 100644
--- a/Sourcery/CHANGES.md
+++ b/Sourcery/CHANGES.md
@@ -1,8 +1,9 @@
-##Current version: 0.3.0
+##Current version: 0.3.1
 
-##From version 0.2.9 to 0.3.0, the changes made were;
+##From version 0.3.0 to 0.3.1, the changes made were:
 
-#### Uses the local noise to estimate the SNR for the sources.
-#### No longer uses the local variance as a parameter to identify sources that require DD calibration
-#### Removes sources with R < 0.99 from the above classification
+#### Masking is now done as follows: first, smoothing is performed at different scales and each smoothed image is masked
+#### at a given threshold. The final mask is obtained by adding these masks and setting pixels > 1 to 1; this is then
+#### followed by multiplying the resulting mask with the original image.
+#### Whenever no sources are found by the source finder, sourcery gives a warning and either exits or continues with the remaining images.
 
diff --git a/Sourcery/main.py b/Sourcery/main.py
index a614d06..9405801 100644
--- a/Sourcery/main.py
+++ b/Sourcery/main.py
@@ -157,11 +157,7 @@ def main():
                 pybdsm_opts[key] = eval(key)
             except NameError:
                 pybdsm_opts[key] = val
-
-    if not args.image:
-        print("ATTENTION: No image provided. Unless specified inside JSON file otherwise"
-              " the execution is aborted")
-
+
     # making outdirectory
     def get_prefix(prefix, imagename, outdir):
@@ -182,7 +178,7 @@ def get_prefix(prefix, imagename, outdir):
     images = args.image.split(",") if args.image else None
     with open(args.config) as conf:
         jdict = json.load(conf)
-        for key,val in jdict.items():
+        for key, val in jdict.items():
            if isinstance(val, unicode):
                jdict[key] = str(val)
@@ -193,7 +189,7 @@ def get_prefix(prefix, imagename, outdir):
        reldict = jdict["reliability"]
        ddict = jdict["dd_tagging"]
        sourcefin = jdict["source_finder_opts"]
-
+
        for key, val in reldict.iteritems():
            if isinstance(val, unicode):
                reldict[key] = str(val)
@@ -220,22 +216,25 @@ def get_prefix(prefix, imagename, outdir):
            reldict["prefix"] = prefix
 
            mc = rel.load(image, psf, **reldict)
-            pmodel, nmodel, step = mc.get_reliability()
-
-            reldict["prefix"] = prefix
-            ddict["pmodel"] = pmodel
-            ddict["nmodel"] = nmodel
-            ddict["prefix"] = prefix
-            ddict["imagename"] = image
-            ddict["local_step"] = step
-
-            if enable:
-                dc = dd.load( psfname=psf, **ddict)
-                pmodel, nmodel = dc.source_selection()
-                pmodel.save( prefix+".lsm.html")
-                nmodel.save( prefix+"_negative.lsm.html")
-                utils.xrun("tigger-convert", ["--rename -f", prefix + ".lsm.html"])
-
+            try:
+                pmodel, nmodel, step = mc.get_reliability()
+
+                reldict["prefix"] = prefix
+                ddict["pmodel"] = pmodel
+                ddict["nmodel"] = nmodel
+                ddict["prefix"] = prefix
+                ddict["imagename"] = image
+                ddict["local_step"] = step
+
+                if enable:
+                    dc = dd.load( psfname=psf, **ddict)
+                    pmodel, nmodel = dc.source_selection()
+                    pmodel.save( prefix+".lsm.html")
+                    nmodel.save( prefix+"_negative.lsm.html")
+                    utils.xrun("tigger-convert", ["--rename -f", prefix + ".lsm.html"])
+            except:
+                pass
+
        else:
            image = jdict["imagename"]
@@ -256,10 +255,14 @@ def get_prefix(prefix, imagename, outdir):
 
            dc = dd.load(psfname=psf, **ddict)
            pmodel, nmodel = dc.source_selection()
-            pmodel.save( prefix+".lsm.html")
-            nmodel.save( prefix+"_negative.lsm.html")
+            pmodel.save( prefix+".lsm.html")
+            nmodel.save( prefix+"_negative.lsm.html")
+            utils.xrun("tigger-convert", ["--rename -f", prefix + ".lsm.html"])
 
    else:
+
+        if not args.image:
+            raise SystemExit("sourcery: no image provided. System exiting. "
+                             "Use sourcery -h for help.")
        images = args.image.split(",")
        psfs = args.psf.split(",") if args.psf else [None]
@@ -290,22 +293,23 @@ def get_prefix(prefix, imagename, outdir):
                      args.do_beam, savemask_neg=args.savemask_neg,
                      savemask_pos=args.savemask_pos, no_smooth=args.disable_smoothing,
                      **pybdsm_opts)
+        try:
+            # assigning reliability values
+            pmodel, nmodel, step = mc.get_reliability()
 
-        # assignign reliability values
-        pmodel, nmodel, step = mc.get_reliability()
-
-        # direction dependent detection tagging
+            # direction dependent detection tagging
 
-        dc = dd.load(imagename=image, psfname=psf, pmodel=pmodel, nmodel=nmodel,
-                     local_step=step, snr_thresh=args.snr_thresh,
-                     high_corr_thresh=args.psfcorr_thresh, negdetec_region=
-                     args.neg_region, negatives_thresh=args.num_negatives,
-                     phasecenter_excl_radius=args.phase_center_rm, prefix=prefix,
-                     loglevel=args.log_level)
-        # tagging
-        pmodel, nmodel = dc.source_selection()
-        pmodel.save( prefix+".lsm.html")
-        nmodel.save( prefix+"_negative.lsm.html")
-        utils.xrun("tigger-convert", ["--rename -f", prefix + ".lsm.html"])
-
+            dc = dd.load(imagename=image, psfname=psf, pmodel=pmodel, nmodel=nmodel,
+                         local_step=step, snr_thresh=args.snr_thresh,
+                         high_corr_thresh=args.psfcorr_thresh, negdetec_region=
+                         args.neg_region, negatives_thresh=args.num_negatives,
+                         phasecenter_excl_radius=args.phase_center_rm, prefix=prefix,
+                         loglevel=args.log_level)
+            # tagging
+            pmodel, nmodel = dc.source_selection()
+            pmodel.save( prefix+".lsm.html")
+            nmodel.save( prefix+"_negative.lsm.html")
+            utils.xrun("tigger-convert", ["--rename -f", prefix + ".lsm.html"])
+        except:
+            pass
 
 #TODO: delete any file that is not needed
diff --git a/Sourcery/reliabilityestimates.py b/Sourcery/reliabilityestimates.py
index 6e1f757..615e942 100644
--- a/Sourcery/reliabilityestimates.py
+++ b/Sourcery/reliabilityestimates.py
@@ -17,6 +17,7 @@ from Tigger.Coordinates import angular_dist_pos_angle as dist
 import sys
 from astLib.astWCS import WCS
 
+import smooth_mask
 
 class load(object):
@@ -210,9 +211,7 @@ def source_finder(self, image=None, thresh=None, prefix=None,
        tpos = None
        naxis = self.header["NAXIS1"]
        boundary = numpy.array([self.locstep, self.cfstep])
-        #trim_box = (boundary.max(), naxis - boundary.max(),
-        #            boundary.max(), naxis - boundary.max())
-        trim_box = None
+
 
        # data smoothing
        if self.smoothing:
@@ -233,7 +232,6 @@ def source_finder(self, image=None, thresh=None, prefix=None,
        utils.sources_extraction(
            image=image, output=output,
            sourcefinder_name=self.sourcefinder_name,
-            trim_box=trim_box,
            prefix=self.prefix, **kw)
 
        if tpos:
@@ -258,9 +256,6 @@ def remove_sources_within(self, model):
 
    def params(self, modelfits):
 
-        # reads in source finder output
-        with pyfits.open(modelfits) as hdu:
-            data = hdu[1].data
 
        tfile = tempfile.NamedTemporaryFile(suffix=".txt")
        tfile.flush()
@@ -269,9 +264,17 @@ def params(self, modelfits):
        with open(tfile.name, "w") as std:
            std.write("#format:name ra_rad dec_rad i emaj_r emin_r pa_r\n")
 
-        model = Tigger.load(tfile.name) # open a tmp. file
-
+        model = Tigger.load(tfile.name) # open a tmp. file
        peak, total, area, loc, corr = [], [], [], [], []
+
+        # reads in source finder output
+        try:
+            with pyfits.open(modelfits) as hdu:
+                data = hdu[1].data
+        except:
+            self.log.info("No sources were found in %s, therefore no reliability"
+                          " estimates can be made."%self.imagename)
+
        for i in range(len(data)):
            flux = data["Total_flux"][i]
            dc_emaj, dc_emin = data["DC_Maj"][i], data["DC_Min"][i]
@@ -425,13 +428,17 @@ def get_reliability(self):
 
        pmodel, positive, labels = self.params(pfile)
        nmodel, negative, labels = self.params(nfile)
-
+
        # setting up a kernel, Gaussian kernel
        bandwidth = []
        for plane in negative.T:
-            bandwidth.append(plane.std())
-
+            # use a robust width estimate to limit the effect of outliers:
+            # the smaller of the standard deviation and the normalised IQR.
+            Q3, Q1 = numpy.percentile(plane, [75, 25])
+            IQR = Q3 - Q1
+            bandwidth.append(min([plane.std(), IQR/1.34]))
+            #bandwidth.append(plane.std())
 
        nplanes = len(labels)
        cov = numpy.zeros([nplanes, nplanes])
        nnsrc = len(negative)
@@ -469,7 +476,7 @@ def get_reliability(self):
        if self.do_psf_corr and self.derel:
            for s in pmodel.sources:
                cf, r = s.correlation_factor, s.rel
-                if cf < 0.006 and r > 0.60:
+                if cf < 0.09999:
                    s.rel = 0.0
 
        if self.makeplots:
diff --git a/Sourcery/utils.py b/Sourcery/utils.py
index 184a539..8f8f6c8 100644
--- a/Sourcery/utils.py
+++ b/Sourcery/utils.py
@@ -160,7 +160,7 @@ def thresh_mask(imagename, output, thresh,
                prefix=None, savemask=False):
 
    """ Create a threshhold mask """
-
+
    hdu = pyfits.open(imagename)
 
    hdr = hdu[0].header
@@ -180,14 +180,14 @@ def thresh_mask(imagename, output, thresh,
        thresh = thresh * noise
 
    mask = numpy.ones(data.shape)
-
+    masked = 0
    if smooth:
        log.info(" A masking threshold was set to %.2f"%(thresh/noise))
        emin = hdr["BMIN"]
        emaj = hdr["BMAJ"]
        cell = abs(hdr["CDELT1"])
-        beam = math.sqrt(emin*emaj)/cell
-        scales = [0.1, 2.0, 5.0, 10.0, 20.0, 40.0]#, 60.0]
+        beam = math.sqrt(emin * emaj)/cell
+        scales = [0.1, 0.3, 0.5, 1.0, 1.5, 1.9, 2.0, 5.0, 10.0]#, 60.0]
 
        smooth = None
        for scale in scales:
@@ -195,32 +195,33 @@ def thresh_mask(imagename, output, thresh,
            kk = scale * beam
            kernel[-1] = kk
            kernel[-2] = kk
            smooth = filters.gaussian_filter( data if smooth is None else smooth, kernel)
-
-            mask *= smooth < thresh
+            mask *= smooth > thresh
+            masked += mask
+        masked[masked > 1] = 1
+        masked = masked * data
    else:
        log.info(" No smoothing was made since the smooth was set to None")
-        mask = data < thresh
+        mask = data > thresh
+        masked = mask * data
 
-    hdu[0].data *= (mask==False)
+    hdu[0].data = masked #*= (mask==False)
    hdu.writeto(output, clobber=True)
-
-    if savemask:
-        log.info(" Saving Masked images.")
-        mask = (mask== False) * 1
-        ext = fits_ext(imagename)
-        outmask = prefix + "-mask.fits" or imagename.replace(ext,"-mask.fits")
-
-        hdu[0].data = mask
-
-        hdu.writeto(outmask, clobber=True)
-        log.info(" Masking an image %s was succesfull"%imagename)
-
-    return mask==False, noise
+    #hdu.writeto("masked.fits", clobber=True)
+    #if savemask:
+    #    log.info(" Saving Masked images.")
+    #    mask = (mask == True) * 1
+    #    ext = fits_ext(imagename)
+    #    outmask = prefix + "-mask.fits" or imagename.replace(ext,"-mask.fits")
+    #    hdu[0].data = mask
 
+    #    hdu.writeto(outmask, clobber=True)
+    #    log.info(" Masking an image %s was succesfull"%imagename)
 
+    return masked, noise
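
For reference, the multi-scale masking scheme described in the CHANGES.md entry and implemented in `thresh_mask` can be sketched roughly as follows. This is a minimal, standalone illustration using only numpy and scipy; the function name, the scale list, and the threshold value are placeholders, and unlike the patch (which smooths the previously smoothed image at each step) this sketch smooths the original image at every scale.

```python
import numpy
from scipy.ndimage import gaussian_filter

def multiscale_mask(data, thresh, scales=(0.1, 0.5, 1.0, 2.0, 5.0)):
    """Smooth at several scales, threshold each smoothed image, combine the
    per-scale masks, and apply the combined mask to the original image."""
    combined = numpy.zeros_like(data, dtype=float)
    for sigma in scales:
        smoothed = gaussian_filter(data, sigma)   # smoothing at this scale
        combined += (smoothed > thresh)           # per-scale binary mask
    combined[combined > 1] = 1                    # any scale above threshold -> 1
    return combined * data                        # masked image
```

Usage would look like `masked = multiscale_mask(image_data, thresh=5 * noise)`; the combined mask keeps a pixel if it exceeds the threshold at any smoothing scale.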
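The bandwidth change in reliabilityestimates.py replaces the per-plane standard deviation with min(std, IQR/1.34), a common robust width estimate (for a Gaussian the interquartile range is about 1.34 sigma, as in Silverman's rule of thumb), so a few outlying detections no longer inflate the kernel width. A minimal sketch of that estimator, with a hypothetical helper name:

```python
import numpy

def robust_sigma(plane):
    """Width estimate that is less sensitive to outliers: the smaller of the
    standard deviation and the normalised interquartile range (IQR / 1.34)."""
    q3, q1 = numpy.percentile(plane, [75, 25])
    return min(plane.std(), (q3 - q1) / 1.34)

# e.g. one bandwidth per parameter plane of the negative-detection features:
# bandwidth = [robust_sigma(plane) for plane in negative.T]
```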