diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 00000000000..bd3cd14944b
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,3 @@
+[BASIC]
+# Allow certain names for variables
+good-names=logger,Run,i,j,k,v,_
diff --git a/bin/all_sky_search/pycbc_bin_templates b/bin/all_sky_search/pycbc_bin_templates
index 8da9c9bc6f5..4cc9558ec1a 100755
--- a/bin/all_sky_search/pycbc_bin_templates
+++ b/bin/all_sky_search/pycbc_bin_templates
@@ -12,8 +12,8 @@ from pycbc.version import git_verbose_msg as version
from pycbc.events import background_bin_from_string
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument('--version', action='version', version=version)
-parser.add_argument('--verbose', action="count")
parser.add_argument("--ifo", type=str, required=True)
parser.add_argument("--f-lower", type=float, default=15.,
help='Enforce a uniform low frequency cutoff to '
diff --git a/bin/all_sky_search/pycbc_fit_sngls_binned b/bin/all_sky_search/pycbc_fit_sngls_binned
index 41a210acbaa..d4af2974e4a 100644
--- a/bin/all_sky_search/pycbc_fit_sngls_binned
+++ b/bin/all_sky_search/pycbc_fit_sngls_binned
@@ -126,11 +126,7 @@ if args.output_file is not None and len(args.stat_threshold) > 1:
raise RuntimeError("Cannot plot more than one threshold in a single "
"output file!")
-if args.verbose:
- log_level = logging.DEBUG
-else:
- log_level = logging.WARN
-logging.basicConfig(format='%(asctime)s : %(message)s', level=log_level)
+pycbc.init_logging(args.verbose)
statname = "reweighted SNR" if args.sngl_ranking == "new_snr" else \
args.sngl_ranking.replace("_", " ").replace("snr", "SNR")
diff --git a/bin/all_sky_search/pycbc_get_loudest_params b/bin/all_sky_search/pycbc_get_loudest_params
index 14fef61e945..c61f728f90d 100644
--- a/bin/all_sky_search/pycbc_get_loudest_params
+++ b/bin/all_sky_search/pycbc_get_loudest_params
@@ -13,8 +13,6 @@ from pycbc import init_logging
import pycbc.events
from pycbc.pnutils import mass1_mass2_to_mchirp_eta
-logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
-
parser = argparse.ArgumentParser(description=__doc__)
pycbc.add_common_pycbc_options(parser)
parser.add_argument('--single-ifo-trigs', type=str, required=True,
diff --git a/bin/all_sky_search/pycbc_make_bayestar_skymap b/bin/all_sky_search/pycbc_make_bayestar_skymap
index de1c22c0852..5d937a2818e 100644
--- a/bin/all_sky_search/pycbc_make_bayestar_skymap
+++ b/bin/all_sky_search/pycbc_make_bayestar_skymap
@@ -33,10 +33,9 @@ from pycbc.io import WaveformArray
from pycbc.io.ligolw import LIGOLWContentHandler
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument('--version', action="version",
version=pycbc.version.git_verbose_msg)
-parser.add_argument('--verbose', action='count',
- help="Make the logging increasingly more verbose")
parser.add_argument('--bayestar-executable',
help="The bayestar-localize-coinc executable to be run. "
"If not given, will use whatever is available in "
@@ -52,9 +51,9 @@ parser.add_argument('--output-file', required=True,
wavebank.add_approximant_arg(parser)
args, unknown = parser.parse_known_args()
-# Default logging is set higher than normal for this job
-logging_level = args.verbose + 1 if args.verbose else None
-init_logging(logging_level)
+# Default logging level is info: --verbose adds to this
+pycbc.init_logging(args.verbose, default_level=1)
+
logging.info("Starting")
bayestar_exe = args.bayestar_executable or 'bayestar-localize-coincs'
diff --git a/bin/all_sky_search/pycbc_prepare_xml_for_gracedb b/bin/all_sky_search/pycbc_prepare_xml_for_gracedb
index f915954884a..bb3058b357c 100755
--- a/bin/all_sky_search/pycbc_prepare_xml_for_gracedb
+++ b/bin/all_sky_search/pycbc_prepare_xml_for_gracedb
@@ -28,12 +28,13 @@ import h5py
import matplotlib
matplotlib.use('agg')
-import pycbc
import lal
import lal.series
from ligo.lw import lsctables
from ligo.lw import utils as ligolw_utils
from ligo.segments import segment, segmentlist
+
+import pycbc
from pycbc.io.ligolw import (
LIGOLWContentHandler,
snr_series_to_xml,
@@ -44,8 +45,7 @@ from pycbc.types import MultiDetOptionAction
from pycbc.results import generate_asd_plot, generate_snr_plot
parser = argparse.ArgumentParser(description=__doc__)
-parser.add_argument("--verbose", action='count',
- help="Increase logging level, default=info")
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--psd-files", nargs='+', required=True,
help='HDF file(s) containing the PSDs to upload')
parser.add_argument("--snr-timeseries", nargs='+', required=True,
@@ -72,11 +72,8 @@ parser.add_argument('--delta-f', type=float, default=0.25,
args = parser.parse_args()
-if args.verbose:
- args.verbose += 1
-else:
- args.verbose = 1
-pycbc.init_logging(args.verbose)
+# Default logging level is info: --verbose adds to this
+pycbc.init_logging(args.verbose, default_level=1)
xmldoc = ligolw_utils.load_filename(args.input_file,
contenthandler=LIGOLWContentHandler)
diff --git a/bin/all_sky_search/pycbc_upload_single_event_to_gracedb b/bin/all_sky_search/pycbc_upload_single_event_to_gracedb
index cef77526a78..03c0b4c7708 100755
--- a/bin/all_sky_search/pycbc_upload_single_event_to_gracedb
+++ b/bin/all_sky_search/pycbc_upload_single_event_to_gracedb
@@ -32,6 +32,7 @@ import lal
from pycbc.io.gracedb import gracedb_tag_with_version
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument('--xml-file-for-upload', required=True, type=str,
help='LIGOLW XML file containing the event.')
parser.add_argument('--log-message', type=str, metavar='MESSAGE',
@@ -62,7 +63,8 @@ parser.add_argument('--labels', nargs='+',
args = parser.parse_args()
-pycbc.init_logging(logging.INFO)
+# Default logging level is info: --verbose adds to this
+pycbc.init_logging(args.verbose, default_level=1)
# Make the scitokens logger a little quieter
# (it is called through GraceDB)
logging.getLogger('scitokens').setLevel(logging.root.level + 10)
diff --git a/bin/hwinj/pycbc_generate_hwinj_from_xml b/bin/hwinj/pycbc_generate_hwinj_from_xml
index 5c5e62866a2..09349eff4ad 100644
--- a/bin/hwinj/pycbc_generate_hwinj_from_xml
+++ b/bin/hwinj/pycbc_generate_hwinj_from_xml
@@ -60,9 +60,8 @@ parser.add_argument('--ifos', nargs='+', default=['H1', 'L1'], required=True,
# parse command line
opts = parser.parse_args()
-# setup logging - default level is DEBUG (2)
-logging_level = 2 if opts.verbose is None else opts.verbose + 2
-init_logging(logging_level)
+# Default logging level is debug: --verbose adds to this
+init_logging(opts.verbose, default_level=2)
# read in injection LIGOLW XML file
logging.info('Reading injection file')
diff --git a/bin/hwinj/pycbc_insert_frame_hwinj b/bin/hwinj/pycbc_insert_frame_hwinj
index 0fa56998f1f..458f426e2e5 100644
--- a/bin/hwinj/pycbc_insert_frame_hwinj
+++ b/bin/hwinj/pycbc_insert_frame_hwinj
@@ -57,9 +57,8 @@ _strain.insert_strain_option_group(parser)
# parse command line
opts = parser.parse_args()
-# setup log: default is DEBUG (2)
-logging_level = 2 if opts.verbose is None else opts.verbose + 2
-init_logging(logging_level)
+# Default logging level is debug: --verbose adds to this
+init_logging(opts.verbose, default_level=2)
# get strain
strain = _strain.from_cli(opts, precision=opts.precision)
diff --git a/bin/inference/pycbc_inference_create_fits b/bin/inference/pycbc_inference_create_fits
index 626374fbef1..83bd29109bc 100644
--- a/bin/inference/pycbc_inference_create_fits
+++ b/bin/inference/pycbc_inference_create_fits
@@ -35,9 +35,11 @@ import numpy
import argparse
import subprocess
+from pycbc import add_common_pycbc_options, init_logging
from pycbc.inference import io
parser = argparse.ArgumentParser(description=__doc__)
+add_common_pycbc_options(parser)
parser.add_argument('--input-file', required=True,
help='The inference or posterior hdf file to load.')
parser.add_argument('--output-file', required=True,
@@ -68,6 +70,8 @@ parser.add_argument('--tc', default='tc',
'"tc".')
opts = parser.parse_args()
+init_logging(opts.verbose)
+
fp = io.loadfile(opts.input_file, 'r')
samples = fp.read_samples([opts.ra, opts.dec, opts.distance, opts.tc])
fp.close()
diff --git a/bin/inference/pycbc_inference_plot_gelman_rubin b/bin/inference/pycbc_inference_plot_gelman_rubin
index fd3e649d397..9f3be01d50d 100644
--- a/bin/inference/pycbc_inference_plot_gelman_rubin
+++ b/bin/inference/pycbc_inference_plot_gelman_rubin
@@ -19,19 +19,20 @@
import argparse
import logging
-import matplotlib as mpl; mpl.use("Agg")
+import matplotlib
+matplotlib.use("Agg")
import matplotlib.pyplot as plt
import sys
-from pycbc import results
-from pycbc import __version__
+
+from pycbc import (
+ __version__, results, init_logging, add_common_pycbc_options
+)
from pycbc.inference import (gelman_rubin, io, option_utils)
# add options to command line
parser = io.ResultsArgumentParser(skip_args=['walkers'])
-# verbose option
-parser.add_argument("--verbose", action="store_true", default=False,
- help="Print logging info.")
+add_common_pycbc_options(parser)
parser.add_argument('--version', action='version', version=__version__,
help='show version number and exit')
@@ -55,11 +56,7 @@ parser.add_argument("--segment-step", type=int, required=True,
opts = parser.parse_args()
# setup log
-if opts.verbose:
- log_level = logging.DEBUG
-else:
- log_level = logging.WARN
-logging.basicConfig(format="%(asctime)s : %(message)s", level=log_level)
+init_logging(opts.verbose)
# enfore that this is not a single iteration
if opts.iteration is not None:
diff --git a/bin/inference/pycbc_inference_plot_mcmc_history b/bin/inference/pycbc_inference_plot_mcmc_history
index 6d19ae0d546..e66c6b33fe4 100644
--- a/bin/inference/pycbc_inference_plot_mcmc_history
+++ b/bin/inference/pycbc_inference_plot_mcmc_history
@@ -31,6 +31,7 @@ from pycbc.inference import io
from pycbc.results import metadata
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument('--input-file', type=str, required=True,
help='Path to the input HDF file.')
parser.add_argument('--output-file', type=str, required=True,
@@ -48,7 +49,8 @@ parser.add_argument('-b', '--plot-nchains-burned-in', action='store_true',
'ensemble samplers, this will be all or nothing.')
opts = parser.parse_args()
-pycbc.init_logging(True)
+# Default logging level is info: --verbose adds to this
+pycbc.init_logging(opts.verbose, default_level=1)
nplots = sum([opts.plot_act, opts.plot_effective_nsamples,
opts.plot_nchains_burned_in, opts.plot_checkpoint_dt])
diff --git a/bin/inference/pycbc_inference_plot_skymap b/bin/inference/pycbc_inference_plot_skymap
index ed544eb2cdb..a2660e48c78 100644
--- a/bin/inference/pycbc_inference_plot_skymap
+++ b/bin/inference/pycbc_inference_plot_skymap
@@ -31,8 +31,10 @@ import sys
import subprocess
from PIL import Image, PngImagePlugin
+from pycbc import add_common_pycbc_options, init_logging
parser = argparse.ArgumentParser(description=__doc__)
+add_common_pycbc_options(parser)
parser.add_argument('--input-file', required=True,
help='Input fits file"')
parser.add_argument('--output-file', required=True,
@@ -42,6 +44,8 @@ parser.add_argument('--colormap',
opts = parser.parse_args()
+init_logging(opts.verbose)
+
cmd = 'ligo-skymap-plot {} -o {} --annotate --contour 50 90'.format(
opts.input_file, opts.output_file)
if opts.colormap is not None:
diff --git a/bin/inference/pycbc_inference_plot_thermodynamic_integrand b/bin/inference/pycbc_inference_plot_thermodynamic_integrand
index ab525ec97f0..3a82a173899 100644
--- a/bin/inference/pycbc_inference_plot_thermodynamic_integrand
+++ b/bin/inference/pycbc_inference_plot_thermodynamic_integrand
@@ -31,10 +31,13 @@ matplotlib.use("agg")
from matplotlib import rc
import matplotlib.pyplot as plt
import numpy
+
+from pycbc import add_common_pycbc_options, init_logging
from pycbc.inference import io
import pycbc.version
parser = argparse.ArgumentParser()
+add_common_pycbc_options(parser)
parser.add_argument("--inference-file", type=str,
help="The PyCBC multi-temper inference file.")
parser.add_argument("--thin-start", type=int, default=None,
@@ -52,6 +55,8 @@ parser.add_argument("--integrand-logarithmic", action="store_true",
parser.add_argument("--output-file", type=str)
args = parser.parse_args()
+init_logging(args.verbose)
+
# Read in the necessary data
fp = io.loadfile(args.inference_file, "r")
logl = fp.read_samples("loglikelihood", thin_start=args.thin_start,
diff --git a/bin/inference/pycbc_inference_start_from_samples b/bin/inference/pycbc_inference_start_from_samples
index 6ba563048dc..03e900aee96 100644
--- a/bin/inference/pycbc_inference_start_from_samples
+++ b/bin/inference/pycbc_inference_start_from_samples
@@ -6,10 +6,13 @@ import argparse
import numpy
import h5py
from numpy.random import choice
+
+from pycbc import add_common_pycbc_options, init_logging
from pycbc.inference.io import loadfile
from pycbc.inference.sampler import samplers
parser = argparse.ArgumentParser()
+add_common_pycbc_options(parser)
parser.add_argument('--input-file')
parser.add_argument('--output-file')
parser.add_argument('--sampler', default='emcee_pt',
@@ -18,6 +21,8 @@ parser.add_argument('--ntemps', type=int)
parser.add_argument('--nwalkers', type=int)
args = parser.parse_args()
+init_logging(args.verbose)
+
# populate an emcee start file with
# values chosen from a dynesty file
# each temperature and walker will get a random
diff --git a/bin/inference/pycbc_validate_test_posterior b/bin/inference/pycbc_validate_test_posterior
index d1df7a9d1cf..92015726658 100644
--- a/bin/inference/pycbc_validate_test_posterior
+++ b/bin/inference/pycbc_validate_test_posterior
@@ -5,15 +5,21 @@ test posterior model.
import sys
import numpy
import argparse
-from matplotlib import use; use('Agg')
+from matplotlib import use
+use('Agg')
import pylab
+
+from scipy.stats import gaussian_kde, ks_2samp
+
from pycbc.distributions.utils import prior_from_config
from pycbc.inference import models, io
-from scipy.stats import gaussian_kde, ks_2samp
from pycbc.io import FieldArray
+from pycbc import add_common_pycbc_options, init_logging
+
numpy.random.seed(0)
parser = argparse.ArgumentParser()
+add_common_pycbc_options(parser)
parser.add_argument('--input-file', help='inference posterior file')
parser.add_argument('--output-file', help='diagnostic plot')
parser.add_argument('--p-value-threshold', help='minimum ks test p-value',
@@ -22,6 +28,8 @@ parser.add_argument('--ind-samples', help='use only this number of samples',
default=1000, type=int)
args = parser.parse_args()
+init_logging(args.verbose)
+
size = int(1e6)
d1 = io.loadfile(args.input_file, 'r')
diff --git a/bin/minifollowups/pycbc_foreground_minifollowup b/bin/minifollowups/pycbc_foreground_minifollowup
index 24e847ef57a..6278a4773d6 100644
--- a/bin/minifollowups/pycbc_foreground_minifollowup
+++ b/bin/minifollowups/pycbc_foreground_minifollowup
@@ -69,8 +69,7 @@ wf.add_workflow_settings_cli(parser, include_subdax_opts=True)
args = parser.parse_args()
# Default logging level is info: --verbose adds to this
-log_level = 1 if args.verbose is None else args.verbose + 1
-init_logging(log_level)
+init_logging(args.verbose, default_level=1)
workflow = wf.Workflow(args)
diff --git a/bin/minifollowups/pycbc_injection_minifollowup b/bin/minifollowups/pycbc_injection_minifollowup
index 7bf5992f16c..e076ed16a47 100644
--- a/bin/minifollowups/pycbc_injection_minifollowup
+++ b/bin/minifollowups/pycbc_injection_minifollowup
@@ -163,8 +163,7 @@ args = parser.parse_args()
# Default logging level is info: --verbose adds to this
-log_level = 1 if args.verbose is None else args.verbose + 1
-init_logging(log_level)
+init_logging(args.verbose, default_level=1)
workflow = wf.Workflow(args)
diff --git a/bin/minifollowups/pycbc_sngl_minifollowup b/bin/minifollowups/pycbc_sngl_minifollowup
index 85dda66504a..206066288ea 100644
--- a/bin/minifollowups/pycbc_sngl_minifollowup
+++ b/bin/minifollowups/pycbc_sngl_minifollowup
@@ -97,8 +97,7 @@ stat.insert_statistic_option_group(parser,
args = parser.parse_args()
# Default logging level is info: --verbose adds to this
-log_level = 1 if args.verbose is None else args.verbose + 1
-init_logging(log_level)
+init_logging(args.verbose, default_level=1)
workflow = wf.Workflow(args)
workflow.ifos = [args.instrument]
diff --git a/bin/minifollowups/pycbc_upload_prep_minifollowup b/bin/minifollowups/pycbc_upload_prep_minifollowup
index ecee01fb0ee..fdfe3a5bdfa 100644
--- a/bin/minifollowups/pycbc_upload_prep_minifollowup
+++ b/bin/minifollowups/pycbc_upload_prep_minifollowup
@@ -66,8 +66,7 @@ wf.add_workflow_settings_cli(parser, include_subdax_opts=True)
args = parser.parse_args()
# Default logging level is info: --verbose adds to this
-log_level = 1 if args.verbose is None else args.verbose + 1
-init_logging(log_level)
+init_logging(args.verbose, default_level=1)
workflow = wf.Workflow(args)
diff --git a/bin/plotting/pycbc_banksim_plot_eff_fitting_factor b/bin/plotting/pycbc_banksim_plot_eff_fitting_factor
index 9e5b42ab3a9..5814c7aaceb 100644
--- a/bin/plotting/pycbc_banksim_plot_eff_fitting_factor
+++ b/bin/plotting/pycbc_banksim_plot_eff_fitting_factor
@@ -14,17 +14,21 @@
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-""" Plot effective fitting factor from a bunch of point-source files.
+"""
+Plot effective fitting factor vs mass1 and mass2 from various
+point-source files.
"""
import sys
import h5py
import numpy
import argparse
+import logging
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
+from pycbc import init_logging, add_common_pycbc_options
import pycbc.version
from pycbc import results
@@ -34,7 +38,8 @@ __date__ = pycbc.version.date
__program__ = "pycbc_banksim_plot_eff_fitting_factor"
parser = argparse.ArgumentParser(usage='',
- description="Plot effective fitting factor vs mass1 and mass2.")
+ description=__doc__)
+add_common_pycbc_options(parser)
parser.add_argument("--version", action="version", version=__version__)
parser.add_argument('--input-files', nargs='+', default=None, required=True,
help="List of input files.")
@@ -57,6 +62,8 @@ parser.add_argument('--log-colorbar', action='store_true', default=False,
opt = parser.parse_args()
+init_logging(opt.verbose)
+
m1 = []
m2 = []
eff_ff = []
diff --git a/bin/plotting/pycbc_banksim_plot_fitting_factors b/bin/plotting/pycbc_banksim_plot_fitting_factors
index 38cc3a457d0..e5a1c2736f0 100644
--- a/bin/plotting/pycbc_banksim_plot_fitting_factors
+++ b/bin/plotting/pycbc_banksim_plot_fitting_factors
@@ -21,6 +21,7 @@ import sys
import h5py
import argparse
import numpy
+import logging
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
@@ -36,6 +37,7 @@ __program__ = "pycbc_banksim_plot_fitting_factors"
parser = argparse.ArgumentParser(usage='',
description="Plot fitting factor distribution.")
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version", action="version", version=__version__)
parser.add_argument('--input-file', default=None, required=True,
help="List of input files.")
@@ -54,6 +56,8 @@ parser.add_argument('--plot-caption',
opt = parser.parse_args()
+pycbc.init_logging(opt.verbose)
+
curr_fp = h5py.File(opt.input_file, 'r')
m1 = curr_fp['inj_params/mass1'][:]
m2 = curr_fp['inj_params/mass2'][:]
diff --git a/bin/plotting/pycbc_banksim_table_point_injs b/bin/plotting/pycbc_banksim_table_point_injs
index d1c999d38f5..df1d96f0db1 100644
--- a/bin/plotting/pycbc_banksim_table_point_injs
+++ b/bin/plotting/pycbc_banksim_table_point_injs
@@ -21,6 +21,7 @@ import sys
import h5py
import argparse
import numpy
+import logging
import pycbc.version
from pycbc import results
@@ -32,6 +33,7 @@ __program__ = "pycbc_banksim_table_point_injs"
parser = argparse.ArgumentParser(usage='',
description="Plot effective fitting factor vs mass1 and mass2.")
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version", action="version", version=__version__)
parser.add_argument('--input-files', nargs='+', default=None, required=True,
help="List of input files.")
@@ -42,6 +44,8 @@ parser.add_argument('--output-file', default=None, required=True,
opt = parser.parse_args()
+pycbc.init_logging(opt.verbose)
+
col_names = ['Mass 1', 'Mass 2', 'Signal recovery fraction',
             'Effective fitting factor', 'Maximum fitting factor',
diff --git a/bin/plotting/pycbc_create_html_snippet b/bin/plotting/pycbc_create_html_snippet
index bb956d092a9..f7b86dfb9ed 100644
--- a/bin/plotting/pycbc_create_html_snippet
+++ b/bin/plotting/pycbc_create_html_snippet
@@ -18,11 +18,14 @@
import sys
import argparse
+
+from pycbc import init_logging, add_common_pycbc_options
import pycbc.version
import pycbc.results
# parse command line
parser = argparse.ArgumentParser()
+add_common_pycbc_options(parser)
parser.add_argument("--version", action="version", version=pycbc.version.git_verbose_msg)
parser.add_argument('--output-file', type=str,
help='Path of the output HTML file.')
@@ -32,6 +35,8 @@ parser.add_argument('--title', type=str, default="Title",
help="Title of figure for results webpage.")
opts = parser.parse_args()
+init_logging(opts.verbose)
+
# set caption beginning
caption = opts.html_text
diff --git a/bin/plotting/pycbc_faithsim_plots b/bin/plotting/pycbc_faithsim_plots
index 8018e0025ca..4d692e1f0e8 100755
--- a/bin/plotting/pycbc_faithsim_plots
+++ b/bin/plotting/pycbc_faithsim_plots
@@ -7,14 +7,15 @@ that compare two approximants and compute the match between them.
import argparse
import matplotlib
-
matplotlib.use("Agg")
import matplotlib.cm
+from matplotlib.ticker import MultipleLocator, FormatStrFormatter
import pylab
import numpy as np
+
from ligo.lw import utils, table, lsctables
-from matplotlib.ticker import MultipleLocator, FormatStrFormatter
-from pycbc import pnutils
+
+from pycbc import pnutils, init_logging, add_common_pycbc_options
from pycbc.conversions import (
mtotal_from_mass1_mass2,
q_from_mass1_mass2,
@@ -114,6 +115,7 @@ def basic_scatter(
parser = argparse.ArgumentParser(description=__doc__)
+add_common_pycbc_options(parser)
parser.add_argument(
"--input-file",
required=True,
@@ -133,6 +135,8 @@ parser.add_argument("--output-plot", required=True, help="name of the output plo
args = parser.parse_args()
+init_logging(args.verbose)
+
derived_func_map = {
"total_mass": lambda d: mtotal_from_mass1_mass2(d["mass1"], d["mass2"]),
"mass_ratio": lambda d: q_from_mass1_mass2(d["mass1"], d["mass2"]),
diff --git a/bin/plotting/pycbc_ifar_catalog b/bin/plotting/pycbc_ifar_catalog
index 3924949640e..59757d018f7 100644
--- a/bin/plotting/pycbc_ifar_catalog
+++ b/bin/plotting/pycbc_ifar_catalog
@@ -19,21 +19,23 @@ import argparse
import h5py
import numpy
import sys
+import logging
import matplotlib as mpl; mpl.use('Agg')
import pylab
+
+from scipy.stats import norm, poisson
+
import pycbc.results
import pycbc.version
from pycbc import conversions
-from scipy.stats import norm, poisson
-import logging
-from pycbc import init_logging
+from pycbc import init_logging, add_common_pycbc_options
parser = argparse.ArgumentParser(usage='pycbc_ifar_catalog [--options]',
description='Plots cumulative IFAR vs count for'
' foreground triggers')
+add_common_pycbc_options(parser)
parser.add_argument('--version', action='version',
version=pycbc.version.git_verbose_msg)
-parser.add_argument('--verbose', action='count')
parser.add_argument('--trigger-files', nargs='+',
help='Path to coincident trigger HDF file(s)')
parser.add_argument('--output-file', required=True,
diff --git a/bin/plotting/pycbc_mass_area_plot b/bin/plotting/pycbc_mass_area_plot
index 80495f45f97..486cd3d7741 100644
--- a/bin/plotting/pycbc_mass_area_plot
+++ b/bin/plotting/pycbc_mass_area_plot
@@ -9,15 +9,18 @@ the m1 & m2 plane when given a central mchirp value and uncertainty.
"""
import argparse
-from pycbc.mchirp_area import calc_areas
-from pycbc.mchirp_area import src_mass_from_z_det_mass
-from pycbc.conversions import mass2_from_mchirp_mass1 as m2mcm1
import numpy
from matplotlib import use; use("Agg")
from matplotlib import pyplot
+from pycbc import init_logging, add_common_pycbc_options
+from pycbc.mchirp_area import calc_areas
+from pycbc.mchirp_area import src_mass_from_z_det_mass
+from pycbc.conversions import mass2_from_mchirp_mass1 as m2mcm1
+
# ARGUMENT PARSER
parser = argparse.ArgumentParser()
+add_common_pycbc_options(parser)
parser.add_argument("--central-mc", type=float, help="Central value of mchirp")
parser.add_argument("--delta-mc", type=float, help="Uncertainty for mchirp")
parser.add_argument("--min-m2", type=float, help="Minimum value for m2")
@@ -29,6 +32,8 @@ parser.add_argument("--gap-max", type=float, help="Minimum black hole mass")
args = parser.parse_args()
+init_logging(args.verbose)
+
if args.central_mc and args.delta_mc:
central_mc = float(args.central_mc)
delta_mc = float(args.delta_mc)
diff --git a/bin/plotting/pycbc_mchirp_plots b/bin/plotting/pycbc_mchirp_plots
index 2b19bbad13e..49003497a6b 100644
--- a/bin/plotting/pycbc_mchirp_plots
+++ b/bin/plotting/pycbc_mchirp_plots
@@ -9,13 +9,16 @@ plane as a function of central mchirp value.
"""
import argparse
-from pycbc.mchirp_area import calc_areas
import numpy
from matplotlib import use; use("Agg")
from matplotlib import pyplot
+from pycbc import init_logging, add_common_pycbc_options
+from pycbc.mchirp_area import calc_areas
+
# ARGUMENT PARSER
parser = argparse.ArgumentParser()
+add_common_pycbc_options(parser)
parser.add_argument("--min-m2", type=float, help="Minimum value for m2")
parser.add_argument("--max-m1", type=float, help="Maximum value for m1")
parser.add_argument("--ns-max", type=float, help="Maximum neutron star mass")
@@ -24,6 +27,8 @@ parser.add_argument("--central-z", type=float, help="Central redshift value")
parser.add_argument("--delta-z", type=float, help="Redshift uncertainty")
args = parser.parse_args()
+init_logging(args.verbose)
+
if args.min_m2 and args.max_m1:
m1_max = float(args.max_m1)
m2_min = float(args.min_m2)
diff --git a/bin/plotting/pycbc_page_banktriggerrate b/bin/plotting/pycbc_page_banktriggerrate
index ba0facac87d..ba1749420f5 100644
--- a/bin/plotting/pycbc_page_banktriggerrate
+++ b/bin/plotting/pycbc_page_banktriggerrate
@@ -5,13 +5,18 @@ import matplotlib
matplotlib.use('Agg')
import numpy, argparse, h5py, os, pylab, pycbc.pnutils
-parser = argparse.ArgumentParser()
+from pycbc import init_logging, add_common_pycbc_options
+
+parser = argparse.ArgumentParser(description=__doc__)
+add_common_pycbc_options(parser)
parser.add_argument('--trigger-files', nargs='+')
parser.add_argument('--bank-file')
parser.add_argument('--output-file')
parser.add_argument('--chisq-bins')
args = parser.parse_args()
+init_logging(args.verbose)
+
bf = h5py.File(args.bank_file)
m1 = bf['mass1'][:]
m2 = bf['mass2'][:]
diff --git a/bin/plotting/pycbc_page_coinc_snrchi b/bin/plotting/pycbc_page_coinc_snrchi
index 5208387d3b9..f0379ad9693 100644
--- a/bin/plotting/pycbc_page_coinc_snrchi
+++ b/bin/plotting/pycbc_page_coinc_snrchi
@@ -8,7 +8,7 @@ import pylab, pycbc.results
from pycbc.io import (
get_chisq_from_file_choice, chisq_choices, SingleDetTriggers
)
-from pycbc import conversions
+from pycbc import conversions, init_logging, add_common_pycbc_options
from pycbc.detector import Detector
import pycbc.version
@@ -19,6 +19,7 @@ def snr_from_chisq(chisq, newsnr, q=6.):
return snr
parser = argparse.ArgumentParser()
+add_common_pycbc_options(parser)
parser.add_argument("--version", action="version",
version=pycbc.version.git_verbose_msg)
parser.add_argument('--found-injection-file', required=True,
@@ -47,6 +48,7 @@ parser.add_argument('--chisq-choice', choices=chisq_choices,
parser.add_argument('--output-file', required=True)
args = parser.parse_args()
+init_logging(args.verbose)
# First - check the IFO being used
with h5py.File(args.single_trigger_file, 'r') as stf:
ifo = tuple(stf.keys())[0]
diff --git a/bin/plotting/pycbc_page_dq_table b/bin/plotting/pycbc_page_dq_table
index 83b39b01569..db9fba5200d 100644
--- a/bin/plotting/pycbc_page_dq_table
+++ b/bin/plotting/pycbc_page_dq_table
@@ -10,14 +10,16 @@ import pycbc
import pycbc.results
from pycbc.version import git_verbose_msg as version
-parser = argparse.ArgumentParser()
+parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version", action="version", version=version)
parser.add_argument('--ifo', required=True)
parser.add_argument('--dq-file', required=True)
-parser.add_argument('--verbose', action='count')
parser.add_argument('--output-file')
args = parser.parse_args()
+pycbc.init_logging(args.verbose)
+
dq_states = {
'dq_state_0': 'Clean',
'dq_state_1': 'DQ Flag',
diff --git a/bin/plotting/pycbc_page_foreground b/bin/plotting/pycbc_page_foreground
index b33adcb426a..2b1f69b2ed0 100755
--- a/bin/plotting/pycbc_page_foreground
+++ b/bin/plotting/pycbc_page_foreground
@@ -9,6 +9,7 @@ import argparse
import logging
import numpy
import h5py
+
import pycbc
import pycbc.results
import pycbc.version
@@ -17,12 +18,12 @@ from pycbc.pnutils import mass1_mass2_to_mchirp_eta
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version", action="version",
version=pycbc.version.git_verbose_msg)
parser.add_argument('--trigger-file', required=True)
parser.add_argument('--bank-file', required=True)
parser.add_argument('--single-detector-triggers', nargs='+')
-parser.add_argument('--verbose', action='count')
parser.add_argument('--output-file', required=True)
parser.add_argument('--num-to-write', type=int)
parser.add_argument('--use-hierarchical-level', type=int,
diff --git a/bin/plotting/pycbc_page_foundmissed b/bin/plotting/pycbc_page_foundmissed
index e98e0982a58..8b225ff8c1c 100644
--- a/bin/plotting/pycbc_page_foundmissed
+++ b/bin/plotting/pycbc_page_foundmissed
@@ -1,12 +1,19 @@
#!/usr/bin/env python
""" Plot found and missed injections.
"""
-import h5py, numpy, logging, os.path, argparse, sys
-import matplotlib; matplotlib.use('Agg')
+import h5py
+import numpy
+import logging
+import os.path
+import argparse
+import sys
+import matplotlib
+matplotlib.use('Agg')
import matplotlib.pyplot as plot
+
import pycbc.results.followup, pycbc.pnutils, pycbc.results, pycbc.version
import pycbc.pnutils
-from pycbc import init_logging
+from pycbc import init_logging, add_common_pycbc_options
from pycbc.detector import Detector
labels={'mchirp': 'Chirp Mass',
@@ -26,6 +33,7 @@ labels={'mchirp': 'Chirp Mass',
}
parser = argparse.ArgumentParser(description=__doc__)
+add_common_pycbc_options(parser)
parser.add_argument('--injection-file',
help="The hdf injection file to plot", required=True)
parser.add_argument('--axis-type', default='mchirp', choices=['mchirp',
@@ -41,7 +49,6 @@ parser.add_argument('--plot-all-distance', action='store_true', default=False,
"the plot will be truncated below 1")
parser.add_argument('--colormap',default='cividis_r',
help="Type of colormap to be used for the plots.")
-parser.add_argument('--verbose', action='count')
parser.add_argument('--log-distance', action='store_true', default=False)
parser.add_argument('--dynamic', action='store_true', default=False)
parser.add_argument('--gradient-far', action='store_true',
diff --git a/bin/plotting/pycbc_page_ifar b/bin/plotting/pycbc_page_ifar
index fb33964dd2c..7dbf492e1e6 100644
--- a/bin/plotting/pycbc_page_ifar
+++ b/bin/plotting/pycbc_page_ifar
@@ -19,14 +19,18 @@ import argparse
import h5py
import numpy
import sys
-import matplotlib as mpl; mpl.use('Agg')
+import copy
+import matplotlib as mpl
+mpl.use('Agg')
import pylab
+
+from ligo import segments
+
+from pycbc import init_logging, add_common_pycbc_options
import pycbc.results
import pycbc.version
-import copy
from pycbc.events import veto
from pycbc import conversions as conv
-from ligo import segments
def calculate_time_slide_duration(pifo_segments, fifo_segments, offset=0):
''' Returns the amount of coincident time between two segmentlists.
@@ -47,6 +51,7 @@ parser = argparse.ArgumentParser(usage='pycbc_page_ifar [--options]',
description='Plots a cumulative histogram of IFAR for'
'coincident foreground triggers and a subset of'
'the coincident time slide triggers.')
+add_common_pycbc_options(parser)
parser.add_argument('--version', action='version',
version=pycbc.version.git_verbose_msg)
parser.add_argument('--trigger-file', type=str, required=True,
@@ -88,6 +93,8 @@ parser.add_argument('--open-box', action='store_true', default=False,
help='Show the foreground triggers on the output plot. ')
opts = parser.parse_args()
+init_logging(opts.verbose)
+
# read file
fp = h5py.File(opts.trigger_file, 'r')
diff --git a/bin/plotting/pycbc_page_injtable b/bin/plotting/pycbc_page_injtable
index fe4ea65b359..18afa62785e 100644
--- a/bin/plotting/pycbc_page_injtable
+++ b/bin/plotting/pycbc_page_injtable
@@ -1,22 +1,36 @@
#!/usr/bin/env python
""" Make a table of found injection information
"""
-import argparse, h5py, numpy as np, pycbc.results, pycbc.detector, sys
-from pycbc.types import MultiDetOptionAction
-import pycbc.pnutils, pycbc.events
-import pycbc.version
+import argparse
+import h5py
+import numpy as np
+import sys
from itertools import combinations
+import pycbc.results
+import pycbc.detector
+import pycbc.pnutils
+import pycbc.events
+import pycbc.version
+from pycbc import add_common_pycbc_options, init_logging
+from pycbc.types import MultiDetOptionAction
+
-parser = argparse.ArgumentParser()
-parser.add_argument("--version", action="version", version=pycbc.version.git_verbose_msg)
-parser.add_argument('--injection-file', help='HDF File containing the matched injections')
-parser.add_argument('--single-trigger-files', nargs='*', help="HDF format single detector trigger files", action=MultiDetOptionAction)
-parser.add_argument('--verbose', action='count')
+parser = argparse.ArgumentParser(description=__doc__)
+add_common_pycbc_options(parser)
+parser.add_argument("--version", action="version",
+ version=pycbc.version.git_verbose_msg)
+parser.add_argument('--injection-file',
+ help='HDF File containing the matched injections')
+parser.add_argument('--single-trigger-files', nargs='*',
+ action=MultiDetOptionAction,
+ help="HDF format single detector trigger files")
parser.add_argument('--show-missed', action='store_true')
parser.add_argument('--output-file')
args = parser.parse_args()
+init_logging(args.verbose)
+
f = h5py.File(args.injection_file,'r')
inj = f['injections']
found_cols, found_names, found_formats = [], [], []
diff --git a/bin/plotting/pycbc_page_recovery b/bin/plotting/pycbc_page_recovery
index 0fef2fb86c4..e8ba399de6b 100644
--- a/bin/plotting/pycbc_page_recovery
+++ b/bin/plotting/pycbc_page_recovery
@@ -10,7 +10,7 @@ from pycbc import pnutils, results
from pycbc.events import triggers
parser = argparse.ArgumentParser()
-parser.add_argument("--verbose", action="count")
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version", action="version",
version=pycbc.version.git_verbose_msg)
parser.add_argument("--injection-file", required=True,
@@ -55,9 +55,7 @@ if args.x_param is not None and "v_param" not in args.plot_type:
if "v_param" in args.plot_type and args.x_param is None:
raise RuntimeError("Need an --x-param to plot errors against!")
-if args.verbose:
- log_level = logging.INFO
- logging.basicConfig(format="%(asctime)s : %(message)s", level=log_level)
+pycbc.init_logging(args.verbose)
logging.info("Reading data...")
injs = h5py.File(args.injection_file, "r")
diff --git a/bin/plotting/pycbc_page_segments b/bin/plotting/pycbc_page_segments
index bf74ba01bb8..447812b9713 100644
--- a/bin/plotting/pycbc_page_segments
+++ b/bin/plotting/pycbc_page_segments
@@ -1,21 +1,31 @@
#!/usr/bin/env python
""" Make interactive visualization of segments
"""
-import argparse, pycbc.version
+import argparse
from itertools import cycle
import matplotlib
matplotlib.use('Agg')
-import numpy, pylab, pycbc.events, mpld3, mpld3.plugins
+import numpy
+import pylab
+import mpld3
+import mpld3.plugins
from matplotlib.patches import Rectangle
+
+import pycbc.version
+import pycbc.events
from pycbc.results.mpld3_utils import MPLSlide, Tooltip
parser = argparse.ArgumentParser()
-parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg)
-parser.add_argument('--segment-files', nargs='+', help="List of segment files to plot")
+pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version',
+ version=pycbc.version.git_verbose_msg)
+parser.add_argument('--segment-files', nargs='+',
+ help="List of segment files to plot")
parser.add_argument('--output-file', help="output html file")
args = parser.parse_args()
+pycbc.init_logging(args.verbose)
def timestr(s):
t = ""
diff --git a/bin/plotting/pycbc_page_segplot b/bin/plotting/pycbc_page_segplot
index e17cfeac3ec..17172be0ba8 100644
--- a/bin/plotting/pycbc_page_segplot
+++ b/bin/plotting/pycbc_page_segplot
@@ -24,6 +24,8 @@ import itertools, datetime, time
import sys
from itertools import cycle
from matplotlib.patches import Rectangle
+
+from pycbc import add_common_pycbc_options, init_logging
from pycbc.events.veto import get_segment_definer_comments
from pycbc.results.color import ifo_color
from pycbc.results.mpld3_utils import MPLSlide, LineTooltip
@@ -32,6 +34,7 @@ import pycbc.version
# parse command line
parser = argparse.ArgumentParser()
+add_common_pycbc_options(parser)
parser.add_argument("--version", action="version", version=pycbc.version.git_verbose_msg)
parser.add_argument('--segment-files', type=str, nargs="+",
help='XML files with a segment definer table to read.')
@@ -43,6 +46,8 @@ parser.add_argument('--ifos', nargs='+', default=['H1', 'L1'],
help='Space-separated list of IFOs to plot, default H1 L1.')
opts = parser.parse_args()
+init_logging(opts.verbose)
+
def timestr(s):
""" Takes seconds and returns a human-readable string for the amount
of time.
diff --git a/bin/plotting/pycbc_page_segtable b/bin/plotting/pycbc_page_segtable
index ab0534bb639..08b0597d657 100644
--- a/bin/plotting/pycbc_page_segtable
+++ b/bin/plotting/pycbc_page_segtable
@@ -21,12 +21,14 @@ import logging
import numpy
import pycbc.results
import sys
+import itertools
+
from ligo import segments
+
from pycbc.events.veto import get_segment_definer_comments
from pycbc.results import save_fig_with_metadata
from pycbc.workflow import SegFile
import pycbc.version
-import itertools
def powerset_ifos(ifo_set):
combo_set = []
@@ -36,6 +38,7 @@ def powerset_ifos(ifo_set):
# parse command line
parser = argparse.ArgumentParser()
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version", action="version",
version=pycbc.version.git_verbose_msg)
parser.add_argument('--segment-files', type=str, nargs="+",
@@ -53,8 +56,7 @@ parser.add_argument('--ifos', nargs='+', default=['H1', 'L1'],
opts = parser.parse_args()
# setup log
-logging.basicConfig(format='%(asctime)s:%(levelname)s : %(message)s',
- level=logging.INFO,datefmt='%I:%M:%S')
+pycbc.init_logging(opts.verbose)
# create list of combinations of detectors
ifo_combinations = powerset_ifos(opts.ifos)
diff --git a/bin/plotting/pycbc_page_sensitivity b/bin/plotting/pycbc_page_sensitivity
index 37e591ce126..653d3af9b91 100755
--- a/bin/plotting/pycbc_page_sensitivity
+++ b/bin/plotting/pycbc_page_sensitivity
@@ -1,16 +1,27 @@
#!/usr/bin/python
""" Plot search sensitivity as a function of significance.
"""
-import argparse, h5py, numpy, logging, matplotlib, sys
+import argparse
+import h5py
+import numpy
+import logging
+import matplotlib
+import sys
matplotlib.use('Agg')
from matplotlib.pyplot import cm
-import pylab, pycbc.pnutils, pycbc.results, pycbc, pycbc.version
+import pylab
+
+import pycbc.pnutils
+import pycbc.results
+import pycbc
+import pycbc.version
from pycbc import sensitivity
from pycbc import conversions as conv
parser = argparse.ArgumentParser(description=__doc__)
-parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg)
-parser.add_argument('--verbose', action='count')
+pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version',
+ version=pycbc.version.git_verbose_msg)
parser.add_argument('--injection-file', nargs='+',
help="Required. HDF format injection result file or space "
"separated list of files")
@@ -86,9 +97,7 @@ if args.integration_method == 'mc' and (args.distance_param is None or \
if args.integration_method == 'mc' and args.limits_param is None:
args.limits_param = args.distance_param
-if args.verbose:
- log_level = logging.INFO
- logging.basicConfig(format='%(asctime)s : %(message)s', level=log_level)
+pycbc.init_logging(args.verbose)
logging.info('Read in the data')
diff --git a/bin/plotting/pycbc_page_snrchi b/bin/plotting/pycbc_page_snrchi
index 62ed1eeb087..09fb15abd11 100644
--- a/bin/plotting/pycbc_page_snrchi
+++ b/bin/plotting/pycbc_page_snrchi
@@ -1,20 +1,31 @@
#!/usr/bin/env python
-import numpy, h5py, argparse, matplotlib, sys
import logging
+import numpy
+import h5py
+import argparse
+import matplotlib
+import sys
matplotlib.use('Agg')
-import pylab, pycbc.results, pycbc.version
+import pylab
+
+import pycbc.results
+import pycbc.version
from pycbc.events import veto
from pycbc.io import (
get_chisq_from_file_choice, chisq_choices, SingleDetTriggers
)
parser = argparse.ArgumentParser()
+pycbc.add_common_pycbc_options(parser)
parser.add_argument('--trigger-file', help='Single ifo trigger file')
-parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg)
-parser.add_argument('--veto-file', help='Optional, file of veto segments to remove triggers')
+parser.add_argument('--version', action='version',
+ version=pycbc.version.git_verbose_msg)
+parser.add_argument('--veto-file',
+ help='Optional, file of veto segments to remove triggers')
parser.add_argument('--segment-name', default=None, type=str,
help='Optional, name of segment list to use for vetoes')
-parser.add_argument('--min-snr', type=float, help='Optional, Minimum SNR to plot')
+parser.add_argument('--min-snr', type=float,
+ help='Optional, Minimum SNR to plot')
parser.add_argument('--output-file')
parser.add_argument(
'--newsnr-contours',
@@ -27,7 +38,8 @@ parser.add_argument('--chisq-choice', choices=chisq_choices,
help='Which chisquared to plot. Default=traditional')
args = parser.parse_args()
-pycbc.init_logging(1)
+pycbc.init_logging(args.verbose, default_level=1)
+
if args.newsnr_contours is not None and not args.chisq_choice == 'traditional':
parser.error(
diff --git a/bin/plotting/pycbc_page_snrifar b/bin/plotting/pycbc_page_snrifar
index b5f317c389b..07ee90f3f28 100644
--- a/bin/plotting/pycbc_page_snrifar
+++ b/bin/plotting/pycbc_page_snrifar
@@ -6,10 +6,14 @@
import argparse, h5py, numpy, logging, sys
import matplotlib
matplotlib.use('Agg')
-import pylab, pycbc.results, pycbc.version
-from pycbc import conversions as conv
+import pylab
+
from scipy.special import erfc, erfinv
+import pycbc.results
+import pycbc.version
+from pycbc import conversions as conv
+
def sigma_from_p(p):
return - erfinv(1 - (1 - p) * 2) * 2**0.5
@@ -35,9 +39,10 @@ far_from_p = numpy.vectorize(_far_from_p)
parser = argparse.ArgumentParser()
# General required options
-parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg)
+pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version',
+ version=pycbc.version.git_verbose_msg)
parser.add_argument('--trigger-file')
-parser.add_argument('--verbose', action='count')
parser.add_argument('--output-file')
parser.add_argument('--not-cumulative', action='store_true')
parser.add_argument('--trials-factor', type=int, default=1,
@@ -79,9 +84,7 @@ parser.add_argument('--ymax', type=float,
'(in units of 1/years)')
args = parser.parse_args()
-if args.verbose:
- log_level = logging.INFO
- logging.basicConfig(format='%(asctime)s : %(message)s', level=log_level)
+pycbc.init_logging(args.verbose)
logging.info('Read in the data')
f = h5py.File(args.trigger_file, 'r')
diff --git a/bin/plotting/pycbc_page_snrratehist b/bin/plotting/pycbc_page_snrratehist
index 71dea017124..f772c9c3bc8 100755
--- a/bin/plotting/pycbc_page_snrratehist
+++ b/bin/plotting/pycbc_page_snrratehist
@@ -3,13 +3,21 @@
Also has the ability to plot inclusive backgrounds from different stages
of hierarchical removal.
"""
-import argparse, h5py, numpy, logging, sys
+import argparse
+import h5py
+import numpy
+import logging
+import sys
import matplotlib
matplotlib.use('Agg')
-import pylab, pycbc.results, pycbc.version
-from pycbc import conversions as conv
+import pylab
+
from scipy.special import erf, erfinv
+import pycbc.results
+import pycbc.version
+from pycbc import conversions as conv
+
def sigma_from_p(p):
return - erfinv(1 - (1 - p) * 2) * 2**0.5
@@ -19,9 +27,10 @@ def p_from_sigma(sig):
parser = argparse.ArgumentParser()
# General required options
-parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg)
+pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version',
+ version=pycbc.version.git_verbose_msg)
parser.add_argument('--trigger-file')
-parser.add_argument('--verbose', action='count')
parser.add_argument('--output-file')
parser.add_argument('--bin-size', type=float)
parser.add_argument('--x-min', type=float)
@@ -43,6 +52,8 @@ parser.add_argument('--closed-box', action='store_true',
'foreground triggers')
args = parser.parse_args()
+pycbc.init_logging(args.verbose)
+
logging.info('Read in the data')
f = h5py.File(args.trigger_file, 'r')
@@ -82,10 +93,6 @@ if h_inc_back_num > h_iterations:
# Exit the code successfully and bypass the rest of the plotting code.
sys.exit(0)
-if args.verbose:
- log_level = logging.INFO
- logging.basicConfig(format='%(asctime)s : %(message)s', level=log_level)
-
if args.closed_box:
fstat = None
else:
diff --git a/bin/plotting/pycbc_page_template_bin_table b/bin/plotting/pycbc_page_template_bin_table
index ef2f7ffb2aa..68fec14ec6a 100644
--- a/bin/plotting/pycbc_page_template_bin_table
+++ b/bin/plotting/pycbc_page_template_bin_table
@@ -11,13 +11,15 @@ import pycbc.results
from pycbc.version import git_verbose_msg as version
parser = argparse.ArgumentParser()
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version", action="version", version=version)
parser.add_argument('--ifo', required=True)
parser.add_argument('--dq-file', required=True)
-parser.add_argument('--verbose', action='count')
parser.add_argument('--output-file')
args = parser.parse_args()
+pycbc.init_logging(args.verbose)
+
f = h5.File(args.dq_file, 'r')
grp = f[args.ifo]['bins']
bin_names = list(grp.keys())
diff --git a/bin/plotting/pycbc_page_versioning b/bin/plotting/pycbc_page_versioning
index 387d58af9b9..74fb09d048a 100755
--- a/bin/plotting/pycbc_page_versioning
+++ b/bin/plotting/pycbc_page_versioning
@@ -6,16 +6,18 @@ information for a set of libraries and executables in
pycbc results pages
"""
-import argparse, logging
+import argparse
+import logging
+
import pycbc.version
-from pycbc import init_logging
+from pycbc import init_logging, add_common_pycbc_options
from pycbc.results import (save_fig_with_metadata, html_escape,
get_library_version_info, get_code_version_numbers)
parser = argparse.ArgumentParser()
+add_common_pycbc_options(parser)
parser.add_argument('--version', action="version",
version=pycbc.version.git_verbose_msg)
-parser.add_argument('--verbose', action='store_true')
parser.add_argument('--executables', nargs='+', required=True,
help="List of executables to provide version "
"information for")
diff --git a/bin/plotting/pycbc_page_vetotable b/bin/plotting/pycbc_page_vetotable
index 457047fa410..8ae26f8d062 100644
--- a/bin/plotting/pycbc_page_vetotable
+++ b/bin/plotting/pycbc_page_vetotable
@@ -21,10 +21,12 @@
import argparse
import logging
import numpy
-import pycbc.results
import sys
+
from ligo.lw import lsctables
from ligo.lw import utils
+
+import pycbc.results
from pycbc.results import save_fig_with_metadata
import pycbc.version
from pycbc.io.ligolw import LIGOLWContentHandler
@@ -33,6 +35,7 @@ from pycbc.io.ligolw import LIGOLWContentHandler
parser = argparse.ArgumentParser(description=__doc__)
# add command line options
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version", action="version", version=pycbc.version.git_verbose_msg)
parser.add_argument('--veto-definer-file', type=str,
help='XML files with a veto_definer table to read.')
@@ -43,8 +46,7 @@ parser.add_argument('--output-file', type=str,
opts = parser.parse_args()
# setup log
-logging.basicConfig(format='%(asctime)s:%(levelname)s, %(message)s',
- level=logging.INFO,datefmt='%I:%M:%S')
+pycbc.init_logging(opts.verbose, default_level=1)
# set column names
columns = (('Category', []),
diff --git a/bin/plotting/pycbc_plot_Nth_loudest_coinc_omicron.py b/bin/plotting/pycbc_plot_Nth_loudest_coinc_omicron.py
index 4cb509ac9f4..c526891e79f 100644
--- a/bin/plotting/pycbc_plot_Nth_loudest_coinc_omicron.py
+++ b/bin/plotting/pycbc_plot_Nth_loudest_coinc_omicron.py
@@ -9,18 +9,22 @@
import numpy as np
import argparse
import glob
-from ligo.lw import lsctables, utils
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
+
+from ligo.lw import lsctables, utils
+
import pycbc.events
-from pycbc.waveform import get_td_waveform, frequency_from_polarizations, amplitude_from_polarizations
+from pycbc.waveform import (
+ get_td_waveform, frequency_from_polarizations,
+ amplitude_from_polarizations
+)
from pycbc.io.ligolw import LIGOLWContentHandler
-logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
-
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument('--coinc-file', type=str, required=True,
help='HDF file containing coincident CBC triggers')
parser.add_argument('--single-ifo-trigs', type=str, required=True,
@@ -46,6 +50,8 @@
help='Designates which level of the analysis output to search')
args = parser.parse_args()
+pycbc.init_logging(args.verbose)
+
logging.info('Reading HDF files')
coinc_trig_file = h5py.File(args.coinc_file,'r')
diff --git a/bin/plotting/pycbc_plot_background_coincs b/bin/plotting/pycbc_plot_background_coincs
index 2dc9ee65357..ee490285f00 100644
--- a/bin/plotting/pycbc_plot_background_coincs
+++ b/bin/plotting/pycbc_plot_background_coincs
@@ -1,17 +1,23 @@
#!/usr/bin/env python
""" Plot PyCBC's background coinc triggers
"""
-import numpy, h5py, argparse
-import matplotlib; matplotlib.use('Agg')
+import numpy
+import h5py
+import argparse
+import matplotlib
+matplotlib.use('Agg')
from matplotlib.colors import LogNorm
from matplotlib.ticker import LogLocator
import pylab
+from pycbc import add_common_pycbc_options, init_logging
+
def get_var(data, name):
if name in data:
return data[name][:]
parser = argparse.ArgumentParser(description=__doc__)
+add_common_pycbc_options(parser)
parser.add_argument('--coinc-file', help="Coincident trigger file. The result"
" of pycbc_coinc_statmap ")
parser.add_argument('--x-var', type=str, required=True,
@@ -27,7 +33,9 @@ parser.add_argument('--grid-size', default=100, help="Number of hexbins", type=i
parser.add_argument('--dpi', type=int, default=200)
parser.add_argument('--output-file')
args = parser.parse_args()
-
+
+init_logging(args.verbose)
+
f = h5py.File(args.coinc_file)
bdata = f['background_exc']
x = get_var(bdata, args.x_var)
diff --git a/bin/plotting/pycbc_plot_bank_bins b/bin/plotting/pycbc_plot_bank_bins
index 24d9095a29c..740fbfdf40c 100644
--- a/bin/plotting/pycbc_plot_bank_bins
+++ b/bin/plotting/pycbc_plot_bank_bins
@@ -5,12 +5,14 @@ import sys
import argparse
import h5py
import numpy
-import matplotlib; matplotlib.use('Agg')
+import matplotlib
+matplotlib.use('Agg')
import pylab
import inspect
+from itertools import cycle
+
import pycbc.events, pycbc.pnutils, pycbc.conversions, pycbc.results
import pycbc.version
-from itertools import cycle
class H5BankFile(h5py.File):
@@ -76,13 +78,17 @@ class H5BankFile(h5py.File):
parser = argparse.ArgumentParser()
-parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg)
+pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version',
+ version=pycbc.version.git_verbose_msg)
parser.add_argument('--bank-file', help='hdf format template bank file',
required=True)
-parser.add_argument('--background-bins', nargs='+', help='list of background bin format strings')
+parser.add_argument('--background-bins', nargs='+',
+ help='list of background bin format strings')
parser.add_argument('--f-lower', type=float,
- help="Lower frequency cutoff for evaluating template duration. Should"
- " be equal to the lower cutoff used in inspiral jobs")
+ help="Lower frequency cutoff for evaluating template "
+ "duration. Should be equal to the lower cutoff "
+ "used in inspiral jobs")
parser.add_argument('--output-file', help='output file', required=True)
parser.add_argument('--x-var', type=str, choices=H5BankFile.get_param_names(),
default='mass1',
@@ -96,6 +102,8 @@ parser.add_argument('--log-y', action='store_true',
help='Make y-axis logarithmic')
args = parser.parse_args()
+pycbc.init_logging(args.verbose)
+
bank = H5BankFile(args.bank_file, 'r')
f_lower = args.f_lower or bank['f_lower'][:]
diff --git a/bin/plotting/pycbc_plot_bank_corner b/bin/plotting/pycbc_plot_bank_corner
index f483424430d..236f4d1e7e4 100644
--- a/bin/plotting/pycbc_plot_bank_corner
+++ b/bin/plotting/pycbc_plot_bank_corner
@@ -28,7 +28,7 @@ import argparse
import logging
from textwrap import wrap
-from pycbc import init_logging
+import pycbc
import pycbc.version
from pycbc import __version__
from pycbc.results.plot import (add_style_opt_to_parser, set_style_from_cli)
@@ -51,13 +51,11 @@ parameter_options = conversion_options + _fit_parameters
parser = argparse.ArgumentParser(usage='pycbc_plot_bank_corner [--options]',
description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version",
action="version",
version=__version__,
help="Prints version information.")
-parser.add_argument("--verbose",
- action='count',
- help="Output progress and information to stderr")
parser.add_argument("--bank-file",
required=True,
help="The bank file to read in and plot")
@@ -129,7 +127,7 @@ parser.add_argument('--title',
add_style_opt_to_parser(parser)
args = parser.parse_args()
-init_logging(args.verbose)
+pycbc.init_logging(args.verbose)
set_style_from_cli(args)
mins, maxs = option_utils.plot_ranges_from_cli(args)
diff --git a/bin/plotting/pycbc_plot_dq_flag_likelihood b/bin/plotting/pycbc_plot_dq_flag_likelihood
index ed4a6a1231f..4f2b7661b45 100644
--- a/bin/plotting/pycbc_plot_dq_flag_likelihood
+++ b/bin/plotting/pycbc_plot_dq_flag_likelihood
@@ -8,13 +8,14 @@ import pycbc
import h5py
from matplotlib import use as matplotlib_use
+matplotlib_use('Agg')
from matplotlib import pyplot
+
from pycbc.version import git_verbose_msg as version
import pycbc.results
-matplotlib_use('Agg')
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument('--version', action='version', version=version)
-parser.add_argument('--verbose', action="store_true")
parser.add_argument("--dq-file", required=True)
parser.add_argument("--dq-label", required=True)
parser.add_argument("--ifo", type=str, required=True)
diff --git a/bin/plotting/pycbc_plot_dq_likelihood_vs_time b/bin/plotting/pycbc_plot_dq_likelihood_vs_time
index 22500362c83..c52ec93b053 100644
--- a/bin/plotting/pycbc_plot_dq_likelihood_vs_time
+++ b/bin/plotting/pycbc_plot_dq_likelihood_vs_time
@@ -7,14 +7,16 @@ import argparse
import numpy
import pycbc
import h5py
-from matplotlib import use; use('Agg')
+from matplotlib import use
+use('Agg')
from matplotlib import pyplot
+
from pycbc.version import git_verbose_msg as version
import pycbc.results
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument('--version', action='version', version=version)
-parser.add_argument('--verbose', action="store_true")
parser.add_argument("--ifo", type=str, required=True)
parser.add_argument("--dq-file", required=True)
parser.add_argument('--background-bin', default='all_bin')
diff --git a/bin/plotting/pycbc_plot_dq_percentiles b/bin/plotting/pycbc_plot_dq_percentiles
index ad7ab402e4d..9efa433214e 100644
--- a/bin/plotting/pycbc_plot_dq_percentiles
+++ b/bin/plotting/pycbc_plot_dq_percentiles
@@ -7,14 +7,16 @@ import argparse
import numpy
import pycbc
import h5py
-from matplotlib import use; use('Agg')
+from matplotlib import use
+use('Agg')
from matplotlib import pyplot
+
from pycbc.version import git_verbose_msg as version
import pycbc.results
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument('--version', action='version', version=version)
-parser.add_argument('--verbose', action="store_true")
parser.add_argument("--ifo", type=str,required=True)
parser.add_argument("--dq-file", required=True)
parser.add_argument('--background-bin', default='all_bin')
diff --git a/bin/plotting/pycbc_plot_gate_triggers b/bin/plotting/pycbc_plot_gate_triggers
index ec54c00d204..cfd63428044 100755
--- a/bin/plotting/pycbc_plot_gate_triggers
+++ b/bin/plotting/pycbc_plot_gate_triggers
@@ -20,13 +20,15 @@ import h5py
import numpy
import logging
import argparse
-from pycbc.events import ranking
-from matplotlib import use; use('Agg')
+from matplotlib import use
+use('Agg')
import matplotlib.pyplot as plt
+from pycbc import add_common_pycbc_options, init_logging
+from pycbc.events import ranking
-parser = argparse.ArgumentParser()
-parser.add_argument('--verbose', action='store_true')
+parser = argparse.ArgumentParser(description=__doc__)
+add_common_pycbc_options(parser)
parser.add_argument('--single-trigger-files', nargs='+',
help='HDF format single detector merged trigger file(s) path. '
'All triggers are combined in the histogram')
@@ -53,11 +55,7 @@ parser.add_argument('--output-file',
help='Plot file name (optional).')
args = parser.parse_args()
-if args.verbose:
- log_level = logging.INFO
-else:
- log_level = logging.WARN
-logging.basicConfig(format='%(asctime)s : %(message)s', level=log_level)
+init_logging(args.verbose)
if args.gating_type:
if args.gating_type == 'auto':
diff --git a/bin/plotting/pycbc_plot_gating b/bin/plotting/pycbc_plot_gating
index e22746baea1..29a72e4ab64 100644
--- a/bin/plotting/pycbc_plot_gating
+++ b/bin/plotting/pycbc_plot_gating
@@ -12,12 +12,14 @@ import pylab as pl
from matplotlib.patches import Rectangle
import mpld3
import mpld3.plugins
+
from pycbc.results.color import ifo_color
from pycbc.results.mpld3_utils import MPLSlide
import pycbc.version
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version", action="version",
version=pycbc.version.git_verbose_msg)
parser.add_argument('--input-file', nargs='+', required=True,
@@ -27,9 +29,7 @@ parser.add_argument('--output-file', required=True,
help='Destination file for the plot.')
args = parser.parse_args()
-log_fmt = '%(asctime)s %(message)s'
-log_date_fmt = '%Y-%m-%d %H:%M:%S'
-logging.basicConfig(level=logging.INFO, format=log_fmt, datefmt=log_date_fmt)
+pycbc.init_logging(args.verbose)
gate_data = {}
have_gates = False
diff --git a/bin/plotting/pycbc_plot_hist b/bin/plotting/pycbc_plot_hist
index b252c82b430..b3cf4172a43 100644
--- a/bin/plotting/pycbc_plot_hist
+++ b/bin/plotting/pycbc_plot_hist
@@ -2,14 +2,24 @@
""" Make histograms of single detector triggers
"""
-import numpy, argparse, h5py, logging, sys
-import pycbc.version, pycbc.results, pycbc.io
+import numpy
+import argparse
+import h5py
+import logging
+import sys
from itertools import cycle
-from matplotlib import use; use('Agg')
+from matplotlib import use
+use('Agg')
from matplotlib import pyplot
+
+import pycbc
+import pycbc.version
+import pycbc.results
+import pycbc.io
from pycbc.events import background_bin_from_string, veto, ranking
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg)
parser.add_argument('--trigger-file', required=True,
help="Combined single detector hdf trigger file")
@@ -32,7 +42,6 @@ parser.add_argument('--x-min', type=float, default=6,
parser.add_argument('--special-time', type=float,
help="plot triggers within +-1s of a given time in a "
"different color (black)")
-parser.add_argument('--verbose')
args = parser.parse_args()
# sanity check command line options
diff --git a/bin/plotting/pycbc_plot_multiifo_dtphase b/bin/plotting/pycbc_plot_multiifo_dtphase
index 2918e10d0a0..e09db6fbeea 100755
--- a/bin/plotting/pycbc_plot_multiifo_dtphase
+++ b/bin/plotting/pycbc_plot_multiifo_dtphase
@@ -15,12 +15,19 @@
Script to plot the output of pycbc_multiifo_dtphase in a corner plot
"""
-import matplotlib, h5py, numpy as np, copy
-import itertools, sys, logging, argparse
+import matplotlib
+import h5py
+import numpy as np
+import copy
+import itertools
+import sys
+import logging
+import argparse
matplotlib.use('agg')
from matplotlib import pyplot as plt
+
from pycbc.events import coinc_rate
-from pycbc import init_logging, version
+from pycbc import init_logging, version, add_common_pycbc_options
from pycbc.results import save_fig_with_metadata
def marginalise_pdf(pdf, dimensions_to_keep):
@@ -35,9 +42,9 @@ def marginalise_pdf(pdf, dimensions_to_keep):
return mrg_pdf
parser = argparse.ArgumentParser()
+add_common_pycbc_options(parser)
parser.add_argument('--version', action="version",
version=version.git_verbose_msg)
-parser.add_argument('--verbose', action='store_true')
parser.add_argument('--input-file', required=True,
help="Input phasetd histogram file, made using "
"pycbc_multiifo_dtphase")
diff --git a/bin/plotting/pycbc_plot_psd_file b/bin/plotting/pycbc_plot_psd_file
index 8015d70e146..d80d3960b1d 100644
--- a/bin/plotting/pycbc_plot_psd_file
+++ b/bin/plotting/pycbc_plot_psd_file
@@ -3,12 +3,20 @@
"""
import matplotlib
matplotlib.use('Agg')
-import h5py, numpy, argparse, pylab, pycbc.results, sys
+import h5py
+import numpy
+import argparse
+import pylab
+import sys
+
+import pycbc
+import pycbc.results
import pycbc.psd
import pycbc.version
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version", action="version",
version=pycbc.version.git_verbose_msg)
parser.add_argument("--psd-files", nargs='+', required=True,
@@ -40,6 +48,8 @@ pycbc.psd.insert_psd_option_group(parser, output=False)
pycbc.results.add_style_opt_to_parser(parser)
args = parser.parse_args()
+pycbc.init_logging(args.verbose)
+
# set the matplotlib style
pycbc.results.set_style_from_cli(args)
diff --git a/bin/plotting/pycbc_plot_psd_timefreq b/bin/plotting/pycbc_plot_psd_timefreq
index 9aae35c2c9d..00d2613f302 100644
--- a/bin/plotting/pycbc_plot_psd_timefreq
+++ b/bin/plotting/pycbc_plot_psd_timefreq
@@ -29,10 +29,13 @@ import sys
import matplotlib
matplotlib.use('agg')
import pylab
-import pycbc.results
from matplotlib.colors import LogNorm
+import pycbc
+import pycbc.results
+
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument('--psd-file', required=True,
help='HDF file containing the PSDs.')
parser.add_argument('--output-file', required=True,
@@ -47,7 +50,6 @@ parser.add_argument('--reduction-method', choices=['min','max','mean','median'],
default='mean',
help='Method of reducing the ASD data into fewer'
' frequency bins to plot (default %(default)s).')
-parser.add_argument('--verbose', action='store_true')
opts = parser.parse_args()
pycbc.init_logging(opts.verbose)
diff --git a/bin/plotting/pycbc_plot_qscan b/bin/plotting/pycbc_plot_qscan
index 2b20c0ca3d6..ba4ba9b3cdf 100644
--- a/bin/plotting/pycbc_plot_qscan
+++ b/bin/plotting/pycbc_plot_qscan
@@ -46,6 +46,7 @@ def t_window(s):
raise argparse.ArgumentTypeError("Input must be start,end start,end")
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version", action="version",
version=pycbc.version.git_verbose_msg)
parser.add_argument('--output-file', required=True, help='Output plot')
@@ -114,7 +115,7 @@ parser.add_argument('--spin2z', type=float, default=0,
pycbc.strain.insert_strain_option_group(parser)
opts = parser.parse_args()
-logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
+pycbc.init_logging(opts.verbose, default_level=1)
if opts.center_time is None:
center_time = (opts.gps_start_time + opts.gps_end_time) / 2.
diff --git a/bin/plotting/pycbc_plot_range b/bin/plotting/pycbc_plot_range
index f2a0cd82c6f..765a45f2d1f 100644
--- a/bin/plotting/pycbc_plot_range
+++ b/bin/plotting/pycbc_plot_range
@@ -1,22 +1,40 @@
#!/usr/bin/env python
""" Plot variation in PSD
"""
-import matplotlib; matplotlib.use('Agg');
-import h5py, numpy, argparse, pylab, sys
-import pycbc.results, pycbc.types, pycbc.version, pycbc.waveform, pycbc.filter
+import matplotlib
+matplotlib.use('Agg')
+import h5py
+import logging
+import numpy
+import argparse
+import pylab
+import sys
+
+import pycbc.results
+import pycbc.types
+import pycbc.version
+import pycbc.waveform
+import pycbc.filter
from pycbc.fft.fftw import set_measure_level
set_measure_level(0)
parser = argparse.ArgumentParser(description=__doc__)
-parser.add_argument("--version", action='version', version=pycbc.version.git_verbose_msg)
+pycbc.add_common_pycbc_options(parser)
+parser.add_argument("--version", action='version',
+ version=pycbc.version.git_verbose_msg)
parser.add_argument("--psd-files", nargs='+', help='HDF file of psds')
parser.add_argument("--output-file", help='output file name')
-parser.add_argument("--mass1", nargs="+", help="Mass of first component in solar masses", type=float)
-parser.add_argument("--mass2", nargs="+", help="Mass of second component in solar masses", type=float)
-parser.add_argument("--approximant", nargs="+", help="approximant to use for range")
+parser.add_argument("--mass1", nargs="+", type=float,
+ help="Mass of first component in solar masses")
+parser.add_argument("--mass2", nargs="+", type=float,
+ help="Mass of second component in solar masses")
+parser.add_argument("--approximant", nargs="+",
+ help="approximant to use for range")
args = parser.parse_args()
+pycbc.init_logging(args.verbose)
+
canonical_snr = 8.0
fig = pylab.figure(0)
@@ -77,3 +95,5 @@ pycbc.results.save_fig_with_metadata(fig, args.output_file,
cmd = ' '.join(sys.argv),
fig_kwds={'dpi':200}
)
+
+logging.info("Done!")
diff --git a/bin/plotting/pycbc_plot_range_vs_mtot b/bin/plotting/pycbc_plot_range_vs_mtot
index c90958b349d..14cf6bfab92 100644
--- a/bin/plotting/pycbc_plot_range_vs_mtot
+++ b/bin/plotting/pycbc_plot_range_vs_mtot
@@ -4,22 +4,40 @@
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
-import h5py, numpy, argparse, sys, math
-import pycbc.results, pycbc.types, pycbc.version, pycbc.waveform, pycbc.filter
+import h5py
+import numpy
+import argparse
+import sys
+import math
+
+import pycbc.results
+import pycbc.types
+import pycbc.version
+import pycbc.waveform
+import pycbc.filter
from pycbc.fft.fftw import set_measure_level
set_measure_level(0)
parser = argparse.ArgumentParser(description=__doc__)
-parser.add_argument("--version", action='version', version=pycbc.version.git_verbose_msg)
-parser.add_argument("--psd-files", nargs='+', help='HDF file of psds')
+pycbc.add_common_pycbc_options(parser)
+parser.add_argument("--version", action='version',
+ version=pycbc.version.git_verbose_msg)
+parser.add_argument("--psd-files", nargs='+',
+ help='HDF file of psds')
parser.add_argument("--output-file", help='output file name')
-parser.add_argument("--min_mtot", nargs="+", help="Minimum total mass for range", type=float)
-parser.add_argument("--max_mtot", nargs="+", help="Maximum total mass for range", type=float)
-parser.add_argument("--d_mtot", nargs="+", help="Delta total mass for range ", type=float)
-parser.add_argument("--approximant", nargs="+", help="approximant to use for range")
+parser.add_argument("--min_mtot", nargs="+", type=float,
+ help="Minimum total mass for range")
+parser.add_argument("--max_mtot", nargs="+", type=float,
+ help="Maximum total mass for range")
+parser.add_argument("--d_mtot", nargs="+", type=float,
+ help="Delta total mass for range ")
+parser.add_argument("--approximant", nargs="+",
+ help="approximant to use for range")
args = parser.parse_args()
+pycbc.init_logging(args.verbose)
+
canonical_snr = 8.0
fig = plt.figure(0)
diff --git a/bin/plotting/pycbc_plot_singles_timefreq b/bin/plotting/pycbc_plot_singles_timefreq
index e8d12e9575a..a5222c90283 100644
--- a/bin/plotting/pycbc_plot_singles_timefreq
+++ b/bin/plotting/pycbc_plot_singles_timefreq
@@ -31,6 +31,8 @@ import pylab as pl
import matplotlib.mlab as mlab
from matplotlib.colors import LogNorm
from matplotlib.ticker import LogLocator
+
+import pycbc
from pycbc.io import HFile
import pycbc.events
import pycbc.pnutils
@@ -41,6 +43,7 @@ import pycbc.waveform
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version", action="version",
version=pycbc.version.git_verbose_msg)
parser.add_argument('--trig-file', required=True,
@@ -70,7 +73,7 @@ pycbc.waveform.bank.add_approximant_arg(parser,
pycbc.strain.insert_strain_option_group(parser)
opts = parser.parse_args()
-logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
+pycbc.init_logging(opts.verbose)
if opts.center_time is None:
center_time = (opts.gps_start_time + opts.gps_end_time) / 2.
diff --git a/bin/plotting/pycbc_plot_singles_vs_params b/bin/plotting/pycbc_plot_singles_vs_params
index f588689ddd4..0fa5620b68c 100644
--- a/bin/plotting/pycbc_plot_singles_vs_params
+++ b/bin/plotting/pycbc_plot_singles_vs_params
@@ -29,15 +29,18 @@ import pylab as pl
from matplotlib.colors import LogNorm
from matplotlib.ticker import LogLocator
import h5py
+import sys
+
+import pycbc
import pycbc.pnutils
import pycbc.events
import pycbc.results
import pycbc.io
-import sys
import pycbc.version
from pycbc.events import ranking
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version", action="version",
version=pycbc.version.git_verbose_msg)
parser.add_argument('--single-trig-file', required=True,
@@ -80,7 +83,7 @@ parser.add_argument('--min-z', type=float, help='Optional minimum z value')
parser.add_argument('--max-z', type=float, help='Optional maximum z value')
opts = parser.parse_args()
-logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
+pycbc.init_logging(opts.verbose)
if opts.z_var == 'density' or opts.min_z is None:
filter_rank = None
diff --git a/bin/plotting/pycbc_plot_throughput b/bin/plotting/pycbc_plot_throughput
index fd8a19d7fdb..c09c1ed6c01 100755
--- a/bin/plotting/pycbc_plot_throughput
+++ b/bin/plotting/pycbc_plot_throughput
@@ -4,13 +4,18 @@ import argparse
import logging
import h5py
import numpy as np
-import matplotlib; matplotlib.use('Agg')
+import matplotlib
+matplotlib.use('Agg')
import pylab as pl
+
+from scipy.stats import hmean
+
+import pycbc
from pycbc.results.color import ifo_color
import pycbc.version
-from scipy.stats import hmean
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version", action="version",
version=pycbc.version.git_verbose_msg)
parser.add_argument('--input-file', nargs='+', required=True,
@@ -21,6 +26,8 @@ parser.add_argument('--output-file', required=True,
parser.add_argument('--duration-weighted', action="store_true")
args = parser.parse_args()
+pycbc.init_logging(args.verbose)
+
fig, (ax1, ax2, ax3) = pl.subplots(3,1,figsize=(10,10))
for pa in args.input_file:
diff --git a/bin/plotting/pycbc_plot_trigrate b/bin/plotting/pycbc_plot_trigrate
index 96b7fc1e6de..c5831733974 100644
--- a/bin/plotting/pycbc_plot_trigrate
+++ b/bin/plotting/pycbc_plot_trigrate
@@ -13,16 +13,18 @@
# Public License for more details.
-import sys, h5py
-import argparse, logging
-
+import sys
+import h5py
+import argparse
+import logging
from matplotlib import use
use('Agg')
from matplotlib import pyplot as plt
-
-import copy, numpy as np
+import copy
+import numpy as np
from scipy import stats as scistats
+import pycbc
from pycbc import io, events, bin_utils, results
from pycbc.events import triggers
from pycbc.events import ranking
@@ -39,10 +41,8 @@ def get_stat(statchoice, trigs):
#### MAIN ####
parser = argparse.ArgumentParser(usage="", description="Plot trigger rates")
-
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version", action=pycbc.version.Version)
-parser.add_argument("-V", "--verbose", action="store_true",
- help="Print extra debugging information", default=False)
parser.add_argument("--trigger-file",
help="Input hdf5 file containing single triggers. "
"Required")
@@ -119,11 +119,7 @@ if (args.gps_start_time or args.gps_end_time) and not (args.gps_start_time \
and args.gps_end_time):
raise RuntimeError("I need both gps start time and end time!")
-if args.verbose:
- log_level = logging.DEBUG
-else:
- log_level = logging.WARN
-logging.basicConfig(format='%(asctime)s : %(message)s', level=log_level)
+pycbc.init_logging(args.verbose)
statname = "reweighted SNR" if args.sngl_stat == "new_snr" else \
args.sngl_stat.replace("_", " ").replace("snr", "SNR")
diff --git a/bin/plotting/pycbc_plot_vt_ratio b/bin/plotting/pycbc_plot_vt_ratio
index 0145584965e..5cf74231975 100644
--- a/bin/plotting/pycbc_plot_vt_ratio
+++ b/bin/plotting/pycbc_plot_vt_ratio
@@ -14,10 +14,12 @@ import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
-from pycbc.results import save_fig_with_metadata
+from pycbc import init_logging, add_common_pycbc_options
+from pycbc.results import save_fig_with_metadata
parser = argparse.ArgumentParser(description=__doc__)
+add_common_pycbc_options(parser)
parser.add_argument('--vt-files-one', nargs='+',
help='HDF files containing VT curves, data for '
'the numerator (top) of the ratio')
@@ -42,6 +44,8 @@ parser.add_argument('--log-y', action='store_true',
help='Use logarithmic y-axis')
args = parser.parse_args()
+init_logging(args.verbose)
+
# Warn user if different numbers of files in numerator vs denominator
if len(args.vt_files_one) != len(args.vt_files_two):
logging.warning(
diff --git a/bin/plotting/pycbc_plot_waveform b/bin/plotting/pycbc_plot_waveform
index 1701a2c5c5e..9f94d1aac9d 100644
--- a/bin/plotting/pycbc_plot_waveform
+++ b/bin/plotting/pycbc_plot_waveform
@@ -23,15 +23,17 @@ matplotlib.use('Agg')
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1.inset_locator import zoomed_inset_axes
from mpl_toolkits.axes_grid1.inset_locator import mark_inset
+
from pycbc import waveform, io
from pycbc import version
from pycbc import results
+from pycbc import init_logging, add_common_pycbc_options
from pycbc.fft import ifft
from pycbc.types import complex_same_precision_as
from pycbc.types import TimeSeries, zeros, complex64
-parser = argparse.ArgumentParser(usage='',
- description="Plot a waveform in both time and frequency domain.")
+parser = argparse.ArgumentParser(usage='', description=__doc__)
+add_common_pycbc_options(parser)
parser.add_argument('--version', action='version',
version=version.git_verbose_msg)
parser.add_argument('--output-file', required=True)
@@ -89,6 +91,8 @@ parser.add_argument('--plot-caption',
opt = parser.parse_args()
+init_logging(opt.verbose)
+
delta_f = 1. / opt.waveform_length
delta_t = 1. / opt.sample_rate
tlen = int(opt.waveform_length * opt.sample_rate)
diff --git a/bin/pycbc_condition_strain b/bin/pycbc_condition_strain
index e0141b61311..e9ca518c113 100644
--- a/bin/pycbc_condition_strain
+++ b/bin/pycbc_condition_strain
@@ -26,6 +26,7 @@ injections.
import logging
import argparse
+
import pycbc.strain
import pycbc.version
import pycbc.frame
@@ -45,6 +46,7 @@ def write_strain(file_name, channel, data):
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version", action="version",
version=pycbc.version.git_verbose_msg)
parser.add_argument('--output-strain-file', required=True,
@@ -79,6 +81,8 @@ pycbc.strain.insert_strain_option_group(parser)
pycbc.fft.insert_fft_option_group(parser)
args = parser.parse_args()
+pycbc.init_logging(args.verbose, default_level=1)
+
# Take in / deal with the FFT options
pycbc.fft.verify_fft_options(args, parser)
pycbc.fft.from_cli(args)
@@ -86,8 +90,6 @@ pycbc.fft.from_cli(args)
if args.frame_duration is not None and args.frame_duration <= 0:
parser.error('Frame duration should be positive integer, {} given'.format(args.frame_duration))
-logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
-
# read and condition strain as pycbc_inspiral would do
out_strain = pycbc.strain.from_cli(args, dyn_range_fac=pycbc.DYN_RANGE_FAC,
precision=args.output_precision)
diff --git a/bin/pycbc_get_ffinal b/bin/pycbc_get_ffinal
index 06afe7d4fd8..a5767a198d3 100644
--- a/bin/pycbc_get_ffinal
+++ b/bin/pycbc_get_ffinal
@@ -1,17 +1,17 @@
#! /usr/bin/env python
+"""Finds the maximum frequency of waveforms by generating them. Will also report
+the duration of time domain waveforms.
+"""
__prog__ = 'pycbc_get_ffinal'
__author__ = 'Collin Capano '
-__description__ = """
-Finds the maximum frequency of waveforms by generating them. Will also report
-the duration of time domain waveforms.
-"""
import os, sys
import numpy
-import optparse
+import argparse
+import pycbc
from pycbc import waveform
from ligo.lw import lsctables
@@ -19,91 +19,90 @@ from ligo.lw import utils as ligolw_utils
from ligo.lw import table
from ligo.lw.utils import process
-def fstr(input):
- """
- Helper function that automatically strips new line characters from a
- string. This is so that you can input multi-line strings to optparse
- without having to worry about the formatting being messed up in the help
- message.
-
- Parameters
- ----------
- input: string
- The input string to strip new line characters from.
- """
- return ' '.join(map(str.strip, input.split('\n'))).strip()
-
-
-parser = optparse.OptionParser(description = fstr(__description__))
-parser.add_option("-i", "--input", help=fstr("""
- Input file. If specified, any single waveform parameters given by the below
- options will be ignored.
- """))
-parser.add_option("-o", "--output", help=fstr("""
- Output file. Required if specifying an input file."""))
-parser.add_option("-a", "--approximant", help=fstr("""
- What approximant to use to genrate the waveform(s). Options are:
- TD approximants: %s.""" %(', '.join(waveform.td_approximants())) + """
- FD approximants: %s.""" %(', '.join(waveform.fd_approximants()))))
-parser.add_option('-f', '--f-min', type='float', help=fstr("""
- Frequency at which to start the waveform generation (in Hz)."""))
-parser.add_option('-r', '--sample-rate', type='int', help = fstr("""
- Sample rate to use (in Hz). Required for TD approximants."""))
-parser.add_option('-s', '--segment-length', type='int', help = fstr("""
- The inverse of deltaF (in s). Required for FD approximants."""))
-parser.add_option('-m', '--max-sample-rate', type='int', help = fstr("""
- Optional. Maximum sample rate to use (in Hz). If the Nyquist frequency of
- the given sample rate is lower than the ringdown frequency, an error will
- occur for some waveform approximants. If max-sample-rate is specified,
- the code will try increasing the sample-rate by a factor of 2 until it
- finds a frequency that works or until it exceeds the specified maximum
- rate."""))
-parser.add_option('-v', '--verbose', action='store_true', help='Be verbose.')
-waveformOpts = optparse.OptionGroup(parser, fstr("""
- Optional arguments for specifying a single waveform. These will be ignored
- if an input file is given."""))
-waveformOpts.add_option('', '--mass1', type='float', help=fstr("""
- Specify mass1 of a single waveform."""))
-waveformOpts.add_option('', '--mass2', type='float', help=fstr("""
- Specify mass2 of a single waveform."""))
-waveformOpts.add_option('', '--spin1x', type='float', default=0,
+appx_options = waveform.td_approximants() + waveform.fd_approximants()
+
+parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
+parser.add_argument("-i", "--input",
+ help="Input file. If specified, any single waveform "
+ "parameters given by the below options will be "
+ "ignored.")
+parser.add_argument("-o", "--output",
+ help="Output file. Required if specifying an input "
+ "file.")
+parser.add_argument("-a", "--approximant", required=True,
+ help=("What approximant to use to generate the "
+                          "waveform(s). Options are: TD approximants: %s. "
+                          % ', '.join(waveform.td_approximants()) +
+                          "FD approximants: %s." %
+ ', '.join(waveform.fd_approximants())),
+ choices=appx_options)
+parser.add_argument('-f', '--f-min', type=float,
+ help="Frequency at which to start the waveform "
+ "generation (in Hz).")
+parser.add_argument('-r', '--sample-rate', type=int,
+ help="Sample rate to use (in Hz). Required for TD "
+ "approximants.")
+parser.add_argument('-s', '--segment-length', type=int,
+                    help="The inverse of deltaF (in s). Required for FD "
+                         "approximants.")
+parser.add_argument('-m', '--max-sample-rate', type=int,
+ help="Optional. Maximum sample rate to use (in Hz). "
+ "If the Nyquist frequency of the given sample rate "
+ "is lower than the ringdown frequency, an error "
+ "will occur for some waveform approximants. If "
+ "max-sample-rate is specified, the code will try "
+ "increasing the sample-rate by a factor of 2 until "
+ "it finds a frequency that works or until it "
+ "exceeds the specified maximum rate.")
+waveform_opts = parser.add_argument_group("Waveform Options",
+ "Optional arguments for specifying a single waveform. These will be "
+ "ignored if an input file is given.")
+waveform_opts.add_argument('--mass1', type=float,
+ help="Specify mass1 of a single waveform.")
+waveform_opts.add_argument('--mass2', type=float,
+ help="Specify mass2 of a single waveform.")
+waveform_opts.add_argument('--spin1x', type=float, default=0,
help='Specify spin1x of a single waveform.')
-waveformOpts.add_option('', '--spin1y', type='float', default=0,
+waveform_opts.add_argument('--spin1y', type=float, default=0,
help='Specify spin1y of a single waveform.')
-waveformOpts.add_option('', '--spin1z', type='float', default=0,
+waveform_opts.add_argument('--spin1z', type=float, default=0,
help='Specify spin1z of a single waveform.')
-waveformOpts.add_option('', '--spin2x', type='float', default=0,
+waveform_opts.add_argument('--spin2x', type=float, default=0,
help='Specify spin2x of a single waveform.')
-waveformOpts.add_option('', '--spin2y', type='float', default=0,
+waveform_opts.add_argument('--spin2y', type=float, default=0,
help='Specify spin2y of a single waveform.')
-waveformOpts.add_option('', '--spin2z', type='float', default=0,
+waveform_opts.add_argument('--spin2z', type=float, default=0,
help='Specify spin2z of a single waveform.')
-waveformOpts.add_option('', '--lambda1', type='float', default=0,
+waveform_opts.add_argument('--lambda1', type=float, default=0,
help='Specify lambda1 of a single waveform.')
-waveformOpts.add_option('', '--lambda2', type='float', default=0,
+waveform_opts.add_argument('--lambda2', type=float, default=0,
help='Specify lambda2 of a single waveform.')
-waveformOpts.add_option('', '--phase-order', type='int', default=-1,
+waveform_opts.add_argument('--phase-order', type=int, default=-1,
help='Specify phase-order of a single waveform.')
-waveformOpts.add_option('', '--spin-order', type='int', default=-1,
+waveform_opts.add_argument('--spin-order', type=int, default=-1,
help='Specify spin-order of a single waveform.')
-waveformOpts.add_option('', '--tidal-order', type = 'int', default=-1,
+waveform_opts.add_argument('--tidal-order', type=int, default=-1,
help='Specify tidal-order of a single waveform.')
-waveformOpts.add_option('', '--amplitude-order', type = 'int', default=-1,
+waveform_opts.add_argument('--amplitude-order', type=int, default=-1,
help='Specify amplitude-order of a single waveform.')
-parser.add_option_group(waveformOpts)
-opts, _ = parser.parse_args()
+opts = parser.parse_args()
+
+pycbc.init_logging(opts.verbose)
# check options
if opts.input is None and (opts.mass1 is None or opts.mass2 is None):
- raise ValueError("Must specify input file or at least mass1,mass2 "
- "of a single waveform")
+ parser.error("Must specify input file or at least mass1,mass2 "
+ "of a single waveform")
+
infile = opts.input
if opts.input is not None and opts.output is None:
- raise ValueError("Must specify output if giving input file")
+ parser.error("Must specify output if giving input file")
outfile = opts.output
if opts.approximant is None:
- raise ValueError("Must specify an approximant")
+ parser.error("Must specify an approximant")
+
fd_approx = opts.approximant in waveform.fd_approximants()
if not fd_approx and opts.approximant not in waveform.td_approximants():
raise ValueError("Unrecognized approximant {}".format(opts.approximant))
@@ -132,7 +131,7 @@ if infile is not None:
this_process = process.register_to_xmldoc(xmldoc, __prog__, opts.__dict__)
sngl_insp_table = table.Table.get_table(xmldoc, 'sngl_inspiral')
else:
- # FIXME: try to get this from the waveformOpts group
+ # FIXME: try to get this from the waveform_opts group
tmplt_args = ['mass1', 'mass2', 'spin1x', 'spin1y', 'spin1z', 'spin2x',
'spin2y', 'spin2z', 'lambda1', 'lambda2', 'phase_order', 'spin_order',
'tidal_order', 'amplitude_order']
diff --git a/bin/pycbc_make_banksim b/bin/pycbc_make_banksim
index 6e41d9e6cb0..fa733e6d5d6 100644
--- a/bin/pycbc_make_banksim
+++ b/bin/pycbc_make_banksim
@@ -6,10 +6,10 @@ import configparser as ConfigParser
import subprocess
import glob
import tempfile
-from optparse import OptionParser
+from argparse import ArgumentParser
from glue.pipeline import CondorDAGJob, CondorDAGNode, CondorDAG, CondorJob
-from pycbc import init_logging
+from pycbc import init_logging, add_common_pycbc_options
class BaseJob(CondorDAGJob, CondorJob):
def __init__(self, log_dir, executable, cp, section, gpu=False,
@@ -31,7 +31,7 @@ class BaseJob(CondorDAGJob, CondorJob):
if accounting_group:
self.add_condor_cmd('accounting_group', accounting_group)
-
+
self.add_condor_cmd('request_disk', 1024)
class BanksimNode(CondorDAGNode):
@@ -69,16 +69,16 @@ class BanksimNode(CondorDAGNode):
self.add_post_script_arg(str(inj_per_job))
else:
self.add_file_opt("match-file", match_file, file_is_output_file=True)
-
+
class CombineNode(CondorDAGNode):
def __init__(self, job, inj_num):
CondorDAGNode.__init__(self, job)
-
+
self.add_var_opt("inj-num", inj_num)
-
+
outf = "match/match" + str(inj_num) + ".dat"
-
- self.add_file_opt("output-file", outf)
+
+ self.add_file_opt("output-file", outf)
def get_ini_opts(confs, section):
op_str = ""
@@ -86,13 +86,13 @@ def get_ini_opts(confs, section):
val = confs.get(section, opt)
op_str += "--" + opt + " " + val + " \\" + "\n"
return op_str
-
+
def mkdir(dir_name):
try :
os.mkdir(dir_name)
except OSError:
pass
-
+
def mc_min_max_from_sorted_file(fname):
from ligo.lw.utils import load_filename
from ligo.lw.table import Table
@@ -107,33 +107,31 @@ def mc_min_max_from_sorted_file(fname):
mc_max, et = mass1_mass2_to_mchirp_eta(t[0].mass1, t[0].mass2)
mc_min, et = mass1_mass2_to_mchirp_eta(t[-1].mass1, t[-1].mass2)
return mc_min, mc_max
-
-
+
+
bf_mchirps = {}
sf_mchirps = {}
def check_outside_mchirp(bf, sf, w):
if bf not in bf_mchirps:
- bf_mchirps[bf] = mc_min_max_from_sorted_file(bf)
+ bf_mchirps[bf] = mc_min_max_from_sorted_file(bf)
if sf not in sf_mchirps:
- sf_mchirps[sf] = mc_min_max_from_sorted_file(sf)
-
+ sf_mchirps[sf] = mc_min_max_from_sorted_file(sf)
+
mc_min, mc_max = bf_mchirps[bf]
mc2_min, mc2_max = sf_mchirps[sf]
-
+
if (mc_min <= mc2_max * (1+w) ) and (mc_max * (1+w) >= mc2_min):
return False
else:
return True
-parser = OptionParser()
-parser.add_option('--config', type=str)
-(options, args) = parser.parse_args()
-
-if options.config is None:
- raise ValueError("Config file is required")
+parser = ArgumentParser()
+add_common_pycbc_options(parser)
+parser.add_argument('--config', type=str, required=True)
+options = parser.parse_args()
-# logging INFO
-init_logging(1)
+# Default logging level is info: --verbose adds to this
+init_logging(options.verbose, default_level=1)
confs = ConfigParser.ConfigParser()
confs.read(options.config)
@@ -252,7 +250,7 @@ for inj_num in range(num_injs):
combine_has_jobs = True
if combine_has_jobs:
rnode.add_parent(cnode)
- dag.add_node(cnode)
+ dag.add_node(cnode)
dag.add_node(rnode)
pnode.add_parent(rnode)
dag.add_node(pnode)
@@ -280,11 +278,11 @@ matches=[]
maxmatch = []
for fil in fils:
matches.append(loadtxt(fil, dtype=dtypef))
-
+
indices = array(matches, dtype=dtypef)['match'].argmax(0)
for i, j in enumerate(indices):
maxmatch.append(matches[j][i])
-
+
maxmatch=array(maxmatch, dtype =dtypef)
savetxt(options.output_file, maxmatch,fmt=('%5.5f', '%s', '%i', '%s', '%i', '%5.5f'), delimiter=' ')
""")
@@ -310,12 +308,12 @@ for fil in fils:
res = append(res, loadtxt(fil, dtype=dtypem))
else:
res = loadtxt(fil, dtype=dtypem)
-
+
btables = {}
-itables = {}
+itables = {}
f = open("results.dat", "w")
-for row in res:
+for row in res:
outstr = ""
if row['bank'] not in btables:
indoc = utils.load_filename(eval(row['bank']).decode('utf-8'), False,
@@ -329,7 +327,7 @@ for row in res:
bt = btables[row['bank']][row['bank_i']]
it = itables[row['sim']][row['sim_i']]
-
+
outstr += str(row['match']) + " "
outstr += str(bt.mass1) + " "
outstr += str(bt.mass2) + " "
@@ -337,28 +335,28 @@ for row in res:
outstr += str(bt.spin1y) + " "
outstr += str(bt.spin1z) + " "
outstr += str(bt.spin2x) + " "
- outstr += str(bt.spin2y) + " "
+ outstr += str(bt.spin2y) + " "
outstr += str(bt.spin2z) + " "
-
+
outstr += str(it.mass1) + " "
outstr += str(it.mass2) + " "
outstr += str(it.spin1x) + " "
outstr += str(it.spin1y) + " "
outstr += str(it.spin1z) + " "
outstr += str(it.spin2x) + " "
- outstr += str(it.spin2y) + " "
+ outstr += str(it.spin2y) + " "
outstr += str(it.spin2z) + " "
-
+
outstr += str(it.coa_phase) + " "
outstr += str(it.inclination) + " "
- outstr += str(it.latitude) + " "
+ outstr += str(it.latitude) + " "
outstr += str(it.longitude) + " "
outstr += str(it.polarization) + " "
-
+
outstr += str(row['sigmasq']) + " "
-
+
outstr += "\\n"
-
+
f.write(outstr)
""")
os.chmod('scripts/pycbc_banksim_collect_results', 0o0777)
@@ -380,10 +378,10 @@ if gpu:
else
echo "wrong length file"
exit 1
- fi
+ fi
function fuzzy_diff {
- echo " ($3>($1-$2)) && ($3>($2-$1)) " | bc
+ echo " ($3>($1-$2)) && ($3>($2-$1)) " | bc
}
exec 3<$1
@@ -393,12 +391,12 @@ if gpu:
IFS= read -r line2 <&4
do
line1=`echo "$line1" | cut --delimiter=' ' -f 1`
- line2=`echo "$line2" | cut --delimiter=' ' -f 1`
+ line2=`echo "$line2" | cut --delimiter=' ' -f 1`
if ! [[ "$line1" =~ ^[0-9]+([.][0-9]+)?$ ]] ; then
exec >&2; echo "error: Not a number"; exit 1
fi
-
+
if ! [[ "$line2" =~ ^[0-9]+([.][0-9]+)?$ ]] ; then
exec >&2; echo "error: Not a number"; exit 1
fi
@@ -408,8 +406,8 @@ if gpu:
if [ $ok -eq 0 ] ; then
echo "Files do not match"
exit 1
- fi
-
+ fi
+
done
@@ -420,7 +418,7 @@ if gpu:
exit 0
""")
os.chmod('scripts/diff_match.sh', 0o0777)
-
+
logging.info("Creating submit script")
f = open("submit.sh","w")
if gpu:
@@ -462,8 +460,8 @@ goldenratio = 2 / (1 + 5**.5)
# "legend.fontsize": 8.0,
# "figure.figsize": (3.3,3.3*goldenratio),
# "figure.dpi": 200,
-# "subplots.left": 0.2,
-# "subplots.right": 0.75,
+# "subplots.left": 0.2,
+# "subplots.right": 0.75,
# "subplots.bottom": 0.15,
# "subplots.top": 0.75,
# "savefig.dpi": 600,
@@ -475,7 +473,7 @@ match = res[:,0]
tmass1 = res[:,1]
tmass2 = res[:,2]
-tspin1x = res[:,3]
+tspin1x = res[:,3]
tspin1y = res[:,4]
tspin1z = res[:,5]
tspin2x = res[:,6]
@@ -529,27 +527,27 @@ mhist(imchirp-tmchirp, "plots/hist-mchirp-diff.png")
mhist((imchirp-tmchirp)/imchirp, "plots/hist-mchirp-reldiff.png")
mhist(match, "plots/hist-match.png")
mhist(match, "plots/hist-match-cum.png", cum=1, log=True, bins=10000, xl = "Match", yl="Fraction of injections < Match")
-
+
pylab.figure(102)
pylab.ylabel('Fraction of Injections')
pylab.xlabel('Fitting factor')
-pylab.yscale('log')
+pylab.yscale('log')
pylab.xlim(0.95, 1.0)
pylab.ylim(1e-4, 1)
hBins = pylab.arange(0.,1.,0.0005,dtype=float)
n, bins,patches=pylab.hist(match,cumulative=1,bins=hBins,density=True)
pylab.grid()
pylab.savefig("plots/cum_hist.png")
-
+
mplot(imass1, imass2, match, "plots/m1-m2-match.png")
mplot(tmass1, tmass2, match, "plots/tm1-tm2-match.png")
mplot(q, s1m, match, "plots/q-s1m-match.png")
mplot(q, s2m, match, "plots/q-s2m-match.png")
-mplot(q, ispin1z, match, "plots/q-s1z-match.png")
-mplot(q, ispin2z, match, "plots/q-s2z-match.png", "Mass Ratio", "Spin2z")
+mplot(q, ispin1z, match, "plots/q-s1z-match.png")
+mplot(q, ispin2z, match, "plots/q-s2z-match.png", "Mass Ratio", "Spin2z")
mplot(q, ispin2z, match, "plots/q-s2z-match97.png", "Mass Ratio", "Spin2z", vmin=0.97)
mplot(q, ispin2z, match, "plots/q-s2z-match90.png", "Mass Ratio", "Spin2z", vmin=0.90)
-mplot(inclination, match, match, "plots/inc-match.png")
+mplot(inclination, match, match, "plots/inc-match.png")
mplot(imass1, imass2, imchirp-tmchirp, "plots/m1-m2-mchirpdiff.png")
mplot(q, ispin1z, imchirp-tmchirp, "plots/q-s1z-mchirpdiff.png", "Mass Ratio", "Spin1z")
diff --git a/bin/pycbc_make_faithsim b/bin/pycbc_make_faithsim
index 95a421e21ee..552b767297d 100644
--- a/bin/pycbc_make_faithsim
+++ b/bin/pycbc_make_faithsim
@@ -6,10 +6,10 @@ import configparser as ConfigParser
import subprocess
import glob
import tempfile
-from optparse import OptionParser
+from argparse import ArgumentParser
from glue.pipeline import CondorDAGJob, CondorDAGNode, CondorDAG, CondorJob
-from pycbc import init_logging
+from pycbc import init_logging, add_common_pycbc_options
class BaseJob(CondorDAGJob, CondorJob):
def __init__(self, log_dir, executable, cp, section, accounting_group=None):
@@ -59,15 +59,13 @@ def matches_in_list(slist, match):
matches.append(st)
return matches
-parser = OptionParser()
-parser.add_option('--config', type=str)
-(options, args) = parser.parse_args()
+parser = ArgumentParser()
+add_common_pycbc_options(parser)
+parser.add_argument('--config', type=str, required=True)
+options = parser.parse_args()
-if options.config is None:
- raise ValueError("Config file is required")
-
-# logging INFO
-init_logging(1)
+# Default logging level is info: --verbose adds to this
+init_logging(options.verbose, default_level=1)
confs = ConfigParser.ConfigParser()
confs.read(options.config)
diff --git a/bin/pycbc_upload_xml_to_gracedb b/bin/pycbc_upload_xml_to_gracedb
index a7e03d7b280..6d32c4c965b 100755
--- a/bin/pycbc_upload_xml_to_gracedb
+++ b/bin/pycbc_upload_xml_to_gracedb
@@ -67,9 +67,6 @@ def check_gracedb_for_event(gdb_handle, query, far):
return False
-logging.basicConfig(format='%(asctime)s:%(levelname)s : %(message)s',
- level=logging.INFO)
-
parser = argparse.ArgumentParser(description=__doc__)
pycbc.add_common_pycbc_options(parser)
parser.add_argument("--psd-files", nargs='+', required=True,
@@ -113,7 +110,7 @@ parser.add_argument('--generate-plots', action='store_true',
args = parser.parse_args()
-pycbc.init_logging(args.verbose)
+pycbc.init_logging(args.verbose, default_level=1)
if args.production_server:
gracedb = GraceDb()
diff --git a/bin/pygrb/pycbc_grb_inj_finder b/bin/pygrb/pycbc_grb_inj_finder
index b9a53e4819c..8494dcd9c60 100644
--- a/bin/pygrb/pycbc_grb_inj_finder
+++ b/bin/pygrb/pycbc_grb_inj_finder
@@ -39,7 +39,7 @@ from gwdatafind.utils import filename_metadata
from ligo.segments import segmentlist
from ligo.segments.utils import fromsegwizard
-from pycbc import __version__
+from pycbc import __version__, add_common_pycbc_options, init_logging
from pycbc.inject import InjectionSet
from pycbc.results.pygrb_postprocessing_utils import template_hash_to_id
@@ -152,13 +152,8 @@ parser = argparse.ArgumentParser(
description=__doc__,
)
-parser.add_argument(
- "-v",
- "--verbose",
- action="store_true",
- default=False,
- help="verbose output with microsecond timer (default: %(default)s)",
-)
+add_common_pycbc_options(parser)
+
parser.add_argument(
"-V",
"--version",
@@ -223,6 +218,8 @@ parser.add_argument(
args = parser.parse_args()
+init_logging(args.verbose)
+
vprint = print if args.verbose else str
win = args.time_window
diff --git a/bin/pygrb/pycbc_grb_trig_cluster b/bin/pygrb/pycbc_grb_trig_cluster
index 0bb8f16a1a7..80a4d08bd8c 100644
--- a/bin/pygrb/pycbc_grb_trig_cluster
+++ b/bin/pygrb/pycbc_grb_trig_cluster
@@ -35,7 +35,7 @@ import h5py
from gwdatafind.utils import filename_metadata
from pycbc import __version__
-from pycbc import init_logging
+from pycbc import init_logging, add_common_pycbc_options
__author__ = "Duncan Macleod "
@@ -119,13 +119,7 @@ parser = argparse.ArgumentParser(
description=__doc__,
)
-parser.add_argument(
- "-v",
- "--verbose",
- action="store_true",
- default=False,
- help="verbose output with microsecond timer (default: %(default)s)",
-)
+add_common_pycbc_options(parser)
parser.add_argument(
"-V",
"--version",
diff --git a/bin/pygrb/pycbc_grb_trig_combiner b/bin/pygrb/pycbc_grb_trig_combiner
index b36067d845f..0e7e04b3f04 100644
--- a/bin/pygrb/pycbc_grb_trig_combiner
+++ b/bin/pygrb/pycbc_grb_trig_combiner
@@ -35,7 +35,7 @@ from gwdatafind.utils import (file_segment, filename_metadata)
from ligo import segments
from ligo.segments.utils import fromsegwizard
-from pycbc import __version__
+from pycbc import __version__, add_common_pycbc_options, init_logging
from pycbc.results.pygrb_postprocessing_utils import template_hash_to_id
__author__ = "Duncan Macleod "
@@ -340,13 +340,7 @@ parser = argparse.ArgumentParser(
description=__doc__,
)
-parser.add_argument(
- "-v",
- "--verbose",
- action="store_true",
- default=False,
- help="verbose output with microsecond timer (default: %(default)s)",
-)
+add_common_pycbc_options(parser)
parser.add_argument(
"-V",
"--version",
@@ -458,6 +452,8 @@ parser.add_argument(
args = parser.parse_args()
+init_logging(args.verbose)
+
vprint = print if args.verbose else str
vprint("-- Welcome to the PyGRB trigger combiner")
diff --git a/bin/pygrb/pycbc_make_offline_grb_workflow b/bin/pygrb/pycbc_make_offline_grb_workflow
index eef650d5d30..e8cb3db28d3 100644
--- a/bin/pygrb/pycbc_make_offline_grb_workflow
+++ b/bin/pygrb/pycbc_make_offline_grb_workflow
@@ -31,23 +31,28 @@ import sys
import os
import argparse
import logging
-import pycbc.workflow as _workflow
-from pycbc.workflow.core import configparser_value_to_file
-from ligo.segments import segment, segmentlistdict
import matplotlib
matplotlib.use('agg')
+
+from ligo.segments import segment, segmentlistdict
+
+from pycbc import init_logging, add_common_pycbc_options
+import pycbc.workflow as _workflow
+from pycbc.workflow.core import configparser_value_to_file
from pycbc.results.pygrb_plotting_utils import make_grb_segments_plot
workflow_name = "pygrb_offline"
-logging.basicConfig(format="%(asctime)s:%(levelname)s : %(message)s",
- level=logging.INFO)
# Parse command line options and instantiate pycbc workflow object
parser = argparse.ArgumentParser()
+add_common_pycbc_options(parser)
parser.add_argument("--version", action="version", version=__version__)
_workflow.add_workflow_command_line_group(parser)
_workflow.add_workflow_settings_cli(parser)
args = parser.parse_args()
+
+init_logging(args.verbose, default_level=1)
+
wflow = _workflow.Workflow(args, workflow_name)
all_files = _workflow.FileList([])
tags = []
diff --git a/bin/pygrb/pycbc_pygrb_efficiency b/bin/pygrb/pycbc_pygrb_efficiency
index 64f7e4e0f3a..a60585f55ce 100644
--- a/bin/pygrb/pycbc_pygrb_efficiency
+++ b/bin/pygrb/pycbc_pygrb_efficiency
@@ -31,6 +31,7 @@ from matplotlib import rc
import numpy as np
import scipy
from scipy import stats
+
import pycbc.version
from pycbc import init_logging
from pycbc.detector import Detector
diff --git a/bin/pygrb/pycbc_pygrb_grb_info_table b/bin/pygrb/pycbc_pygrb_grb_info_table
index 5f8e92496b6..9903886e8b7 100644
--- a/bin/pygrb/pycbc_pygrb_grb_info_table
+++ b/bin/pygrb/pycbc_pygrb_grb_info_table
@@ -25,7 +25,10 @@ import sys
import argparse
from datetime import datetime
import numpy
+
import lal
+
+from pycbc import add_common_pycbc_options, init_logging
import pycbc.version
import pycbc.results
from pycbc.detector import Detector
@@ -41,6 +44,7 @@ __program__ = "pycbc_pygrb_grb_info_table"
# =============================================================================
parser = argparse.ArgumentParser(description=__doc__, formatter_class=
argparse.ArgumentDefaultsHelpFormatter)
+add_common_pycbc_options(parser)
parser.add_argument("--version", action="version", version=__version__)
parser.add_argument("--trigger-time", type=int,
required=True,
@@ -63,6 +67,8 @@ parser.add_argument("--output-file", action="store",
opts = parser.parse_args()
+init_logging(opts.verbose)
+
headers = []
data = [[]]
diff --git a/bin/pygrb/pycbc_pygrb_minifollowups b/bin/pygrb/pycbc_pygrb_minifollowups
index 467d8e3ff06..302fb52327b 100644
--- a/bin/pygrb/pycbc_pygrb_minifollowups
+++ b/bin/pygrb/pycbc_pygrb_minifollowups
@@ -27,6 +27,7 @@ import os
import argparse
import logging
import h5py
+
from pycbc import init_logging
import pycbc.workflow as wf
from pycbc.workflow.core import FileList, resolve_url_to_file
@@ -106,8 +107,7 @@ def make_timeseries_plot(workflow, trig_file, snr_type, central_time,
parser = argparse.ArgumentParser(description=__doc__[1:])
parser.add_argument('--version', action='version',
version=pycbc.version.git_verbose_msg)
-parser.add_argument("-v", "--verbose", default=False, action="store_true",
- help="Verbose output")
+pycbc.add_common_pycbc_options(parser)
parser.add_argument('--trig-file',
help="HDF file with the triggers found by PyGRB")
parser.add_argument('--followups-file',
@@ -122,7 +122,7 @@ wf.add_workflow_settings_cli(parser, include_subdax_opts=True)
ppu.pygrb_add_bestnr_cut_opt(parser)
args = parser.parse_args()
-init_logging(args.verbose, format="%(asctime)s: %(levelname)s: %(message)s")
+pycbc.init_logging(args.verbose, format="%(asctime)s: %(levelname)s: %(message)s")
workflow = wf.Workflow(args)
diff --git a/bin/pygrb/pycbc_pygrb_page_tables b/bin/pygrb/pycbc_pygrb_page_tables
index c3dc22aec3b..c2e5c7a6674 100755
--- a/bin/pygrb/pycbc_pygrb_page_tables
+++ b/bin/pygrb/pycbc_pygrb_page_tables
@@ -28,6 +28,7 @@ import os
import logging
import numpy as np
import h5py
+
import pycbc.version
from pycbc.conversions import mchirp_from_mass1_mass2
from pycbc.detector import Detector
diff --git a/bin/pygrb/pycbc_pygrb_plot_coh_ifosnr b/bin/pygrb/pycbc_pygrb_plot_coh_ifosnr
index 4fa562fa9f0..d378d1aea96 100644
--- a/bin/pygrb/pycbc_pygrb_plot_coh_ifosnr
+++ b/bin/pygrb/pycbc_pygrb_plot_coh_ifosnr
@@ -33,6 +33,7 @@ from matplotlib import pyplot as plt
from matplotlib import rc
import numpy
import scipy
+
import pycbc.version
from pycbc import init_logging
from pycbc.detector import Detector
diff --git a/bin/pygrb/pycbc_pygrb_pp_workflow b/bin/pygrb/pycbc_pygrb_pp_workflow
index 12098b6c546..fc14632d79d 100644
--- a/bin/pygrb/pycbc_pygrb_pp_workflow
+++ b/bin/pygrb/pycbc_pygrb_pp_workflow
@@ -29,8 +29,9 @@ import socket
import logging
import argparse
import os
+
+import pycbc
import pycbc.version
-from pycbc import init_logging
import pycbc.workflow as _workflow
from pycbc.results import layout
from pycbc.results.pygrb_postprocessing_utils import extract_ifos
@@ -47,9 +48,8 @@ __program__ = "pycbc_pygrb_pp_workflow"
# =============================================================================
# Use the standard workflow command-line parsing routines.
parser = argparse.ArgumentParser(description=__doc__[1:])
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--version", action="version", version=__version__)
-parser.add_argument("-v", "--verbose", default=False, action="store_true",
- help="Verbose output")
parser.add_argument("-t", "--trig-files", action="store",
required=True, nargs="+",
help="The locations of the trigger files "
@@ -68,7 +68,7 @@ _workflow.add_workflow_command_line_group(parser)
_workflow.add_workflow_settings_cli(parser, include_subdax_opts=True)
args = parser.parse_args()
-init_logging(args.verbose, format="%(asctime)s: %(levelname)s: %(message)s")
+pycbc.init_logging(args.verbose, format="%(asctime)s: %(levelname)s: %(message)s")
# Store starting run directory
start_rundir = os.getcwd()
diff --git a/bin/workflow_comparisons/offline_search/pycbc_combine_injection_comparisons b/bin/workflow_comparisons/offline_search/pycbc_combine_injection_comparisons
index 94cf18d620d..0e34021121d 100755
--- a/bin/workflow_comparisons/offline_search/pycbc_combine_injection_comparisons
+++ b/bin/workflow_comparisons/offline_search/pycbc_combine_injection_comparisons
@@ -15,14 +15,15 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+import logging
from glob import glob
from os import path
import argparse
import numpy as np
from h5py import File
+
from pycbc.pnutils import mass1_mass2_to_mchirp_eta
import pycbc
-import logging
# Globals
@@ -142,6 +143,7 @@ formatter = argparse.RawDescriptionHelpFormatter
parser = argparse.ArgumentParser()
parser = argparse.ArgumentParser(formatter_class=formatter,
description=long_description)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--input-files", nargs='+', required=True,
help="List of comparison files created by running"
" 'pycbc_injection_set_comparison' on several injection"
@@ -151,8 +153,6 @@ parser.add_argument("--output-file", type=str, required=True,
parser.add_argument("--found-type", type=str, required=True,
choices=['found', 'found_after_vetoes'],
help="Which class of found injections to collate")
-parser.add_argument("--verbose", action="store_true", default=False,
- help="Print extra debugging information")
args = parser.parse_args()
pycbc.init_logging(args.verbose)
diff --git a/bin/workflow_comparisons/offline_search/pycbc_injection_set_comparison b/bin/workflow_comparisons/offline_search/pycbc_injection_set_comparison
index 02bd3fb5bd9..0b21dfe3ca6 100755
--- a/bin/workflow_comparisons/offline_search/pycbc_injection_set_comparison
+++ b/bin/workflow_comparisons/offline_search/pycbc_injection_set_comparison
@@ -14,15 +14,20 @@
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""
+Detailed comparison of a specified injection set between two PyCBC runs
+"""
+import logging
from glob import glob
from os import path
import argparse
import numpy as np
from h5py import File
+
from pycbc.events import ranking
import pycbc
-import logging
+
def parse_injection_path(injname, basedir):
dirpath = path.expandvars(basedir)
@@ -362,10 +367,8 @@ def compare_missed_injs(reference_run, comparison_run, outfp):
return
-parser = argparse.ArgumentParser()
-parser = argparse.ArgumentParser(usage="",
- description="Detailed comparison of a specified injection set between two"
- " PyCBC runs")
+parser = argparse.ArgumentParser(usage="", description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument("--injection-label", type=str, required=True,
help="Label of injection set")
parser.add_argument("--reference-dir", type=str, required=True,
@@ -390,8 +393,6 @@ parser.add_argument("--single-detector-statistic", type=str, default='newsnr',
choices=ranking.sngls_ranking_function_dict.keys(),
help="Which single-detector statistic to calculate for"
" found injections")
-parser.add_argument("--verbose", action="store_true", default=False,
- help="Print extra debugging information")
args = parser.parse_args()
pycbc.init_logging(args.verbose)
diff --git a/bin/workflow_comparisons/offline_search/pycbc_plot_injections_found_both_workflows b/bin/workflow_comparisons/offline_search/pycbc_plot_injections_found_both_workflows
index 277a196b344..c017a8bd755 100755
--- a/bin/workflow_comparisons/offline_search/pycbc_plot_injections_found_both_workflows
+++ b/bin/workflow_comparisons/offline_search/pycbc_plot_injections_found_both_workflows
@@ -14,7 +14,10 @@
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-
+"""
+Plot histograms of ratio of IFAR and ranking statistic of injections
+between two comparable runs
+"""
import h5py
import matplotlib
@@ -23,10 +26,10 @@ import matplotlib.pyplot as plt
import numpy as np
import argparse
-parser = argparse.ArgumentParser()
-parser = argparse.ArgumentParser(description="Plot histograms of ratio of IFAR"
- " and ranking statistic of injections between"
- " two comparable runs")
+from pycbc import add_common_pycbc_options, init_logging
+
+parser = argparse.ArgumentParser(description=__doc__)
+add_common_pycbc_options(parser)
parser.add_argument('--combined-comparison-file', required=True,
help="HDF file holding output from"
" 'pycbc_combine_injection_comparisons'")
@@ -43,6 +46,8 @@ parser.add_argument('--log-y', action='store_true', default=False,
help='Use logarithmic y-axis')
args = parser.parse_args()
+init_logging(args.verbose)
+
# Load in the two datasets
f = h5py.File(args.combined_comparison_file)
diff --git a/bin/workflow_comparisons/offline_search/pycbc_plot_injections_missed_one_workflow b/bin/workflow_comparisons/offline_search/pycbc_plot_injections_missed_one_workflow
index d25eb4511be..3ab191a5fed 100755
--- a/bin/workflow_comparisons/offline_search/pycbc_plot_injections_missed_one_workflow
+++ b/bin/workflow_comparisons/offline_search/pycbc_plot_injections_missed_one_workflow
@@ -14,6 +14,9 @@
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""Plot histograms of IFAR and ranking statistic of injections missed
+in only one of two comparable runs
+"""
import h5py
import matplotlib
@@ -22,10 +25,10 @@ import matplotlib.pyplot as plt
import numpy as np
import argparse
-parser = argparse.ArgumentParser()
-parser = argparse.ArgumentParser(description="Plot histograms of IFAR and"
- " ranking statistic of injections missed in"
- " only one of two comparable runs")
+from pycbc import add_common_pycbc_options, init_logging
+
+parser = argparse.ArgumentParser(description=__doc__)
+add_common_pycbc_options(parser)
parser.add_argument('--combined-comparison-file', required=True,
help="HDF file holding output of"
" 'pycbc_combine_injection_comparisons'")
@@ -45,6 +48,8 @@ parser.add_argument('--log-y', action='store_true', default=False,
help='Use logarithmic y-axis')
args = parser.parse_args()
+init_logging(args.verbose)
+
# Load in the two datasets
f = h5py.File(args.combined_comparison_file)
diff --git a/bin/workflow_comparisons/offline_search/pycbc_plot_vt_ratio_vs_ifar b/bin/workflow_comparisons/offline_search/pycbc_plot_vt_ratio_vs_ifar
index 4d9aa672b16..688a6801b6d 100755
--- a/bin/workflow_comparisons/offline_search/pycbc_plot_vt_ratio_vs_ifar
+++ b/bin/workflow_comparisons/offline_search/pycbc_plot_vt_ratio_vs_ifar
@@ -15,6 +15,10 @@
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""
+Plot ratios of VTs calculated at various IFARs using pycbc_page_sensitivity
+(run with --hdf-out option) for two comparable analyses
+"""
import h5py
import matplotlib
@@ -25,11 +29,10 @@ import argparse
from matplotlib.pyplot import cm
from math import ceil
-parser = argparse.ArgumentParser()
-parser = argparse.ArgumentParser(description="Plot ratios of VTs calculated at"
- " various IFARs using pycbc_page_sensitivity"
- " (run with --hdf-out option) for two"
- " comparable analyses ")
+from pycbc import add_common_pycbc_options, init_logging
+
+parser = argparse.ArgumentParser(description=__doc__)
+add_common_pycbc_options(parser)
parser.add_argument('--vt-file-one', required=True,
help="HDF file containing VT curves, first set of data"
" for comparison")
@@ -46,6 +49,8 @@ parser.add_argument('--log-y', action='store_true', default=False,
help='Use logarithmic y-axis')
args = parser.parse_args()
+init_logging(args.verbose)
+
# Load in the two datasets
f1 = h5py.File(args.vt_file_one)
f2 = h5py.File(args.vt_file_two)
diff --git a/bin/workflows/pycbc_make_bank_verifier_workflow b/bin/workflows/pycbc_make_bank_verifier_workflow
index ebc5e64a60f..27349611f70 100644
--- a/bin/workflows/pycbc_make_bank_verifier_workflow
+++ b/bin/workflows/pycbc_make_bank_verifier_workflow
@@ -29,6 +29,7 @@ import shutil
from ligo import segments
+from pycbc import add_common_pycbc_options, init_logging
import pycbc.version
import pycbc.workflow as wf
from pycbc.results import (static_table, layout)
@@ -154,11 +155,14 @@ class BanksimTablePointInjsExecutable(wf.Executable):
# so run this with --help to see what options are added.
_desc = __doc__[1:]
parser = argparse.ArgumentParser(description=_desc)
+add_common_pycbc_options(parser)
parser.add_argument('--version', action='version', version=__version__)
wf.add_workflow_command_line_group(parser)
wf.add_workflow_settings_cli(parser)
args = parser.parse_args()
+init_logging(args.verbose)
+
# FIXME: opts.tags is currently unused here.
# Create the workflow object
diff --git a/bin/workflows/pycbc_make_faithsim_workflow b/bin/workflows/pycbc_make_faithsim_workflow
index 663484cf368..37aae89a3a5 100755
--- a/bin/workflows/pycbc_make_faithsim_workflow
+++ b/bin/workflows/pycbc_make_faithsim_workflow
@@ -13,6 +13,8 @@ import configparser as ConfigParser
import numpy as np
import logging
import argparse
+
+from pycbc import add_common_pycbc_options, init_logging
from pycbc.workflow.plotting import PlotExecutable
from pycbc.workflow import setup_splittable_dax_generated
@@ -97,11 +99,14 @@ class CollectResultsExecutable(wf.Executable):
parser = argparse.ArgumentParser(description=__doc__)
+add_common_pycbc_options(parser)
parser.add_argument("--version", action="version", version=__version__)
wf.add_workflow_command_line_group(parser)
wf.add_workflow_settings_cli(parser)
args = parser.parse_args()
+init_logging(args.verbose)
+
workflow = wf.Workflow(args)
num_banks = workflow.cp.get("splitbank", "num_banks")
diff --git a/bin/workflows/pycbc_make_inference_inj_workflow b/bin/workflows/pycbc_make_inference_inj_workflow
index b23937c776f..a0cee1d1a94 100644
--- a/bin/workflows/pycbc_make_inference_inj_workflow
+++ b/bin/workflows/pycbc_make_inference_inj_workflow
@@ -26,8 +26,9 @@ import numpy
import pycbc.version
import socket
import sys
+
from pycbc import __version__
-from pycbc import results
+from pycbc import results, init_logging, add_common_pycbc_options
from pycbc.results import layout
from pycbc.results import metadata
from pycbc.workflow import configuration
@@ -74,6 +75,7 @@ def symlink_path(f, path):
# set command line parser
parser = argparse.ArgumentParser(description=__doc__[1:])
+add_common_pycbc_options(parser)
# injection options: either specify a number to create, or use the given file
group = parser.add_mutually_exclusive_group(required=True)
@@ -96,6 +98,8 @@ parser.add_argument("--version", action="version", version=__version__,
# parser command line
opts = parser.parse_args()
+init_logging(opts.verbose, default_level=1)
+
# configuration files
config_file_tmplt = 'inference-{}.ini'
config_file_dir = 'config_files'
@@ -114,10 +118,6 @@ core.makedir('{}/{}'.format(opts.output_dir, config_file_dir))
core.makedir('{}/{}'.format(opts.output_dir, posterior_file_dir))
core.makedir('{}/{}'.format(opts.output_dir, injection_file_dir))
-# log to terminal until we know where the path to log output file
-log_format = "%(asctime)s:%(levelname)s : %(message)s"
-logging.basicConfig(format=log_format, level=logging.INFO)
-
# create workflow and sub-workflows
workflow = core.Workflow(opts, name=opts.workflow_name)
finalize_workflow = core.Workflow(opts, name="finalization")
@@ -196,15 +196,9 @@ log_file_html = core.File(workflow.ifos, "WORKFLOW-LOG",
workflow.analysis_time,
extension=".html", directory=rdir["workflow"])
-# switch saving log to file
-logging.basicConfig(format=log_format, level=logging.INFO,
- filename=log_file_txt.storage_path, filemode="w")
-log_file = logging.FileHandler(filename=log_file_txt.storage_path, mode="w")
-log_file.setLevel(logging.INFO)
-formatter = logging.Formatter(log_format)
-log_file.setFormatter(formatter)
-logging.getLogger("").addHandler(log_file)
-logging.info("Created log file %s" % log_file_txt.storage_path)
+# Save log to file as well
+init_logging(opts.verbose, default_level=1, to_file=log_file_txt.storage_path)
+logging.info("Created log file %s", log_file_txt.storage_path)
config_files = {}
posterior_files = core.FileList([])
diff --git a/bin/workflows/pycbc_make_inference_plots_workflow b/bin/workflows/pycbc_make_inference_plots_workflow
index 52237650b98..a74b6afe0b8 100644
--- a/bin/workflows/pycbc_make_inference_plots_workflow
+++ b/bin/workflows/pycbc_make_inference_plots_workflow
@@ -29,8 +29,10 @@ import socket
import sys
import shlex
import numpy
+
from ligo import segments
-from pycbc import results
+
+from pycbc import results, init_logging, add_common_pycbc_options
from pycbc.results import layout
from pycbc.types import MultiDetOptionAction
from pycbc.types import MultiDetOptionAppendAction
@@ -127,6 +129,7 @@ def symlink_path(f, path):
# command line parser
parser = argparse.ArgumentParser(description=__doc__[1:])
+add_common_pycbc_options(parser)
# add option groups
configuration.add_workflow_command_line_group(parser)
# workflow options
@@ -147,8 +150,7 @@ core.makedir('{}/{}'.format(opts.output_dir, config_file_dir))
core.makedir('{}/{}'.format(opts.output_dir, posterior_file_dir))
# log to terminal until we know where the path to log output file
-log_format = "%(asctime)s:%(levelname)s : %(message)s"
-logging.basicConfig(format=log_format, level=logging.INFO)
+init_logging(opts.verbose, default_level=1)
# create workflow and sub-workflows
container = core.Workflow(opts, opts.workflow_name)
@@ -183,14 +185,8 @@ log_file_txt = core.File(workflow.ifos, "workflow-log", workflow.analysis_time,
log_file_html = core.File(workflow.ifos, "WORKFLOW-LOG", workflow.analysis_time,
extension=".html", directory=rdir["workflow"])
-# switch saving log to file
-logging.basicConfig(format=log_format, level=logging.INFO,
- filename=log_file_txt.storage_path, filemode="w")
-log_file = logging.FileHandler(filename=log_file_txt.storage_path, mode="w")
-log_file.setLevel(logging.INFO)
-formatter = logging.Formatter(log_format)
-log_file.setFormatter(formatter)
-logging.getLogger("").addHandler(log_file)
+# Save log to file as well
+init_logging(opts.verbose, default_level=1, to_file=log_file_txt.storage_path)
logging.info("Created log file %s" % log_file_txt.storage_path)
config_files = {}
diff --git a/bin/workflows/pycbc_make_inference_workflow b/bin/workflows/pycbc_make_inference_workflow
index 9978bce38fa..9139c3cc0f3 100644
--- a/bin/workflows/pycbc_make_inference_workflow
+++ b/bin/workflows/pycbc_make_inference_workflow
@@ -23,12 +23,14 @@ import h5py
import logging
import os
import shlex
-import pycbc
-import pycbc.workflow.minifollowups as mini
import socket
import sys
import numpy
+
from ligo import segments
+
+import pycbc
+import pycbc.workflow.minifollowups as mini
from pycbc import results
from pycbc.results import layout
from pycbc.types import MultiDetOptionAction
@@ -151,6 +153,7 @@ def symlink_path(f, path):
# command line parser
parser = argparse.ArgumentParser(description=__doc__[1:])
+pycbc.add_common_pycbc_options(parser)
# add option groups
configuration.add_workflow_command_line_group(parser)
# workflow options
@@ -167,6 +170,9 @@ parser.add_argument("--version", action="version", version=__version__,
# parser command line
opts = parser.parse_args()
+# Log to terminal until we know the path to the log output file
+pycbc.init_logging(opts.verbose, default_level=1)
+
# configuration files
config_file_tmplt = 'inference-{}.ini'
config_file_dir = 'config_files'
@@ -182,10 +188,6 @@ core.makedir(opts.output_dir)
core.makedir('{}/{}'.format(opts.output_dir, config_file_dir))
core.makedir('{}/{}'.format(opts.output_dir, posterior_file_dir))
-# log to terminal until we know where the path to log output file
-log_format = "%(asctime)s:%(levelname)s : %(message)s"
-logging.basicConfig(format=log_format, level=logging.INFO)
-
# create workflow and sub-workflows
container = core.Workflow(opts, opts.workflow_name)
workflow = core.Workflow(opts, 'main')
@@ -219,14 +221,8 @@ log_file_html = core.File(workflow.ifos, "WORKFLOW-LOG",
workflow.analysis_time,
extension=".html", directory=rdir["workflow"])
-# switch saving log to file
-logging.basicConfig(format=log_format, level=logging.INFO,
- filename=log_file_txt.storage_path, filemode="w")
-log_file = logging.FileHandler(filename=log_file_txt.storage_path, mode="w")
-log_file.setLevel(logging.INFO)
-formatter = logging.Formatter(log_format)
-log_file.setFormatter(formatter)
-logging.getLogger("").addHandler(log_file)
+# Save log to file
+pycbc.init_logging(opts.verbose, default_level=1, to_file=log_file_txt.storage_path)
logging.info("Created log file %s" % log_file_txt.storage_path)
config_files = {}
diff --git a/bin/workflows/pycbc_make_offline_search_workflow b/bin/workflows/pycbc_make_offline_search_workflow
index 25fc1e627ba..50cc7dd8c01 100755
--- a/bin/workflows/pycbc_make_offline_search_workflow
+++ b/bin/workflows/pycbc_make_offline_search_workflow
@@ -29,11 +29,19 @@ __program__ = "pycbc_offline"
import sys
import socket
-import pycbc.events, pycbc.workflow as wf
-import os, argparse, logging
+import os
+import argparse
+import logging
import configparser as ConfigParser
+import numpy
+import datetime
+import itertools
+
+import lal
from ligo import segments
-import numpy, lal, datetime, itertools
+
+import pycbc.events
+import pycbc.workflow as wf
from pycbc.results import static_table, layout, save_fig_with_metadata
from pycbc.results.metadata import html_escape
@@ -144,16 +152,14 @@ def check_stop(job_name, container, workflow, finalize_workflow):
parser = argparse.ArgumentParser(description=__doc__[1:])
+pycbc.add_common_pycbc_options(parser)
parser.add_argument('--version', action='version', version=__version__)
-parser.add_argument('--verbose', action='count',
- help="Incrementally add more verbosity")
wf.add_workflow_command_line_group(parser)
wf.add_workflow_settings_cli(parser)
args = parser.parse_args()
-# By default, we do logging.info, each --verbose adds a level of verbosity
-logging_level = args.verbose + 1 if args.verbose else 1
-pycbc.init_logging(logging_level)
+# Default logging level is info: --verbose adds to this
+pycbc.init_logging(args.verbose, default_level=1)
container = wf.Workflow(args, args.workflow_name)
workflow = wf.Workflow(args, args.workflow_name + '-main')
@@ -176,20 +182,13 @@ rdir = layout.SectionNumber('results', ['analysis_time',
wf.makedir(rdir.base)
wf.makedir(rdir['workflow'])
+# We are _also_ logging to a file
wf_log_file = wf.File(workflow.ifos, 'workflow-log', workflow.analysis_time,
extension='.txt',
directory=rdir['workflow'])
-logging.basicConfig(format='%(asctime)s:%(levelname)s : %(message)s',
- filename=wf_log_file.storage_path,
- level=logging.INFO,
- filemode='w')
-
-logfile = logging.FileHandler(filename=wf_log_file.storage_path,mode='w')
-logfile.setLevel(logging.INFO)
-formatter = logging.Formatter('%(asctime)s:%(levelname)s : %(message)s')
-logfile.setFormatter(formatter)
-logging.getLogger('').addHandler(logfile)
+pycbc.init_logging(args.verbose, default_level=1,
+ to_file=wf_log_file.storage_path)
logging.info("Created log file %s" % wf_log_file.storage_path)
# put start / end time at top of summary page
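
The workflow generators call init_logging twice: once to stderr as soon as the arguments are parsed, and again with to_file once the results directory and log-file path exist, so later messages go to both destinations (hence the "_also_ logging to a file" comment above). A sketch of that two-stage pattern, with an invented path standing in for wf_log_file.storage_path:

    import argparse
    import logging
    import os

    import pycbc

    parser = argparse.ArgumentParser()
    pycbc.add_common_pycbc_options(parser)
    args = parser.parse_args()

    # stage 1: stderr only, INFO by default (--verbose raises it further)
    pycbc.init_logging(args.verbose, default_level=1)
    logging.info("setting up the workflow")

    # stage 2: once the output location is known, add a file handler too;
    # the real scripts create the directory via wf.makedir
    os.makedirs("results/workflow", exist_ok=True)
    log_path = "results/workflow/workflow-log.txt"  # illustrative path
    pycbc.init_logging(args.verbose, default_level=1, to_file=log_path)
    logging.info("Created log file %s", log_path)  # now on stderr and in the file
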
diff --git a/bin/workflows/pycbc_make_psd_estimation_workflow b/bin/workflows/pycbc_make_psd_estimation_workflow
index 7dcc1bad4db..78ad36c24d6 100644
--- a/bin/workflows/pycbc_make_psd_estimation_workflow
+++ b/bin/workflows/pycbc_make_psd_estimation_workflow
@@ -19,28 +19,31 @@
"""Program for setting up a workflow which estimates the average PSD of a given
portion of strain data."""
-import pycbc
-import pycbc.version
-import pycbc.workflow
import os.path
import argparse
import logging
-from ligo import segments as _segments
import datetime
-import lal, sys
+import sys
+
+from ligo import segments as _segments
+import lal
+
+import pycbc
+import pycbc.version
+import pycbc.workflow
from pycbc.results import save_fig_with_metadata, two_column_layout
import pycbc.workflow
-logging.basicConfig(format='%(asctime)s:%(levelname)s : %(message)s',
- level=logging.INFO)
-
parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument('--version', action='version',
version=pycbc.version.git_verbose_msg)
pycbc.workflow.add_workflow_command_line_group(parser)
pycbc.workflow.add_workflow_settings_cli(parser)
args = parser.parse_args()
+pycbc.init_logging(args.verbose, default_level=1)
+
# FIXME: opts.tags is currently unused here.
container = pycbc.workflow.Workflow(args)
diff --git a/bin/workflows/pycbc_make_sbank_workflow b/bin/workflows/pycbc_make_sbank_workflow
index db0c491b3e7..d413b5cb86f 100644
--- a/bin/workflows/pycbc_make_sbank_workflow
+++ b/bin/workflows/pycbc_make_sbank_workflow
@@ -26,6 +26,7 @@ how a simple workflow is constructed with pycbc.workflow.
#imports
import os
import argparse
+
import pycbc
import pycbc.version
import pycbc.workflow as wf
@@ -169,6 +170,7 @@ class CombineHDFBanksExecutable(wf.Executable):
# so run this with --help to see what options are added.
_desc = __doc__[1:]
parser = argparse.ArgumentParser(description=_desc)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument('--version', action='version', version=__version__)
parser.add_argument("--output-file", type=str, default=None,
help="Specify the output file name. Either a name can be "
@@ -180,6 +182,8 @@ wf.add_workflow_command_line_group(parser)
wf.add_workflow_settings_cli(parser, include_subdax_opts=True)
args = parser.parse_args()
+pycbc.init_logging(args.verbose)
+
# Create the workflow object
workflow = wf.Workflow(args)
wf.makedir(args.output_dir)
diff --git a/bin/workflows/pycbc_make_uberbank_workflow b/bin/workflows/pycbc_make_uberbank_workflow
index d2ff77c1f05..c0498971706 100644
--- a/bin/workflows/pycbc_make_uberbank_workflow
+++ b/bin/workflows/pycbc_make_uberbank_workflow
@@ -35,6 +35,7 @@ that.
import os
import argparse
import logging
+
import pycbc
import pycbc.version
import pycbc.workflow as wf
@@ -140,11 +141,14 @@ class SbankDaxGenerator(wf.Executable):
# so run this with --help to see what options are added.
_desc = __doc__[1:]
parser = argparse.ArgumentParser(description=_desc)
+pycbc.add_common_pycbc_options(parser)
parser.add_argument('--version', action='version', version=__version__)
wf.add_workflow_command_line_group(parser)
wf.add_workflow_settings_cli(parser)
args = parser.parse_args()
+pycbc.init_logging(args.verbose)
+
# Create the workflow object
workflow = wf.Workflow(args)
diff --git a/examples/cal/foton_filter_esd_saturation/pycbc_foton_filter b/examples/cal/foton_filter_esd_saturation/pycbc_foton_filter
index 8c4c042147e..a3863e33555 100644
--- a/examples/cal/foton_filter_esd_saturation/pycbc_foton_filter
+++ b/examples/cal/foton_filter_esd_saturation/pycbc_foton_filter
@@ -21,6 +21,7 @@ import numpy
import sys
from foton import FilterFile, Filter
+from pycbc import init_logging, add_common_pycbc_options
from pycbc.filter.fotonfilter import filter_data, get_swstat_bits, read_gain_from_frames
from pycbc.frame import frame_paths
from pycbc.inject import InjectionSet, legacy_approximant_name
@@ -34,6 +35,8 @@ ifo_list = ['H1', 'L1']
parser = argparse.ArgumentParser(usage='pycbc_foton_filter [--options]',
description='Filter a single-column ASCII time series.')
+add_common_pycbc_options(parser)
+
# injection options
parser.add_argument("--data-file", type=str, required=True,
help="Path to single-column ASCII file with time series.")
@@ -80,8 +83,7 @@ parser.add_argument("--sample-rate", type=int, required=True,
opts = parser.parse_args()
# setup log
-logging_level = logging.DEBUG
-logging.basicConfig(format='%(asctime)s : %(message)s', level=logging_level)
+init_logging(opts.verbose, default_level=2)
# read data file with time series
data = numpy.loadtxt(opts.data_file)
diff --git a/examples/live/check_results.py b/examples/live/check_results.py
index 31089efbe28..c51d1912248 100755
--- a/examples/live/check_results.py
+++ b/examples/live/check_results.py
@@ -187,6 +187,7 @@ def check_found_events(args):
parser = argparse.ArgumentParser()
+pycbc.add_common_pycbc_options(parser)
parser.add_argument('--gps-start', type=float, required=True)
parser.add_argument('--gps-end', type=float, required=True)
parser.add_argument('--f-min', type=float, required=True)
@@ -195,7 +196,7 @@ def check_found_events(args):
parser.add_argument('--detectors', type=str, required=True, nargs='+')
args = parser.parse_args()
-log.basicConfig(level=log.INFO, format='%(asctime)s %(message)s')
+pycbc.init_logging(args.verbose, default_level=1)
single_fail = check_single_results(args)
found_fail = check_found_events(args)
diff --git a/pycbc/__init__.py b/pycbc/__init__.py
index 850adc67dd5..3b6f6f1e8f2 100644
--- a/pycbc/__init__.py
+++ b/pycbc/__init__.py
@@ -86,7 +86,8 @@ def add_common_pycbc_options(parser):
'logging at the info level, but -vv or '
'--verbose --verbose provides debug logging.')
-def init_logging(verbose=False,
+
+def init_logging(verbose=False, default_level=0, to_file=None,
format='%(asctime)s %(levelname)s : %(message)s'):
"""Common utility for setting up logging in PyCBC.
@@ -100,6 +101,12 @@ def init_logging(verbose=False,
or an integer representing the level to set. If True/False will set to
``logging.INFO``/``logging.WARN``. For higher logging levels, pass
an integer representing the level to set. (1 = INFO, 2 = DEBUG).
+    default_level : int, optional
+        The default logging level: added to ``verbose`` when that is an
+        integer, used on its own when ``verbose`` is None or False.
+    to_file : str, optional
+        Path to a file to log to instead of stderr. The file is
+        overwritten if it already exists.
format : str, optional
The format to use for logging messages.
"""
@@ -119,11 +126,15 @@ def sig_handler(signum, frame):
# See https://docs.python.org/3/library/logging.html#levels
# for log level definitions
logger = logging.getLogger()
- verbose_int = 0 if verbose is None else int(verbose)
+ verbose_int = default_level if verbose is None \
+ else int(verbose) + default_level
logger.setLevel(logging.WARNING - verbose_int * 10) # Initial setting
- sh = logging.StreamHandler()
- logger.addHandler(sh)
- sh.setFormatter(LogFormatter(fmt=format))
+ if to_file is not None:
+ handler = logging.FileHandler(to_file, mode='w')
+ else:
+ handler = logging.StreamHandler()
+ logger.addHandler(handler)
+ handler.setFormatter(LogFormatter(fmt=format))
def makedir(path):
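
For reference, the effective root-logger level set by the code above is logging.WARNING - (default_level + verbose) * 10, with verbose contributing nothing when it is None or False. A few spot checks of that arithmetic, mirroring the verbose_int computation in the diff:

    import logging

    def effective_level(verbose, default_level=0):
        # mirrors the verbose_int computation in pycbc.init_logging
        verbose_int = default_level if verbose is None \
            else int(verbose) + default_level
        return logging.getLevelName(logging.WARNING - verbose_int * 10)

    print(effective_level(None))                   # WARNING
    print(effective_level(1))                      # INFO
    print(effective_level(None, default_level=1))  # INFO
    print(effective_level(1, default_level=1))     # DEBUG
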
diff --git a/pycbc/_version.py b/pycbc/_version.py
index 617cf4a523e..cdb4e915a3d 100644
--- a/pycbc/_version.py
+++ b/pycbc/_version.py
@@ -25,6 +25,9 @@
import argparse
import inspect
import subprocess
+import logging
+
+logger = logging.getLogger('pycbc._version')
def print_link(library):
diff --git a/pycbc/_version_helper.py b/pycbc/_version_helper.py
index 4dde5936e5c..27c093af341 100644
--- a/pycbc/_version_helper.py
+++ b/pycbc/_version_helper.py
@@ -25,6 +25,9 @@
import subprocess
import re
import distutils.version
+import logging
+
+logger = logging.getLogger('pycbc._version_helper')
class GitInfo(object):
diff --git a/pycbc/bin_utils.py b/pycbc/bin_utils.py
index e04c52cbb72..6727d57d03b 100644
--- a/pycbc/bin_utils.py
+++ b/pycbc/bin_utils.py
@@ -7,6 +7,9 @@
NegInf = float("-inf")
import numpy
import math
+import logging
+
+logger = logging.getLogger('pycbc.bin_utils')
class Bins(object):
diff --git a/pycbc/boundaries.py b/pycbc/boundaries.py
index ac00d68afd9..2a22c8c9f9a 100644
--- a/pycbc/boundaries.py
+++ b/pycbc/boundaries.py
@@ -28,6 +28,10 @@
"""
import numpy
+import logging
+
+logger = logging.getLogger('pycbc.boundaries')
+
class _Bound(float):
"""Adds methods to float for boundary comparisons."""
diff --git a/pycbc/conversions.py b/pycbc/conversions.py
index 20c78f88949..3daf5c8a1b0 100644
--- a/pycbc/conversions.py
+++ b/pycbc/conversions.py
@@ -30,17 +30,23 @@
import copy
import numpy
+import logging
+
import lal
+
from pycbc.detector import Detector
import pycbc.cosmology
+from pycbc import neutron_stars as ns
+
from .coordinates import (
spherical_to_cartesian as _spherical_to_cartesian,
cartesian_to_spherical as _cartesian_to_spherical)
-from pycbc import neutron_stars as ns
pykerr = pycbc.libutils.import_optional('pykerr')
lalsim = pycbc.libutils.import_optional('lalsimulation')
+logger = logging.getLogger('pycbc.conversions')
+
#
# =============================================================================
#
diff --git a/pycbc/cosmology.py b/pycbc/cosmology.py
index f97f5132b69..092a5758e01 100644
--- a/pycbc/cosmology.py
+++ b/pycbc/cosmology.py
@@ -38,6 +38,7 @@
from astropy.cosmology import parameters
import pycbc.conversions
+logger = logging.getLogger('pycbc.cosmology')
DEFAULT_COSMOLOGY = 'Planck15'
@@ -181,10 +182,10 @@ def z_at_value(func, fval, unit, zmax=1000., **kwargs):
counter += 1
if counter == 5:
# give up and warn the user
- logging.warning("One or more values correspond to a "
- "redshift > {0:.1e}. The redshift for these "
- "have been set to inf. If you would like "
- "better precision, call God.".format(zmax))
+ logger.warning("One or more values correspond to a "
+ "redshift > {0:.1e}. The redshift for these "
+ "have been set to inf. If you would like "
+ "better precision, call God.".format(zmax))
break
return pycbc.conversions.formatreturn(zs, input_is_array)
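
The library-side half of the change is the same everywhere: each submodule gets a named logger under the 'pycbc.' hierarchy and its bare logging.* calls are routed through it, so records still propagate to whatever handler pycbc.init_logging installed on the root logger while remaining filterable per subsystem. A sketch of the pattern, using an invented module name:

    import logging

    # one logger per module, named after its import path
    logger = logging.getLogger('pycbc.example_module')

    def do_work(n_items):
        # lazy %-style arguments, no manual string formatting
        logger.info("processing %d items", n_items)
        if n_items == 0:
            logger.warning("nothing to do")

Because the loggers are hierarchical, a user can silence a single subsystem with, for example, logging.getLogger('pycbc.example_module').setLevel(logging.ERROR) without touching the rest of the package.
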
diff --git a/pycbc/detector.py b/pycbc/detector.py
index b1cbcf2cfc0..261b0eb5feb 100644
--- a/pycbc/detector.py
+++ b/pycbc/detector.py
@@ -29,16 +29,21 @@
between observatories.
"""
import os
+import logging
import numpy as np
+from numpy import cos, sin, pi
+
import lal
-import pycbc.libutils
-from pycbc.types import TimeSeries
-from pycbc.types.config import InterpolatingConfigParser
from astropy.time import Time
from astropy import constants, coordinates, units
from astropy.coordinates.matrix_utilities import rotation_matrix
from astropy.units.si import sday, meter
-from numpy import cos, sin, pi
+
+import pycbc.libutils
+from pycbc.types import TimeSeries
+from pycbc.types.config import InterpolatingConfigParser
+
+logger = logging.getLogger('pycbc.detector')
# Response functions are modelled after those in lalsuite and as also
# presented in https://arxiv.org/pdf/gr-qc/0008066.pdf
diff --git a/pycbc/dq.py b/pycbc/dq.py
index 1eed4ae1281..0cff913a157 100644
--- a/pycbc/dq.py
+++ b/pycbc/dq.py
@@ -32,6 +32,7 @@
from pycbc.frame.gwosc import get_run
from pycbc.io import get_file
+logger = logging.getLogger('pycbc.dq')
def parse_veto_definer(veto_def_filename, ifos):
""" Parse a veto definer file from the filename and return a dictionary
@@ -117,10 +118,10 @@ def query_dqsegdb2(detector, flag_name, start_time, end_time, server):
host=server)
return query_res['active']
except Exception as e:
- logging.error('Could not query segment database, check name '
- '(%s), times (%d-%d) and server (%s)',
- complete_flag, int(start_time), int(end_time),
- server)
+ logger.error('Could not query segment database, check name '
+ '(%s), times (%d-%d) and server (%s)',
+ complete_flag, int(start_time), int(end_time),
+ server)
raise e
def query_flag(ifo, segment_name, start_time, end_time,
diff --git a/pycbc/events/coherent.py b/pycbc/events/coherent.py
index adabf3e7ce5..dd13ce9513a 100644
--- a/pycbc/events/coherent.py
+++ b/pycbc/events/coherent.py
@@ -24,9 +24,11 @@
""" This module contains functions for calculating and manipulating coherent
triggers.
"""
-
+import logging
import numpy as np
+logger = logging.getLogger('pycbc.events.coherent')
+
def get_coinc_indexes(idx_dict, time_delay_idx):
"""Return the indexes corresponding to coincident triggers, requiring
diff --git a/pycbc/events/ranking.py b/pycbc/events/ranking.py
index 5da85d01019..0368bb05371 100644
--- a/pycbc/events/ranking.py
+++ b/pycbc/events/ranking.py
@@ -1,8 +1,11 @@
""" This module contains functions for calculating single-ifo ranking
statistic values
"""
+import logging
import numpy
+logger = logging.getLogger('pycbc.events.ranking')
+
def effsnr(snr, reduced_x2, fac=250.):
"""Calculate the effective SNR statistic. See (S5y1 paper) for definition.
diff --git a/pycbc/events/threshold_cpu.py b/pycbc/events/threshold_cpu.py
index 3b6d99bed2c..86ec991e801 100644
--- a/pycbc/events/threshold_cpu.py
+++ b/pycbc/events/threshold_cpu.py
@@ -21,11 +21,14 @@
#
# =============================================================================
#
+import logging
import numpy
from .simd_threshold_cython import parallel_thresh_cluster, parallel_threshold
from .eventmgr import _BaseThresholdCluster
from .. import opt
+logger = logging.getLogger('pycbc.events.threshold_cpu')
+
if opt.HAVE_GETCONF:
default_segsize = opt.LEVEL2_CACHE_SIZE / numpy.dtype('complex64').itemsize
else:
diff --git a/pycbc/events/threshold_cuda.py b/pycbc/events/threshold_cuda.py
index f810c48c141..4a9dc860dbb 100644
--- a/pycbc/events/threshold_cuda.py
+++ b/pycbc/events/threshold_cuda.py
@@ -21,6 +21,7 @@
#
# =============================================================================
#
+import logging
import numpy, mako.template
from pycuda.tools import dtype_to_ctype
from pycuda.elementwise import ElementwiseKernel
@@ -28,6 +29,8 @@
from .eventmgr import _BaseThresholdCluster
import pycbc.scheme
+logger = logging.getLogger('pycbc.events.threshold_cuda')
+
threshold_op = """
if (i == 0)
bn[0] = 0;
diff --git a/pycbc/events/triggers.py b/pycbc/events/triggers.py
index 489d29debdb..5be3580e24f 100644
--- a/pycbc/events/triggers.py
+++ b/pycbc/events/triggers.py
@@ -16,13 +16,16 @@
""" This modules contains functions for reading single and coincident triggers
from the command line.
"""
-
+import logging
import h5py
import numpy
+
from pycbc import conversions, pnutils
from pycbc.events import coinc
import pycbc.detector
+logger = logging.getLogger('pycbc.events.triggers')
+
def insert_bank_bins_option_group(parser):
""" Add options to the optparser object for selecting templates in bins.
diff --git a/pycbc/events/veto.py b/pycbc/events/veto.py
index 2a06311d1e8..9fe636e328f 100644
--- a/pycbc/events/veto.py
+++ b/pycbc/events/veto.py
@@ -1,10 +1,13 @@
""" This module contains utilities to manipulate trigger lists based on
segment.
"""
+import logging
import numpy
from ligo.lw import table, lsctables, utils as ligolw_utils
from ligo.segments import segment, segmentlist
+logger = logging.getLogger('pycbc.events.veto')
+
def start_end_to_segments(start, end):
return segmentlist([segment(s, e) for s, e in zip(start, end)])
diff --git a/pycbc/fft/fftw_pruned.py b/pycbc/fft/fftw_pruned.py
index 239dd32c0e6..d4fd0fcdab9 100644
--- a/pycbc/fft/fftw_pruned.py
+++ b/pycbc/fft/fftw_pruned.py
@@ -16,6 +16,8 @@
import logging
from .fftw_pruned_cython import second_phase_cython
+logger = logging.getLogger('pycbc.fft.fftw_pruned')
+
warn_msg = ("The FFTW_pruned module can be used to speed up computing SNR "
"timeseries by computing first at a low sample rate and then "
"computing at full sample rate only at certain samples. This code "
@@ -24,7 +26,7 @@
"This code would need verification before trusting results. "
"Please do contribute test cases.")
-logging.warning(warn_msg)
+logger.warning(warn_msg)
# FFTW constants
FFTW_FORWARD = -1
diff --git a/pycbc/fft/npfft.py b/pycbc/fft/npfft.py
index 40ef0733278..77439dca58c 100644
--- a/pycbc/fft/npfft.py
+++ b/pycbc/fft/npfft.py
@@ -31,6 +31,8 @@
from .core import _check_fft_args
from .core import _BaseFFT, _BaseIFFT
+logger = logging.getLogger('pycbc.fft.npfft')
+
_INV_FFT_MSG = ("I cannot perform an {} between data with an input type of "
"{} and an output type of {}")
@@ -76,7 +78,7 @@ class FFT(_BaseFFT):
"""
def __init__(self, invec, outvec, nbatch=1, size=None):
super(FFT, self).__init__(invec, outvec, nbatch, size)
- logging.warning(WARN_MSG)
+ logger.warning(WARN_MSG)
self.prec, self.itype, self.otype = _check_fft_args(invec, outvec)
def execute(self):
@@ -89,7 +91,7 @@ class IFFT(_BaseIFFT):
"""
def __init__(self, invec, outvec, nbatch=1, size=None):
super(IFFT, self).__init__(invec, outvec, nbatch, size)
- logging.warning(WARN_MSG)
+ logger.warning(WARN_MSG)
self.prec, self.itype, self.otype = _check_fft_args(invec, outvec)
def execute(self):
diff --git a/pycbc/filter/fotonfilter.py b/pycbc/filter/fotonfilter.py
index cc084936196..3cb4a6c4930 100644
--- a/pycbc/filter/fotonfilter.py
+++ b/pycbc/filter/fotonfilter.py
@@ -23,6 +23,8 @@
# import dependencies that are not standard to pycbc
from foton import Filter, iir2z
+logger = logging.getLogger('pycbc.filter.fotonfilter')
+
def get_swstat_bits(frame_filenames, swstat_channel_name, start_time, end_time):
''' This function just checks the first time in the SWSTAT channel
to see if the filter was on, it doesn't check times beyond that.
@@ -74,7 +76,7 @@ def filter_data(data, filter_name, filter_file, bits, filterbank_off=False,
# if bit is on then filter the data
bit = int(bits[-(i+1)])
if bit:
- logging.info('filtering with filter module %d', i)
+ logger.info('filtering with filter module %d', i)
# if there are second-order sections then filter with them
if len(filter.sections):
@@ -84,7 +86,9 @@ def filter_data(data, filter_name, filter_file, bits, filterbank_off=False,
else:
coeffs = iir2z(filter_file[filter_name][i])
if len(coeffs) > 1:
- logging.info('Gain-only filter module return more than one number')
+ logger.info(
+                'Gain-only filter module returned more than one number'
+ )
sys.exit()
gain = coeffs[0]
data = gain * data
diff --git a/pycbc/filter/matchedfilter.py b/pycbc/filter/matchedfilter.py
index c14a7104220..e974ab2eb39 100644
--- a/pycbc/filter/matchedfilter.py
+++ b/pycbc/filter/matchedfilter.py
@@ -28,6 +28,8 @@
import logging
from math import sqrt
+import numpy
+
from pycbc.types import TimeSeries, FrequencySeries, zeros, Array
from pycbc.types import complex_same_precision_as, real_same_precision_as
from pycbc.fft import fft, ifft, IFFT
@@ -35,7 +37,8 @@
from pycbc import events
from pycbc.events import ranking
import pycbc
-import numpy
+
+logger = logging.getLogger('pycbc.filter.matchedfilter')
BACKEND_PREFIX="pycbc.filter.matchedfilter_"
@@ -276,7 +279,7 @@ def full_matched_filter_and_cluster_symm(self, segnum, template_norm, window, ep
if len(idx) == 0:
return [], [], [], [], []
- logging.info("%s points above threshold" % str(len(idx)))
+ logger.info("%d points above threshold", len(idx))
snr = TimeSeries(self.snr_mem, epoch=epoch, delta_t=self.delta_t, copy=False)
corr = FrequencySeries(self.corr_mem, delta_f=self.delta_f, copy=False)
@@ -323,7 +326,7 @@ def full_matched_filter_and_cluster_fc(self, segnum, template_norm, window, epoc
if len(idx) == 0:
return [], [], [], [], []
- logging.info("%s points above threshold" % str(len(idx)))
+ logger.info("%d points above threshold", len(idx))
snr = TimeSeries(self.snr_mem, epoch=epoch, delta_t=self.delta_t, copy=False)
corr = FrequencySeries(self.corr_mem, delta_f=self.delta_f, copy=False)
@@ -366,7 +369,7 @@ def full_matched_filter_thresh_only(self, segnum, template_norm, window=None, ep
self.ifft.execute()
idx, snrv = events.threshold_only(self.snr_mem[self.segments[segnum].analyze],
self.snr_threshold / norm)
- logging.info("%s points above threshold" % str(len(idx)))
+ logger.info("%d points above threshold", len(idx))
snr = TimeSeries(self.snr_mem, epoch=epoch, delta_t=self.delta_t, copy=False)
corr = FrequencySeries(self.corr_mem, delta_f=self.delta_f, copy=False)
@@ -426,8 +429,8 @@ def hierarchical_matched_filter_and_cluster(self, segnum, template_norm, window)
return [], None, [], [], []
idx_red, _ = events.cluster_reduce(idx_red, snrv_red, window / self.downsample_factor)
- logging.info("%s points above threshold at reduced resolution"\
- %(str(len(idx_red)),))
+ logger.info("%d points above threshold at reduced resolution",
+ len(idx_red))
# The fancy upsampling is here
if self.upsample_method=='pruned_fft':
@@ -463,7 +466,7 @@ def hierarchical_matched_filter_and_cluster(self, segnum, template_norm, window)
else:
idx, snrv = [], []
- logging.info("%s points at full rate and clustering" % len(idx))
+ logger.info("%d points at full rate and clustering", len(idx))
return self.snr_mem, norm, self.corr_mem_full, idx, snrv
else:
raise ValueError("Invalid upsample method")
@@ -953,11 +956,11 @@ def full_matched_filter_and_cluster(self, hplus, hcross, hplus_norm,
if len(idx) == 0:
return [], 0, 0, [], [], [], [], 0, 0, 0
- logging.info("%s points above threshold", str(len(idx)))
+ logger.info("%d points above threshold", len(idx))
idx, snrv = events.cluster_reduce(idx, snrv, window)
- logging.info("%s clustered points", str(len(idx)))
+ logger.info("%d clustered points", len(idx))
# erased self.
u_vals, coa_phase = self._maximized_extrinsic_params\
(I_plus.data, I_cross.data, hplus_cross_corr,
@@ -1759,8 +1762,8 @@ def _process_batch(self):
# We have an SNR so high that we will drop the entire analysis
# of this chunk of time!
if self.snr_abort_threshold is not None and s > self.snr_abort_threshold:
- logging.info("We are seeing some *really* high SNRs, lets"
- " assume they aren't signals and just give up")
+ logger.info("We are seeing some *really* high SNRs, let's "
+ "assume they aren't signals and just give up")
return False, []
veto_info.append((snrv, norm, l, htilde, stilde))
@@ -1978,8 +1981,8 @@ def followup_event_significance(ifo, data_reader, bank,
peak_full + half_dur_samples + 1)
baysnr = snr[snr_slice]
- logging.info('Adding %s to candidate, pvalue %s, %s samples', ifo,
- pvalue, nsamples)
+ logger.info('Adding %s to candidate, pvalue %s, %s samples', ifo,
+ pvalue, nsamples)
return {
'snr_series': baysnr * norm,
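
Several of the hunks above also switch from eager %-interpolation to passing the arguments to the logging call itself, so the message is only formatted if a handler actually emits the record. A minimal before/after illustration:

    import logging

    logger = logging.getLogger('pycbc.filter.matchedfilter')
    idx = range(12345)

    # eager: the string is always built, even when INFO is disabled
    logger.info("%s points above threshold" % str(len(idx)))

    # lazy: formatting is deferred to the handler
    logger.info("%d points above threshold", len(idx))
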
diff --git a/pycbc/inference/burn_in.py b/pycbc/inference/burn_in.py
index b5cd2970a06..83bea886048 100644
--- a/pycbc/inference/burn_in.py
+++ b/pycbc/inference/burn_in.py
@@ -34,6 +34,8 @@
from pycbc.io.record import get_vars_from_arg
+logger = logging.getLogger('pycbc.inference.burn_in')
+
# The value to use for a burn-in iteration if a chain is not burned in
NOT_BURNED_IN_ITER = -1
@@ -548,7 +550,7 @@ def evaluate(self, filename):
"""Runs all of the burn-in tests."""
# evaluate all the tests
for tst in self.do_tests:
- logging.info("Evaluating %s burn-in test", tst)
+ logger.info("Evaluating %s burn-in test", tst)
getattr(self, tst)(filename)
# evaluate each chain at a time
for ci in range(self.nchains):
@@ -562,8 +564,8 @@ def evaluate(self, filename):
tibi, tbi)
self.is_burned_in[ci] = is_burned_in
self.burn_in_iteration[ci] = burn_in_iter
- logging.info("Number of chains burned in: %i of %i",
- self.is_burned_in.sum(), self.nchains)
+ logger.info("Number of chains burned in: %i of %i",
+ self.is_burned_in.sum(), self.nchains)
def write(self, fp, path=None):
"""Writes burn-in info to an open HDF file.
@@ -731,17 +733,17 @@ def evaluate(self, filename):
"""Runs all of the burn-in tests."""
# evaluate all the tests
for tst in self.do_tests:
- logging.info("Evaluating %s burn-in test", tst)
+ logger.info("Evaluating %s burn-in test", tst)
getattr(self, tst)(filename)
is_burned_in, burn_in_iter = evaluate_tests(
self.burn_in_test, self.test_is_burned_in,
self.test_burn_in_iteration)
self.is_burned_in = is_burned_in
self.burn_in_iteration = burn_in_iter
- logging.info("Is burned in: %r", self.is_burned_in)
+ logger.info("Is burned in: %r", self.is_burned_in)
if self.is_burned_in:
- logging.info("Burn-in iteration: %i",
- int(self.burn_in_iteration))
+ logger.info("Burn-in iteration: %i",
+ int(self.burn_in_iteration))
@staticmethod
def _extra_tests_from_config(cp, section, tag):
diff --git a/pycbc/inject/inject.py b/pycbc/inject/inject.py
index 2825f93cd42..c1149344616 100644
--- a/pycbc/inject/inject.py
+++ b/pycbc/inject/inject.py
@@ -27,11 +27,14 @@
import os
import numpy as np
-import lal
import copy
import logging
from abc import ABCMeta, abstractmethod
import h5py
+
+import lal
+from ligo.lw import utils as ligolw_utils, ligolw, lsctables
+
from pycbc import waveform, frame, libutils
from pycbc.opt import LimitedSizeDict
from pycbc.waveform import (get_td_waveform, fd_det,
@@ -44,7 +47,8 @@
from pycbc.filter import resample_to_delta_t
import pycbc.io
from pycbc.io.ligolw import LIGOLWContentHandler
-from ligo.lw import utils as ligolw_utils, ligolw, lsctables
+
+logger = logging.getLogger('pycbc.inject.inject')
sim = libutils.import_optional('lalsimulation')
@@ -111,7 +115,7 @@ def projector(detector_name, inj, hp, hc, distance_scale=1):
if hasattr(inj, 'detector_projection_method'):
projection_method = inj.detector_projection_method
- logging.info('Injecting at %s, method is %s', tc, projection_method)
+ logger.info('Injecting at %s, method is %s', tc, projection_method)
# compute the detector response and add it to the strain
signal = detector.project_wave(hp_tapered, hc_tapered,
diff --git a/pycbc/io/__init__.py b/pycbc/io/__init__.py
index ffe51f29c48..b410e513d8b 100644
--- a/pycbc/io/__init__.py
+++ b/pycbc/io/__init__.py
@@ -4,6 +4,8 @@
from .record import *
from .gracedb import *
+logger = logging.getLogger('pycbc.io')
+
def get_file(url, retry=5, **args):
""" Retrieve file with retry upon failure
@@ -17,7 +19,7 @@ def get_file(url, retry=5, **args):
try:
return download_file(url, **args)
except Exception as e:
- logging.warning("Failed on attempt %d to download %s", i, url)
+ logger.warning("Failed on attempt %d to download %s", i, url)
if i >= retry:
- logging.error("Giving up on %s", url)
+ logger.error("Giving up on %s", url)
raise e
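
The retry loop in get_file is a small pattern worth noting: warn on every failed attempt, escalate to an error and re-raise only after the last one. A self-contained sketch of the same structure, with urllib's urlopen standing in for the real download helper:

    import logging
    from urllib.request import urlopen

    logger = logging.getLogger('pycbc.io')

    def get_with_retry(url, retry=5):
        """Warn on each failure; give up (and re-raise) after `retry` tries."""
        for i in range(1, retry + 1):
            try:
                return urlopen(url).read()
            except Exception:
                logger.warning("Failed on attempt %d to download %s", i, url)
                if i >= retry:
                    logger.error("Giving up on %s", url)
                    raise
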
diff --git a/pycbc/io/hdf.py b/pycbc/io/hdf.py
index 9b688e8475c..35043d910e8 100644
--- a/pycbc/io/hdf.py
+++ b/pycbc/io/hdf.py
@@ -25,6 +25,8 @@
from pycbc.events import ranking, veto
from pycbc.events import mean_if_greater_than_zero
+logger = logging.getLogger('pycbc.io.hdf')
+
class HFile(h5py.File):
""" Low level extensions to the capabilities of reading an hdf5 File
@@ -238,8 +240,8 @@ def __len__(self):
def __add__(self, other):
if self.data == {}:
- logging.debug('Adding data to a DictArray instance which'
- ' was initialized with an empty dict')
+ logger.debug('Adding data to a DictArray instance which '
+ 'was initialized with an empty dict')
return self._return(data=other)
data = {}
@@ -247,7 +249,7 @@ def __add__(self, other):
try:
data[k] = np.concatenate([self.data[k], other.data[k]])
except KeyError:
- logging.info('%s does not exist in other data' % k)
+ logger.info('%s does not exist in other data', k)
return self._return(data=data)
def select(self, idx):
@@ -459,7 +461,7 @@ def get_column(self, col):
Values from the dataset, filtered if requested and
concatenated in order of file list
"""
- logging.info('getting %s' % col)
+ logger.info('getting %s', col)
vals = []
for f in self.files:
d = FileData(f, group=self.group, columnlist=self.columns,
@@ -468,7 +470,7 @@ def get_column(self, col):
# Close each file since h5py has an upper limit on the number of
# open file objects (approx. 1000)
d.close()
- logging.info('- got %i values' % sum(len(v) for v in vals))
+ logger.info('- got %i values', sum(len(v) for v in vals))
return np.concatenate(vals)
@@ -513,14 +515,14 @@ def __init__(self, trig_file, detector, bank_file=None, veto_file=None,
chunksize : int , default 10**6
Size of chunks to read in for the filter_rank / threshold.
"""
- logging.info('Loading triggers')
+ logger.info('Loading triggers')
self.trigs_f = HFile(trig_file, 'r')
self.trigs = self.trigs_f[detector]
self.ntriggers = self.trigs['end_time'].size
self.ifo = detector # convenience attributes
self.detector = detector
if bank_file:
- logging.info('Loading bank')
+ logger.info('Loading bank')
self.bank = HFile(bank_file, 'r')
else:
# empty dict in place of non-existent hdf file
@@ -538,8 +540,8 @@ def __init__(self, trig_file, detector, bank_file=None, veto_file=None,
if filter_rank:
assert filter_threshold is not None
- logging.info("Applying threshold of %.3f on %s",
- filter_threshold, filter_rank)
+ logger.info("Applying threshold of %.3f on %s",
+ filter_threshold, filter_rank)
fcn_dsets = (ranking.sngls_ranking_function_dict[filter_rank],
ranking.required_datasets[filter_rank])
idx, _ = self.trigs_f.select(
@@ -551,7 +553,7 @@ def __init__(self, trig_file, detector, bank_file=None, veto_file=None,
group=detector,
chunksize=chunksize,
)
- logging.info("%d triggers remain", idx.size)
+ logger.info("%d triggers remain", idx.size)
# If self.mask already has values, need to take these into account:
self.and_masks(idx)
@@ -559,10 +561,10 @@ def __init__(self, trig_file, detector, bank_file=None, veto_file=None,
# Apply a filter on the triggers which is _not_ a ranking statistic
for rank_str in ranking.sngls_ranking_function_dict.keys():
if f'self.{rank_str}' in filter_func:
- logging.warning('Supplying the ranking (%s) in '
- 'filter_func is inefficient, suggest to '
- 'use filter_rank instead.', rank_str)
- logging.info('Setting up filter function')
+ logger.warning('Supplying the ranking (%s) in '
+ 'filter_func is inefficient, suggest to '
+ 'use filter_rank instead.', rank_str)
+ logger.info('Setting up filter function')
for c in self.trigs.keys():
if c in filter_func:
setattr(self, '_'+c, self.trigs[c][:])
@@ -578,22 +580,22 @@ def __init__(self, trig_file, detector, bank_file=None, veto_file=None,
if c in filter_func: delattr(self, '_'+c)
self.apply_mask(filter_mask)
- logging.info('%i triggers remain after cut on %s',
- sum(self.mask), filter_func)
+ logger.info('%i triggers remain after cut on %s',
+ sum(self.mask), filter_func)
if veto_file:
- logging.info('Applying veto segments')
+ logger.info('Applying veto segments')
# veto_mask is an array of indices into the trigger arrays
# giving the surviving triggers
- logging.info('%i triggers before vetoes', self.mask_size)
+ logger.info('%i triggers before vetoes', self.mask_size)
veto_mask, _ = events.veto.indices_outside_segments(
self.end_time, [veto_file],
ifo=detector, segment_name=segment_name)
# Update mask accordingly
self.apply_mask(veto_mask)
- logging.info('%i triggers remain after vetoes',
- self.mask_size)
+ logger.info('%i triggers remain after vetoes',
+ self.mask_size)
def __getitem__(self, key):
# Is key in the TRIGGER_MERGE file?
@@ -714,10 +716,10 @@ def mask_to_n_loudest_clustered_events(self, rank_method,
self.apply_mask(keep)
if len(stat) == 0:
- logging.warning("No triggers after thresholding")
+ logger.warning("No triggers after thresholding")
return
else:
- logging.info("%d triggers after thresholding", len(stat))
+ logger.info("%d triggers after thresholding", len(stat))
index = stat.argsort()[::-1]
new_times = []
@@ -938,8 +940,8 @@ def __init__(self, coinc_file, bank_file, sngl_files=None, n_loudest=None,
raise RuntimeError("IFOs in statmap file not all represented "
"by single-detector trigger files.")
if not sorted(self.sngl_files.keys()) == sorted(self.ifos):
- logging.warning("WARNING: Single-detector trigger files "
- "given for IFOs not in the statmap file")
+ logger.warning("WARNING: Single-detector trigger files "
+ "given for IFOs not in the statmap file")
self.bank_file = HFile(bank_file, "r")
self.n_loudest = n_loudest
@@ -957,8 +959,8 @@ def sort_arr(self):
try:
ifar = self.coinc_file.get_column('ifar')
except KeyError:
- logging.warning("WARNING: Can't find inclusive IFAR!"
- "Using exclusive IFAR instead ...")
+                logger.warning("WARNING: Can't find inclusive IFAR! "
+                               "Using exclusive IFAR instead ...")
ifar = self.coinc_file.get_column('ifar_exc')
self._inclusive = False
else:
@@ -1247,7 +1249,7 @@ def to_coinc_hdf_object(self, file_name):
ofd = h5py.File(file_name,'w')
# Some fields are special cases
- logging.info("Outputting search results")
+ logger.info("Outputting search results")
time = self.get_end_time()
# time will be used later to determine active ifos
ofd['time'] = time
@@ -1263,7 +1265,7 @@ def to_coinc_hdf_object(self, file_name):
for field in ['stat']:
ofd[field] = self.get_coincfile_array(field)
- logging.info("Outputting template information")
+ logger.info("Outputting template information")
# Bank fields
for field in ['mass1','mass2','spin1z','spin2z']:
ofd[field] = self.get_bankfile_array(field)
@@ -1272,8 +1274,8 @@ def to_coinc_hdf_object(self, file_name):
mass2 = self.get_bankfile_array('mass2')
ofd['chirp_mass'], _ = pnutils.mass1_mass2_to_mchirp_eta(mass1, mass2)
- logging.info("Outputting single-trigger information")
- logging.info("reduced chisquared")
+ logger.info("Outputting single-trigger information")
+ logger.info("reduced chisquared")
chisq_vals_valid = self.get_snglfile_array_dict('chisq')
chisq_dof_vals_valid = self.get_snglfile_array_dict('chisq_dof')
for ifo in self.ifos:
@@ -1287,12 +1289,12 @@ def to_coinc_hdf_object(self, file_name):
# Single-detector fields
for field in ['sg_chisq', 'end_time', 'sigmasq',
'psd_var_val']:
- logging.info(field)
+ logger.info(field)
try:
vals_valid = self.get_snglfile_array_dict(field)
except KeyError:
- logging.info(field + " is not present in the "
- "single-detector files")
+ logger.info("%s is not present in the "
+ "single-detector files", field)
for ifo in self.ifos:
# Some of the values will not be valid for all IFOs,
@@ -1313,7 +1315,7 @@ def to_coinc_hdf_object(self, file_name):
network_snr_sq[valid] += vals[valid] ** 2.0
ofd['network_snr'] = np.sqrt(network_snr_sq)
- logging.info("Triggered detectors")
+ logger.info("Triggered detectors")
# Create a n_ifos by n_events matrix, with the ifo letter if the
# event contains a trigger from the ifo, empty string if not
triggered_matrix = [[ifo[0] if v else ''
@@ -1325,7 +1327,7 @@ def to_coinc_hdf_object(self, file_name):
ofd.create_dataset('trig', data=triggered_detectors,
                           dtype='<U3')
diff --git a/pycbc/live/snr_optimizer.py b/pycbc/live/snr_optimizer.py
--- a/pycbc/live/snr_optimizer.py
+++ b/pycbc/live/snr_optimizer.py
@@ -166,7 +168,7 @@ def compute_minus_network_snr(v, *argv):
if len(argv) == 1:
argv = argv[0]
nsnr, _ = compute_network_snr_core(v, *argv)
- logging.debug('snr: %s', nsnr)
+ logger.debug('snr: %s', nsnr)
return -nsnr
@@ -196,7 +198,7 @@ def optimize_di(bounds, cli_args, extra_args, initial_point):
# add the initial point to the population
population = numpy.concatenate((population[:-1],
initial_point))
- logging.debug('Initial population: %s', population)
+ logger.debug('Initial population: %s', population)
results = differential_evolution(
compute_minus_network_snr,
@@ -261,7 +263,7 @@ def optimize_pso(bounds, cli_args, extra_args, initial_point):
# add the initial point to the population
population = numpy.concatenate((population[:-1],
initial_point))
- logging.debug('Initial population: %s', population)
+ logger.debug('Initial population: %s', population)
optimizer = ps.single.GlobalBestPSO(
n_particles=int(cli_args.snr_opt_pso_particles),
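
Both optimizers above splice the known starting point into the randomly drawn initial population by replacing its last member, so the seed is always evaluated alongside the random samples. A sketch of that idea with scipy's differential_evolution and a toy objective; everything here is illustrative rather than the PyCBC objective or option names:

    import numpy
    from scipy.optimize import differential_evolution

    def objective(x):
        # toy stand-in for compute_minus_network_snr
        return numpy.sum((x - 0.3) ** 2)

    bounds = [(-1.0, 1.0), (-1.0, 1.0)]
    population = numpy.random.uniform(low=[b[0] for b in bounds],
                                      high=[b[1] for b in bounds],
                                      size=(15, len(bounds)))
    initial_point = numpy.array([[0.25, -0.1]])

    # replace the last random member with the known starting point
    population = numpy.concatenate((population[:-1], initial_point))
    result = differential_evolution(objective, bounds, init=population,
                                    polish=False)
    print(result.x, result.fun)
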
diff --git a/pycbc/mchirp_area.py b/pycbc/mchirp_area.py
index da21eaed794..eeca5036759 100644
--- a/pycbc/mchirp_area.py
+++ b/pycbc/mchirp_area.py
@@ -9,12 +9,17 @@
"""
import math
+import logging
import numpy as np
-from pycbc.conversions import mass2_from_mchirp_mass1 as m2mcm1
+
from scipy.integrate import quad
-from pycbc.cosmology import _redshift
from astropy.cosmology import FlatLambdaCDM
+from pycbc.cosmology import _redshift
+from pycbc.conversions import mass2_from_mchirp_mass1 as m2mcm1
+
+logger = logging.getLogger('pycbc.mchirp_area')
+
def insert_args(parser):
mchirp_group = parser.add_argument_group("Arguments for estimating the "
diff --git a/pycbc/opt.py b/pycbc/opt.py
index a7a3ef40f8b..113defd67a8 100644
--- a/pycbc/opt.py
+++ b/pycbc/opt.py
@@ -22,6 +22,8 @@
import logging
from collections import OrderedDict
+logger = logging.getLogger('pycbc.opt')
+
# Work around different Python versions to get runtime
# info on hardware cache sizes
_USE_SUBPROCESS = False
@@ -38,7 +40,8 @@
if os.environ.get("LEVEL2_CACHE_SIZE", None):
LEVEL2_CACHE_SIZE = int(os.environ["LEVEL2_CACHE_SIZE"])
- logging.info("opt: using LEVEL2_CACHE_SIZE %d from environment" % LEVEL2_CACHE_SIZE)
+ logger.info("opt: using LEVEL2_CACHE_SIZE %d from environment",
+ LEVEL2_CACHE_SIZE)
elif HAVE_GETCONF:
if _USE_SUBPROCESS:
def getconf(confvar):
@@ -98,19 +101,27 @@ def verify_optimization_options(opt, parser):
if opt.cpu_affinity_from_env is not None:
if opt.cpu_affinity is not None:
- logging.error("Both --cpu_affinity_from_env and --cpu_affinity specified")
+ logger.error(
+ "Both --cpu_affinity_from_env and --cpu_affinity specified"
+ )
sys.exit(1)
requested_cpus = os.environ.get(opt.cpu_affinity_from_env)
if requested_cpus is None:
- logging.error("CPU affinity requested from environment variable %s "
- "but this variable is not defined" % opt.cpu_affinity_from_env)
+ logger.error(
+ "CPU affinity requested from environment variable %s "
+ "but this variable is not defined",
+ opt.cpu_affinity_from_env
+ )
sys.exit(1)
if requested_cpus == '':
- logging.error("CPU affinity requested from environment variable %s "
- "but this variable is empty" % opt.cpu_affinity_from_env)
+ logger.error(
+ "CPU affinity requested from environment variable %s "
+ "but this variable is empty",
+ opt.cpu_affinity_from_env
+ )
sys.exit(1)
if requested_cpus is None:
@@ -121,11 +132,13 @@ def verify_optimization_options(opt, parser):
retcode = os.system(command)
if retcode != 0:
- logging.error('taskset command <%s> failed with return code %d' % \
- (command, retcode))
+ logger.error(
+ 'taskset command <%s> failed with return code %d',
+ command, retcode
+ )
sys.exit(1)
- logging.info("Pinned to CPUs %s " % requested_cpus)
+ logger.info("Pinned to CPUs %s ", requested_cpus)
class LimitedSizeDict(OrderedDict):
""" Fixed sized dict for FIFO caching"""
diff --git a/pycbc/pnutils.py b/pycbc/pnutils.py
index 9582ce7414e..632a9156913 100644
--- a/pycbc/pnutils.py
+++ b/pycbc/pnutils.py
@@ -26,11 +26,16 @@
"""This module contains convenience pN functions. This includes calculating conversions
between quantities.
"""
-import lal
+import logging
import numpy
+
+import lal
from scipy.optimize import bisect, brentq, minimize
+
from pycbc import conversions, libutils
+logger = logging.getLogger('pycbc.pnutils')
+
lalsim = libutils.import_optional('lalsimulation')
def nearest_larger_binary_number(input_len):
diff --git a/pycbc/pool.py b/pycbc/pool.py
index 8f2095181ee..6f76088412c 100644
--- a/pycbc/pool.py
+++ b/pycbc/pool.py
@@ -8,6 +8,8 @@
import atexit
import logging
+logger = logging.getLogger('pycbc.pool')
+
def is_main_process():
""" Check if this is the main control process and may handle one time tasks
"""
@@ -139,8 +141,10 @@ def use_mpi(require_mpi=False, log=True):
if size > 1:
use_mpi = True
if log:
- logging.info('Running under mpi with size: %s, rank: %s',
- size, rank)
+ logger.info(
+ 'Running under mpi with size: %s, rank: %s',
+ size, rank
+ )
except ImportError as e:
if require_mpi:
print(e)
@@ -162,12 +166,12 @@ def choose_pool(processes, mpi=False):
atexit.register(pool.close)
if processes:
- logging.info('NOTE: that for MPI process size determined by '
- 'MPI launch size, not the processes argument')
+        logger.info('NOTE: for MPI the pool size is determined by the '
+                    'MPI launch size, not the processes argument')
if do_mpi and not mpi:
- logging.info('NOTE: using MPI as this process was launched'
- 'under MPI')
+            logger.info('NOTE: using MPI as this process was launched '
+ 'under MPI')
except ImportError:
raise ValueError("Failed to start up an MPI pool, "
"install mpi4py / schwimmbad")
diff --git a/pycbc/population/live_pastro.py b/pycbc/population/live_pastro.py
index 2de2745a1ad..c933cd4e32c 100644
--- a/pycbc/population/live_pastro.py
+++ b/pycbc/population/live_pastro.py
@@ -1,13 +1,16 @@
import logging
import h5py
import numpy
-from pycbc import conversions as conv
+
from pycbc.tmpltbank import bank_conversions as bankconv
from pycbc.events import triggers
+from pycbc import conversions as conv
from . import fgmc_functions as fgmcfun
_s_per_yr = 1. / conv.sec_to_year(1.)
+logger = logging.getLogger('pycbc.population.live_pastro')
+
def check_template_param_bin_data(spec_json):
"""
@@ -72,11 +75,11 @@ def read_template_bank_param(spec_d, bankf):
# All the templates
tids = numpy.arange(len(bank['mass1']))
# Get param vals
- logging.info('Getting %s values from bank', spec_d['param'])
+ logger.info('Getting %s values from bank', spec_d['param'])
parvals = bankconv.get_bank_property(spec_d['param'], bank, tids)
counts, edges = numpy.histogram(parvals, bins=spec_d['bin_edges'])
bank_data = {'bin_edges': edges, 'tcounts': counts, 'num_t': counts.sum()}
- logging.info('Binned template counts: %s', counts)
+ logger.info('Binned template counts: %s', counts)
return bank_data
@@ -170,7 +173,7 @@ def template_param_bin_pa(padata, trdata, horizons):
trig_param = triggers.get_param(padata.spec['param'], None, *massspin)
# NB digitize gives '1' for first bin, '2' for second etc.
bind = numpy.digitize(trig_param, padata.bank['bin_edges']) - 1
- logging.debug('Trigger %s is in bin %i', padata.spec['param'], bind)
+ logger.debug('Trigger %s is in bin %i', padata.spec['param'], bind)
# Get noise rate density
if 'bg_fac' not in padata.spec:
@@ -180,24 +183,24 @@ def template_param_bin_pa(padata, trdata, horizons):
# FAR is in Hz, therefore convert to rate per year (per SNR)
dnoise = noise_density_from_far(trdata['far'], expfac) * _s_per_yr
- logging.debug('FAR %.3g, noise density per yr per SNR %.3g',
- trdata['far'], dnoise)
+ logger.debug('FAR %.3g, noise density per yr per SNR %.3g',
+ trdata['far'], dnoise)
# Scale by fraction of templates in bin
dnoise *= padata.bank['tcounts'][bind] / padata.bank['num_t']
- logging.debug('Noise density in bin %.3g', dnoise)
+ logger.debug('Noise density in bin %.3g', dnoise)
# Get signal rate density per year at given SNR
dsig = signal_pdf_from_snr(trdata['network_snr'],
padata.spec['netsnr_thresh'])
- logging.debug('SNR %.3g, signal pdf %.3g', trdata['network_snr'], dsig)
+ logger.debug('SNR %.3g, signal pdf %.3g', trdata['network_snr'], dsig)
dsig *= padata.spec['sig_per_yr_binned'][bind]
- logging.debug('Signal density per yr per SNR in bin %.3g', dsig)
+ logger.debug('Signal density per yr per SNR in bin %.3g', dsig)
# Scale by network sensitivity accounting for BNS horizon distances
dsig *= signal_rate_rescale(horizons, padata.spec['ref_bns_horizon'])
- logging.debug('After horizon rescaling %.3g', dsig)
+ logger.debug('After horizon rescaling %.3g', dsig)
p_astro = dsig / (dsig + dnoise)
- logging.debug('p_astro %.4g', p_astro)
+ logger.debug('p_astro %.4g', p_astro)
return p_astro, 1 - p_astro
@@ -221,7 +224,7 @@ def template_param_bin_types_pa(padata, trdata, horizons):
trig_param = triggers.get_param(padata.spec['param'], None, *massspin)
# NB digitize gives '1' for first bin, '2' for second etc.
bind = numpy.digitize(trig_param, padata.bank['bin_edges']) - 1
- logging.debug('Trigger %s is in bin %i', padata.spec['param'], bind)
+ logger.debug('Trigger %s is in bin %i', padata.spec['param'], bind)
# Get noise rate density
if 'bg_fac' not in padata.spec:
@@ -234,30 +237,30 @@ def template_param_bin_types_pa(padata, trdata, horizons):
# FAR is in Hz, therefore convert to rate per year (per SNR)
dnoise = noise_density_from_far(trdata['far'], expfac) * _s_per_yr
- logging.debug('FAR %.3g, noise density per yr per SNR %.3g',
- trdata['far'], dnoise)
+ logger.debug('FAR %.3g, noise density per yr per SNR %.3g',
+ trdata['far'], dnoise)
# Scale by fraction of templates in bin
dnoise *= padata.bank['tcounts'][bind] / padata.bank['num_t']
- logging.debug('Noise density in bin %.3g', dnoise)
+ logger.debug('Noise density in bin %.3g', dnoise)
# Back out trials factor to give noise density for triggered event type
dnoise /= float(trials_type(len(tr_ifos), len(trdata['sensitive'])))
- logging.debug('Divide by previously applied trials factor: %.3g', dnoise)
+ logger.debug('Divide by previously applied trials factor: %.3g', dnoise)
# Get signal rate density per year at given SNR
dsig = signal_pdf_from_snr(trdata['network_snr'],
padata.spec['netsnr_thresh'])
- logging.debug('SNR %.3g, signal pdf %.3g', trdata['network_snr'], dsig)
+ logger.debug('SNR %.3g, signal pdf %.3g', trdata['network_snr'], dsig)
dsig *= padata.spec['sig_per_yr_binned'][bind]
- logging.debug('Total signal density per yr per SNR in bin %.3g', dsig)
+ logger.debug('Total signal density per yr per SNR in bin %.3g', dsig)
# Scale by network sensitivity accounting for BNS horizons
dsig *= signal_rate_rescale(horizons, padata.spec['ref_bns_horizon'])
- logging.debug('After network horizon rescaling %.3g', dsig)
+ logger.debug('After network horizon rescaling %.3g', dsig)
# Scale by relative signal rate in triggered ifos
dsig *= signal_rate_trig_type(horizons, trdata['sensitive'], tr_ifos)
- logging.debug('After triggered ifo rate rescaling %.3g', dsig)
+ logger.debug('After triggered ifo rate rescaling %.3g', dsig)
p_astro = dsig / (dsig + dnoise)
- logging.debug('p_astro %.4g', p_astro)
+ logger.debug('p_astro %.4g', p_astro)
return p_astro, 1 - p_astro
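
The final step in both branches is the same two-component odds ratio, p_astro = dsig / (dsig + dnoise), with p_terr as its complement. Plugging in illustrative densities (numbers invented for the example):

    # illustrative rate densities per year per SNR, not real trigger values
    dsig = 3.0e-2    # signal rate density after all rescalings
    dnoise = 1.0e-3  # noise rate density in the template bin

    p_astro = dsig / (dsig + dnoise)
    p_terr = 1.0 - p_astro
    print(round(p_astro, 3), round(p_terr, 3))   # 0.968 0.032
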
diff --git a/pycbc/population/live_pastro_utils.py b/pycbc/population/live_pastro_utils.py
index 1e93863ac97..be9c9d5993f 100644
--- a/pycbc/population/live_pastro_utils.py
+++ b/pycbc/population/live_pastro_utils.py
@@ -2,6 +2,8 @@
import json
from . import live_pastro as livepa
+logger = logging.getLogger('pycbc.population.live_pastro_utils')
+
def insert_live_pastro_option_group(parser):
""" Add low-latency p astro options to the argparser object.
@@ -72,7 +74,7 @@ def __init__(self, specfile, bank):
except KeyError as ke:
raise ValueError("Can't find 'method' in p_astro spec file!") \
from ke
- logging.info('Setting up p_astro data with method %s', self.method)
+ logger.info('Setting up p_astro data with method %s', self.method)
self.spec = _check_spec[self.method](self.spec_json)
self.bank = _read_bank[self.method](self.spec, bank)
@@ -93,9 +95,9 @@ def apply_significance_limits(self, trigger_data):
trigger_data['network_snr'] < snrlim:
return trigger_data
- logging.debug('Truncating FAR and SNR from %f, %f to %f, %f',
- trigger_data['far'], trigger_data['network_snr'],
- farlim, snrlim)
+ logger.debug('Truncating FAR and SNR from %f, %f to %f, %f',
+ trigger_data['far'], trigger_data['network_snr'],
+ farlim, snrlim)
trigger_data['network_snr'] = snrlim
trigger_data['far'] = farlim
return trigger_data
@@ -107,7 +109,7 @@ def do_pastro_calc(self, trigger_data, horizons):
if not self.do:
return None, None
- logging.info('Computing p_astro')
+ logger.info('Computing p_astro')
p_astro, p_terr = _do_calc[self.method](self, trigger_data, horizons)
return p_astro, p_terr
diff --git a/pycbc/psd/read.py b/pycbc/psd/read.py
index 23f5dbd6445..8aa50c00e26 100644
--- a/pycbc/psd/read.py
+++ b/pycbc/psd/read.py
@@ -22,6 +22,8 @@
import scipy.interpolate
from pycbc.types import FrequencySeries
+logger = logging.getLogger('pycbc.psd.read')
+
def from_numpy_arrays(freq_data, noise_data, length, delta_f, low_freq_cutoff):
"""Interpolate n PSD (as two 1-dimensional arrays of frequency and data)
to the desired length, delta_f and low frequency cutoff.
@@ -62,11 +64,11 @@ def from_numpy_arrays(freq_data, noise_data, length, delta_f, low_freq_cutoff):
noise_data = noise_data[data_start:]
if (length - 1) * delta_f > freq_data[-1]:
- logging.warning('Requested number of samples exceeds the highest '
- 'available frequency in the input data, '
- 'will use max available frequency instead. '
- '(requested %f Hz, available %f Hz)',
- (length - 1) * delta_f, freq_data[-1])
+ logger.warning('Requested number of samples exceeds the highest '
+ 'available frequency in the input data, '
+ 'will use max available frequency instead. '
+ '(requested %f Hz, available %f Hz)',
+ (length - 1) * delta_f, freq_data[-1])
length = int(freq_data[-1]/delta_f + 1)
flog = numpy.log(freq_data)
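
To illustrate the length cap applied in the branch above (the numbers are made up, not taken from the patch): if the tabulated PSD stops at 2048 Hz but the requested length and delta_f imply samples up to 4096 Hz, the warning fires and the length is truncated so the last sample sits at the highest available frequency.

delta_f = 0.25                     # Hz, assumed for illustration
max_freq = 2048.0                  # highest frequency in the input data
requested_length = 16385           # implies samples up to 4096 Hz

requested_top = (requested_length - 1) * delta_f   # 4096.0 Hz > max_freq
capped_length = int(max_freq / delta_f + 1)        # 8193 samples
print(requested_top, capped_length)
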
diff --git a/pycbc/rate.py b/pycbc/rate.py
index 0fad5d34dce..bf526506637 100644
--- a/pycbc/rate.py
+++ b/pycbc/rate.py
@@ -1,7 +1,11 @@
import numpy
import bisect
+import logging
+
from . import bin_utils
+logger = logging.getLogger('pycbc.rate')
+
def integral_element(mu, pdf):
'''
diff --git a/pycbc/results/pygrb_postprocessing_utils.py b/pycbc/results/pygrb_postprocessing_utils.py
index 1f86611cb89..510e6b213c8 100644
--- a/pycbc/results/pygrb_postprocessing_utils.py
+++ b/pycbc/results/pygrb_postprocessing_utils.py
@@ -27,12 +27,16 @@
import logging
import argparse
import copy
-
import numpy
import h5py
+
from scipy import stats
import ligo.segments as segments
from pycbc.events.coherent import reweightedsnr_cut
+from pycbc import add_common_pycbc_options
+
+logger = logging.getLogger('pycbc.results.pygrb_postprocessing_utils')
+
# All/most of these final imports will become obsolete with hdf5 switch
try:
from ligo.lw import utils
@@ -57,9 +61,8 @@ def pygrb_initialize_plot_parser(description=None, version=None):
formatter_class = argparse.ArgumentDefaultsHelpFormatter
parser = argparse.ArgumentParser(description=description,
formatter_class=formatter_class)
+ add_common_pycbc_options(parser)
parser.add_argument("--version", action="version", version=version)
- parser.add_argument("-v", "--verbose", default=False, action="store_true",
- help="Verbose output")
parser.add_argument("-o", "--output-file", default=None,
help="Output file.")
parser.add_argument("--x-lims", action="store", default=None,
@@ -496,7 +499,7 @@ def extract_basic_trig_properties(trial_dict, trigs, slide_dict, seg_dict,
# Sort the triggers into each slide
sorted_trigs = sort_trigs(trial_dict, trigs, slide_dict, seg_dict)
- logging.info("Triggers sorted.")
+ logger.info("Triggers sorted.")
# Build the 3 dictionaries
trig_time = {}
@@ -518,7 +521,7 @@ def extract_basic_trig_properties(trial_dict, trigs, slide_dict, seg_dict,
trigs['network/reweighted_snr'][indices],
opts.newsnr_threshold)
- logging.info("Time, SNR, and BestNR of triggers extracted.")
+ logger.info("Time, SNR, and BestNR of triggers extracted.")
return trig_time, trig_snr, trig_bestnr
@@ -548,7 +551,7 @@ def extract_ifos(trig_file):
def extract_ifos_and_vetoes(trig_file, veto_files, veto_cat):
"""Extracts IFOs from HDF files and vetoes from a directory"""
- logging.info("Extracting IFOs and vetoes.")
+ logger.info("Extracting IFOs and vetoes.")
# Extract IFOs
ifos = extract_ifos(trig_file)
diff --git a/pycbc/results/versioning.py b/pycbc/results/versioning.py
index 529b39eeb91..d4b3863666f 100644
--- a/pycbc/results/versioning.py
+++ b/pycbc/results/versioning.py
@@ -20,8 +20,11 @@
import lal
import lalframe
+
import pycbc.version
+logger = logging.getLogger('pycbc.results.versioning')
+
def get_library_version_info():
"""This will return a list of dictionaries containing versioning
information about the various LIGO libraries that PyCBC will use in an
@@ -115,7 +118,7 @@ def get_code_version_numbers(executable_names, executable_files):
code_version_dict = {}
for exe_name, value in zip(executable_names, executable_files):
value = urllib.parse.urlparse(value)
- logging.info("Getting version info for %s", exe_name)
+ logger.info("Getting version info for %s", exe_name)
version_string = None
if value.scheme in ['gsiftp', 'http', 'https']:
code_version_dict[exe_name] = "Using bundle downloaded from %s" % value
diff --git a/pycbc/scheme.py b/pycbc/scheme.py
index 100753fd971..e6eb6f87f86 100644
--- a/pycbc/scheme.py
+++ b/pycbc/scheme.py
@@ -32,6 +32,8 @@
import logging
from .libutils import get_ctypes_library
+logger = logging.getLogger('pycbc.scheme')
+
class _SchemeManager(object):
_single = None
@@ -279,7 +281,7 @@ def from_cli(opt):
name = scheme_str[0]
if name == "cuda":
- logging.info("Running with CUDA support")
+ logger.info("Running with CUDA support")
ctx = CUDAScheme(opt.processing_device_id)
elif name == "mkl":
if len(scheme_str) > 1:
@@ -289,7 +291,7 @@ def from_cli(opt):
ctx = MKLScheme(num_threads=numt)
else:
ctx = MKLScheme()
- logging.info("Running with MKL support: %s threads" % ctx.num_threads)
+ logger.info("Running with MKL support: %s threads" % ctx.num_threads)
else:
if len(scheme_str) > 1:
numt = scheme_str[1]
@@ -298,7 +300,7 @@ def from_cli(opt):
ctx = CPUScheme(num_threads=numt)
else:
ctx = CPUScheme()
- logging.info("Running with CPU support: %s threads" % ctx.num_threads)
+ logger.info("Running with CPU support: %s threads" % ctx.num_threads)
return ctx
def verify_processing_options(opt, parser):
diff --git a/pycbc/sensitivity.py b/pycbc/sensitivity.py
index 735983f6a05..b855952290e 100644
--- a/pycbc/sensitivity.py
+++ b/pycbc/sensitivity.py
@@ -1,9 +1,13 @@
""" This module contains utilities for calculating search sensitivity
"""
import numpy
+import logging
+
from pycbc.conversions import chirp_distance
from . import bin_utils
+logger = logging.getLogger('pycbc.sensitivity')
+
def compute_search_efficiency_in_bins(
found, total, ndbins,
diff --git a/pycbc/strain/strain.py b/pycbc/strain/strain.py
index d4ed74befeb..4039efe15aa 100644
--- a/pycbc/strain/strain.py
+++ b/pycbc/strain/strain.py
@@ -17,8 +17,12 @@
This modules contains functions reading, generating, and segmenting strain data
"""
import copy
-import logging, numpy
+import logging
import functools
+import numpy
+
+from scipy.signal import kaiserord
+
import pycbc.types
from pycbc.types import TimeSeries, zeros
from pycbc.types import Array, FrequencySeries
@@ -37,7 +41,8 @@
import pycbc.events
import pycbc.frame
import pycbc.filter
-from scipy.signal import kaiserord
+
+logger = logging.getLogger('pycbc.strain.strain')
def next_power_of_2(n):
"""Return the smallest integer power of 2 larger than the argument.
@@ -207,7 +212,7 @@ def from_cli(opt, dyn_range_fac=1, precision='single',
if opt.frame_files:
frame_source = opt.frame_files
- logging.info("Reading Frames")
+ logger.info("Reading Frames")
if hasattr(opt, 'frame_sieve') and opt.frame_sieve:
sieve = opt.frame_sieve
@@ -232,7 +237,7 @@ def from_cli(opt, dyn_range_fac=1, precision='single',
opt.gps_end_time + opt.pad_data)
elif opt.fake_strain or opt.fake_strain_from_file:
- logging.info("Generating Fake Strain")
+ logger.info("Generating Fake Strain")
duration = opt.gps_end_time - opt.gps_start_time
duration += 2 * opt.pad_data
pdf = 1.0 / opt.fake_strain_filter_duration
@@ -241,23 +246,23 @@ def from_cli(opt, dyn_range_fac=1, precision='single',
fake_extra_args = opt.fake_strain_extra_args
plen = round(opt.sample_rate / pdf) // 2 + 1
if opt.fake_strain_from_file:
- logging.info("Reading ASD from file")
+ logger.info("Reading ASD from file")
strain_psd = pycbc.psd.from_txt(opt.fake_strain_from_file,
plen, pdf,
fake_flow,
is_asd_file=True)
elif opt.fake_strain != 'zeroNoise':
- logging.info("Making PSD for strain")
+ logger.info("Making PSD for strain")
strain_psd = pycbc.psd.from_string(opt.fake_strain, plen, pdf,
fake_flow, **fake_extra_args)
if opt.fake_strain == 'zeroNoise':
- logging.info("Making zero-noise time series")
+ logger.info("Making zero-noise time series")
strain = TimeSeries(pycbc.types.zeros(duration * fake_rate),
delta_t=1.0 / fake_rate,
epoch=opt.gps_start_time - opt.pad_data)
else:
- logging.info("Making colored noise")
+ logger.info("Making colored noise")
from pycbc.noise.reproduceable import colored_noise
strain = colored_noise(strain_psd,
opt.gps_start_time - opt.pad_data,
@@ -282,32 +287,32 @@ def from_cli(opt, dyn_range_fac=1, precision='single',
'simulated signals into fake strain')
if opt.zpk_z and opt.zpk_p and opt.zpk_k:
- logging.info("Highpass Filtering")
+ logger.info("Highpass Filtering")
strain = highpass(strain, frequency=opt.strain_high_pass)
- logging.info("Applying zpk filter")
+ logger.info("Applying zpk filter")
z = numpy.array(opt.zpk_z)
p = numpy.array(opt.zpk_p)
k = float(opt.zpk_k)
strain = filter_zpk(strain.astype(numpy.float64), z, p, k)
if opt.normalize_strain:
- logging.info("Dividing strain by constant")
+ logger.info("Dividing strain by constant")
l = opt.normalize_strain
strain = strain / l
if opt.strain_high_pass:
- logging.info("Highpass Filtering")
+ logger.info("Highpass Filtering")
strain = highpass(strain, frequency=opt.strain_high_pass)
if opt.sample_rate:
- logging.info("Resampling data")
+ logger.info("Resampling data")
strain = resample_to_delta_t(strain,
1. / opt.sample_rate,
method='ldas')
if injector is not None:
- logging.info("Applying injections")
+ logger.info("Applying injections")
injections = \
injector.apply(strain, opt.channel_name.split(':')[0],
distance_scale=opt.injection_scale_factor,
@@ -315,22 +320,22 @@ def from_cli(opt, dyn_range_fac=1, precision='single',
inj_filter_rejector=inj_filter_rejector)
if opt.sgburst_injection_file:
- logging.info("Applying sine-Gaussian burst injections")
+ logger.info("Applying sine-Gaussian burst injections")
injector = SGBurstInjectionSet(opt.sgburst_injection_file)
injector.apply(strain, opt.channel_name.split(':')[0],
distance_scale=opt.injection_scale_factor)
if precision == 'single':
- logging.info("Converting to float32")
+ logger.info("Converting to float32")
strain = (strain * dyn_range_fac).astype(pycbc.types.float32)
elif precision == "double":
- logging.info("Converting to float64")
+ logger.info("Converting to float64")
strain = (strain * dyn_range_fac).astype(pycbc.types.float64)
else:
raise ValueError("Unrecognized precision {}".format(precision))
if opt.gating_file is not None:
- logging.info("Gating times contained in gating file")
+ logger.info("Gating times contained in gating file")
gate_params = numpy.loadtxt(opt.gating_file)
if len(gate_params.shape) == 1:
gate_params = [gate_params]
@@ -361,18 +366,18 @@ def from_cli(opt, dyn_range_fac=1, precision='single',
copy=False,
taper_width=gate_taper)
if len(glitch_times) > 0:
- logging.info('Autogating at %s',
- ', '.join(['%.3f' % gt
- for gt in glitch_times]))
+ logger.info('Autogating at %s',
+ ', '.join(['%.3f' % gt
+ for gt in glitch_times]))
else:
break
if opt.strain_high_pass:
- logging.info("Highpass Filtering")
+ logger.info("Highpass Filtering")
strain = highpass(strain, frequency=opt.strain_high_pass)
if opt.strain_low_pass:
- logging.info("Lowpass Filtering")
+ logger.info("Lowpass Filtering")
strain = lowpass(strain, frequency=opt.strain_low_pass)
if hasattr(opt, 'witness_frame_type') and opt.witness_frame_type:
@@ -404,13 +409,13 @@ def from_cli(opt, dyn_range_fac=1, precision='single',
strain = stilde.to_timeseries()
if opt.pad_data:
- logging.info("Remove Padding")
+ logger.info("Remove Padding")
start = int(opt.pad_data * strain.sample_rate)
end = int(len(strain) - strain.sample_rate * opt.pad_data)
strain = strain[start:end]
if opt.taper_data:
- logging.info("Tapering data")
+ logger.info("Tapering data")
# Use auto-gating, a one-sided gate is a taper
pd_taper_window = opt.taper_data
gate_params = [(strain.start_time, 0., pd_taper_window)]
@@ -1557,8 +1562,8 @@ def __init__(self, frame_src, channel_name, start_time,
# State channel
if state_channel is not None:
valid_mask = pycbc.frame.flag_names_to_bitmask(self.analyze_flags)
- logging.info('State channel %s interpreted as bitmask %s = good',
- state_channel, bin(valid_mask))
+ logger.info('State channel %s interpreted as bitmask %s = good',
+ state_channel, bin(valid_mask))
self.state = pycbc.frame.StatusBuffer(
frame_src,
state_channel, start_time,
@@ -1575,12 +1580,12 @@ def __init__(self, frame_src, channel_name, start_time,
if len(self.data_quality_flags) == 1 \
and self.data_quality_flags[0] == 'veto_nonzero':
sb_kwargs['valid_on_zero'] = True
- logging.info('DQ channel %s interpreted as zero = good',
- data_quality_channel)
+ logger.info('DQ channel %s interpreted as zero = good',
+ data_quality_channel)
else:
sb_kwargs['valid_mask'] = pycbc.frame.flag_names_to_bitmask(
self.data_quality_flags)
- logging.info(
+ logger.info(
'DQ channel %s interpreted as bitmask %s = good',
data_quality_channel,
bin(sb_kwargs['valid_mask'])
@@ -1698,15 +1703,15 @@ def recalculate_psd(self):
# If the new psd is similar to the old one, don't replace it
if self.psd and self.psd_recalculate_difference:
if abs(self.psd.dist - psd.dist) / self.psd.dist < self.psd_recalculate_difference:
- logging.info("Skipping recalculation of %s PSD, %s-%s",
- self.detector, self.psd.dist, psd.dist)
+ logger.info("Skipping recalculation of %s PSD, %s-%s",
+ self.detector, self.psd.dist, psd.dist)
return True
# If the new psd is *really* different than the old one, return an error
if self.psd and self.psd_abort_difference:
if abs(self.psd.dist - psd.dist) / self.psd.dist > self.psd_abort_difference:
- logging.info("%s PSD is CRAZY, aborting!!!!, %s-%s",
- self.detector, self.psd.dist, psd.dist)
+ logger.info("%s PSD is CRAZY, aborting!!!!, %s-%s",
+ self.detector, self.psd.dist, psd.dist)
self.psd = psd
self.psds = {}
return False
@@ -1714,7 +1719,7 @@ def recalculate_psd(self):
# If the new estimate replaces the current one, invalidate the interpolated PSDs
self.psd = psd
self.psds = {}
- logging.info("Recalculating %s PSD, %s", self.detector, psd.dist)
+ logger.info("Recalculating %s PSD, %s", self.detector, psd.dist)
return True
def check_psd_dist(self, min_dist, max_dist):
@@ -1729,7 +1734,7 @@ def check_psd_dist(self, min_dist, max_dist):
# with how the logic works out when comparing inf's or nan's!
good = self.psd.dist >= min_dist and self.psd.dist <= max_dist
if not good:
- logging.info(
+ logger.info(
"%s PSD dist %s outside acceptable range [%s, %s]",
self.detector,
self.psd.dist,
@@ -1865,7 +1870,7 @@ def advance(self, blocksize, timeout=10):
# We have given up so there is no time series
if ts is None:
- logging.info("%s frame is late, giving up", self.detector)
+ logger.info("%s frame is late, giving up", self.detector)
self.null_advance_strain(blocksize)
if self.state:
self.state.null_advance(blocksize)
@@ -1887,8 +1892,8 @@ def advance(self, blocksize, timeout=10):
self.dq.null_advance(blocksize)
if self.idq:
self.idq.null_advance(blocksize)
- logging.info("%s time has invalid data, resetting buffer",
- self.detector)
+ logger.info("%s time has invalid data, resetting buffer",
+ self.detector)
return False
# Also advance the dq vector and idq timeseries in lockstep
@@ -1921,7 +1926,7 @@ def advance(self, blocksize, timeout=10):
# taper beginning if needed
if self.taper_immediate_strain:
- logging.info("Tapering start of %s strain block", self.detector)
+ logger.info("Tapering start of %s strain block", self.detector)
strain = gate_data(
strain, [(strain.start_time, 0., self.autogating_taper)])
self.taper_immediate_strain = False
@@ -1943,8 +1948,8 @@ def advance(self, blocksize, timeout=10):
low_freq_cutoff=self.highpass_frequency,
corrupt_time=self.autogating_pad)
if len(glitch_times) > 0:
- logging.info('Autogating %s at %s', self.detector,
- ', '.join(['%.3f' % gt for gt in glitch_times]))
+ logger.info('Autogating %s at %s', self.detector,
+ ', '.join(['%.3f' % gt for gt in glitch_times]))
self.gate_params = \
[(gt, self.autogating_width, self.autogating_taper)
for gt in glitch_times]
diff --git a/pycbc/transforms.py b/pycbc/transforms.py
index 487de5ab103..37bd9ddf72b 100644
--- a/pycbc/transforms.py
+++ b/pycbc/transforms.py
@@ -19,6 +19,7 @@
import os
import logging
import numpy
+
from pycbc import conversions
from pycbc import coordinates
from pycbc import cosmology
@@ -28,6 +29,8 @@
from pycbc import VARARGS_DELIM
from pycbc.pnutils import jframe_to_l0frame
+logger = logging.getLogger('pycbc.transforms')
+
class BaseTransform(object):
"""A base class for transforming between two sets of parameters."""
@@ -867,7 +870,7 @@ def inverse_transform(self, maps):
class SphericalSpin1ToCartesianSpin1(SphericalToCartesian):
"""Converts spherical spin parameters (radial and two angles) to
- catesian spin parameters. This class only transforms spsins for the first
+ cartesian spin parameters. This class only transforms spins for the first
component mass.
**Deprecation Warning:** This will be removed in a future update. Use
@@ -878,13 +881,12 @@ class SphericalSpin1ToCartesianSpin1(SphericalToCartesian):
name = "spherical_spin_1_to_cartesian_spin_1"
def __init__(self):
- logging.warning(
- "Deprecation warning: the {} transform will be "
- "removed in a future update. Please use {} instead, "
+ logger.warning(
+ "Deprecation warning: the %s transform will be "
+ "removed in a future update. Please use %s instead, "
"passing spin1x, spin1y, spin1z, spin1_a, "
- "spin1_azimuthal, spin1_polar as arguments.".format(
- self.name, SphericalToCartesian.name
- )
+ "spin1_azimuthal, spin1_polar as arguments.",
+ self.name, SphericalToCartesian.name
)
super(SphericalSpin1ToCartesianSpin1, self).__init__(
"spin1x", "spin1y", "spin1z", "spin1_a",
@@ -894,7 +896,7 @@ def __init__(self):
class SphericalSpin2ToCartesianSpin2(SphericalToCartesian):
"""Converts spherical spin parameters (radial and two angles) to
- catesian spin parameters. This class only transforms spsins for the first
+ cartesian spin parameters. This class only transforms spins for the first
component mass.
**Deprecation Warning:** This will be removed in a future update. Use
@@ -905,13 +907,12 @@ class SphericalSpin2ToCartesianSpin2(SphericalToCartesian):
name = "spherical_spin_2_to_cartesian_spin_2"
def __init__(self):
- logging.warning(
- "Deprecation warning: the {} transform will be "
- "removed in a future update. Please use {} instead, "
+ logger.warning(
+ "Deprecation warning: the %s transform will be "
+ "removed in a future update. Please use %s instead, "
"passing spin2x, spin2y, spin2z, spin2_a, "
- "spin2_azimuthal, spin2_polar as arguments.".format(
- self.name, SphericalToCartesian.name
- )
+ "spin2_azimuthal, spin2_polar as arguments.",
+ self.name, SphericalToCartesian.name
)
super(SphericalSpin2ToCartesianSpin2, self).__init__(
"spin2x", "spin2y", "spin2z",
@@ -1401,7 +1402,7 @@ def transform(self, maps):
try:
d = maps["distance"]
except KeyError as e:
- logging.warning(
+ logger.warning(
"Either provide distance samples in the "
"list of samples to be transformed, or "
"provide a fixed distance value as input "
@@ -2470,13 +2471,12 @@ class CartesianSpin1ToSphericalSpin1(CartesianToSpherical):
name = "cartesian_spin_1_to_spherical_spin_1"
def __init__(self):
- logging.warning(
- "Deprecation warning: the {} transform will be "
- "removed in a future update. Please use {} instead, "
+ logger.warning(
+ "Deprecation warning: the %s transform will be "
+ "removed in a future update. Please use %s instead, "
"passing spin1x, spin1y, spin1z, spin1_a, "
- "spin1_azimuthal, spin1_polar as arguments.".format(
- self.name, CartesianToSpherical.name
- )
+ "spin1_azimuthal, spin1_polar as arguments.",
+ self.name, CartesianToSpherical.name
)
super(CartesianSpin1ToSphericalSpin1, self).__init__(
"spin1x", "spin1y", "spin1z",
@@ -2495,13 +2495,12 @@ class CartesianSpin2ToSphericalSpin2(CartesianToSpherical):
name = "cartesian_spin_2_to_spherical_spin_2"
def __init__(self):
- logging.warning(
- "Deprecation warning: the {} transform will be "
- "removed in a future update. Please use {} instead, "
+ logger.warning(
+ "Deprecation warning: the %s transform will be "
+ "removed in a future update. Please use %s instead, "
"passing spin2x, spin2y, spin2z, spin2_a, "
- "spin2_azimuthal, spin2_polar as arguments.".format(
- self.name, CartesianToSpherical.name
- )
+ "spin2_azimuthal, spin2_polar as arguments.",
+ self.name, CartesianToSpherical.name
)
super(CartesianSpin2ToSphericalSpin2, self).__init__(
"spin2x", "spin2y", "spin2z",
diff --git a/pycbc/types/config.py b/pycbc/types/config.py
index 89d579c65de..c690c8f4906 100644
--- a/pycbc/types/config.py
+++ b/pycbc/types/config.py
@@ -32,6 +32,8 @@
from io import StringIO
import configparser as ConfigParser
+logger = logging.getLogger('pycbc.types.config')
+
class DeepCopyableConfigParser(ConfigParser.ConfigParser):
"""
@@ -132,7 +134,7 @@ def __init__(
"no such section in configuration." % delete
)
- logging.info(
+ logger.info(
"Deleting section %s from configuration", delete[0]
)
elif len(delete) == 2:
@@ -142,7 +144,7 @@ def __init__(
" no such option in configuration." % delete
)
- logging.info(
+ logger.info(
"Deleting option %s from section %s in " "configuration",
delete[1],
delete[0],
@@ -168,7 +170,7 @@ def __init__(
if not self.has_section(section):
self.add_section(section)
self.set(section, option, value)
- logging.info(
+ logger.info(
"Overriding section %s option %s with value %s "
"in configuration.",
section,
@@ -208,7 +210,7 @@ def from_cli(cls, opts):
The command line arguments parsed by argparse
"""
# read configuration file
- logging.info("Reading configuration file")
+ logger.info("Reading configuration file")
if opts.config_overrides is not None:
overrides = [
tuple(override.split(":", 2))