From a2f6d9020a48f39fcbaa06c0ca3ba552875d8adf Mon Sep 17 00:00:00 2001 From: Evgenia Karunus Date: Fri, 23 Jul 2021 14:36:37 +0500 Subject: [PATCH 01/13] Change all print statements to logger statements --- netpyne/analysis/__init__.py | 4 +- netpyne/analysis/csd.py | 29 +++--- netpyne/analysis/filter.py | 1 - netpyne/analysis/info.py | 11 +- netpyne/analysis/interactive.py | 46 ++++----- netpyne/analysis/lfp.py | 7 +- netpyne/analysis/network.py | 80 +++++++-------- netpyne/analysis/spikes.py | 45 ++++---- netpyne/analysis/traces.py | 9 +- netpyne/analysis/utils.py | 14 ++- netpyne/analysis/wrapper.py | 13 ++- netpyne/batch/__init__.py | 1 - netpyne/batch/asd_parallel.py | 91 ++++++++-------- netpyne/batch/batch.py | 8 +- netpyne/batch/evol.py | 56 +++++----- netpyne/batch/grid.py | 53 +++++----- netpyne/batch/optuna_parallel.py | 66 ++++++------ netpyne/batch/utils.py | 12 +-- netpyne/cell/NML2SpikeSource.py | 2 +- netpyne/cell/cell.py | 22 ++-- netpyne/cell/compartCell.py | 102 +++++++++--------- netpyne/cell/inputs.py | 6 +- netpyne/cell/pointCell.py | 52 +++++----- netpyne/conversion/excel.py | 6 +- netpyne/conversion/neuromlFormat.py | 154 +++++++++++++--------------- netpyne/conversion/neuronPyHoc.py | 20 ++-- netpyne/conversion/pythonScript.py | 6 +- netpyne/conversion/sonataImport.py | 77 +++++++------- netpyne/logger.py | 55 ++++++++++ netpyne/network/conn.py | 29 +++--- netpyne/network/modify.py | 19 ++-- netpyne/network/netrxd.py | 67 ++++++------ netpyne/network/network.py | 10 +- netpyne/network/pop.py | 27 +++-- netpyne/network/stim.py | 8 +- netpyne/network/subconn.py | 18 ++-- netpyne/sim/gather.py | 55 +++++----- netpyne/sim/load.py | 94 ++++++++--------- netpyne/sim/run.py | 35 +++---- netpyne/sim/save.py | 63 ++++++------ netpyne/sim/setup.py | 46 ++++----- netpyne/sim/utils.py | 22 ++-- netpyne/sim/wrappers.py | 3 +- netpyne/specs/netParams.py | 32 +++--- netpyne/specs/simConfig.py | 6 +- netpyne/support/bsmart.py | 3 +- 
netpyne/support/recxelectrode.py | 7 +- netpyne/support/scalebar.py | 1 - netpyne/support/stackedBarGraph.py | 4 +- 49 files changed, 806 insertions(+), 791 deletions(-) create mode 100644 netpyne/logger.py diff --git a/netpyne/analysis/__init__.py b/netpyne/analysis/__init__.py index 71eaf3dd6..079939e11 100644 --- a/netpyne/analysis/__init__.py +++ b/netpyne/analysis/__init__.py @@ -4,7 +4,6 @@ """ from __future__ import unicode_literals -from __future__ import print_function from __future__ import division from __future__ import absolute_import @@ -24,6 +23,7 @@ from .utils import exception, _showFigure, _saveFigData, getCellsInclude, getCellsIncludeTags, _roundFigures, \ _smooth1d, syncMeasure, invertDictMapping, checkAvailablePlots +from netpyne.logger import logger # ------------------------------------------------------------------------------------------------------------------- # Import connectivity-related functions @@ -70,7 +70,7 @@ try: from .interactive import iplotDipole, iplotDipoleSpectrogram, iplotDipolePSD, iplotRaster, iplotSpikeHist, iplotRatePSD, iplotTraces, iplotLFP, iplotConn, iplotRxDConcentration, iplot2Dnet, iplotSpikeStats, iplotGranger except: - print('Warning: could not import interactive plotting functions; make sure the "bokeh" package is installed.') + logger.warning('Could not import interactive plotting functions; make sure the "bokeh" package is installed.') # ------------------------------------------------------------------------------------------------------------------- diff --git a/netpyne/analysis/csd.py b/netpyne/analysis/csd.py index 9c2e23f3f..77d6a4f93 100644 --- a/netpyne/analysis/csd.py +++ b/netpyne/analysis/csd.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import @@ -31,7 +30,7 @@ from scipy.signal import cheb2ord, cheby2, convolve, get_window, iirfilter, remez, decimate from .filter import 
lowpass,bandpass from .utils import exception, _saveFigData - +from netpyne.logger import logger def getbandpass(lfps, sampr, minf=0.05, maxf=300): """ @@ -320,7 +319,7 @@ def getCSD(LFP_input_data=None, LFP_input_file=None, sampr=None, dt=None, spacin sim.allSimData['CSD']['CSD_data'] = CSD_data sim.allSimData['CSD']['CSD_data_noBandpass'] = CSD_data_noBandpass except: - print('NOTE: No sim.allSimData construct available to store CSD data.') + logger.warning('No sim.allSimData construct available to store CSD data.') # return CSD_data or all data if getAllData is True: @@ -400,8 +399,8 @@ def plotCSD(CSD_data=None, LFP_input_data=None, overlay=None, timeRange=None, sa """ - print('Plotting CSD... ') - + logger.info('Plotting CSD...') + # DEFAULT -- CONDITION 1 : GET CSD DATA FROM SIM if CSD_data is None: @@ -439,24 +438,24 @@ def plotCSD(CSD_data=None, LFP_input_data=None, overlay=None, timeRange=None, sa # CONDITION 2 : ARBITRARY CSD DATA elif CSD_data is not None: if timeRange is None: - print('MUST PROVIDE TIME RANGE in ms') + logger.warning('Must provide time range in in ms') else: - print('timeRange = ' + str(timeRange)) + logger.info('timeRange = ' + str(timeRange)) if dt is None: - print('MUST PROVIDE dt in ms') + logger.warning('Must provide dt in ms') else: - print('dt = ' + str(dt)) # batch0['simConfig']['recordStep'] + logger.info('dt = ' + str(dt)) # batch0['simConfig']['recordStep'] if spacing_um is None: - print('MUST PROVIDE SPACING BETWEEN ELECTRODES in MICRONS') + logger.warning('Must provide spacing between electrodes in microns') else: - print('spacing_um = ' + str(spacing_um)) + logger.info('spacing_um = ' + str(spacing_um)) if ymax is None: - print('MUST PROVIDE YMAX (MAX DEPTH) in MICRONS') + logger.warning('Must provide ymax (max depth) in microns') else: - print('ymax = ' + str(ymax)) + logger.info('ymax = ' + str(ymax)) tt = np.arange(timeRange[0], timeRange[1], dt) LFP_data = 
np.array(LFP_input_data)[int(timeRange[0]/dt):int(timeRange[1]/dt),:] @@ -538,7 +537,7 @@ def plotCSD(CSD_data=None, LFP_input_data=None, overlay=None, timeRange=None, sa subaxs[chan].plot(X, LFP_data[:,chan], color='gray', linewidth=0.3) else: - print('No data being overlaid') + logger.info('No data being overlaid') axs[0].set_title('Current Source Density (CSD)', fontsize=fontSize) @@ -550,7 +549,7 @@ def plotCSD(CSD_data=None, LFP_input_data=None, overlay=None, timeRange=None, sa if layer_lines: if layer_bounds is None: - print('No layer boundaries given') + logger.info('No layer boundaries given') else: layerKeys = [] for i in layer_bounds.keys(): diff --git a/netpyne/analysis/filter.py b/netpyne/analysis/filter.py index 5f15147fb..4cec26b32 100644 --- a/netpyne/analysis/filter.py +++ b/netpyne/analysis/filter.py @@ -21,7 +21,6 @@ """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import diff --git a/netpyne/analysis/info.py b/netpyne/analysis/info.py index 43325cb20..dcb998f7c 100644 --- a/netpyne/analysis/info.py +++ b/netpyne/analysis/info.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import @@ -22,7 +21,7 @@ import matplotlib.pyplot as plt import numpy as np from .utils import exception, _saveFigData, _showFigure, getCellsInclude - +from netpyne.logger import logger # ------------------------------------------------------------------------------------------------------------------- ## Calculate normalized transfer entropy @@ -92,13 +91,13 @@ def nTE(cells1=[], cells2=[], spks1=None, spks2=None, timeRange=None, binSize=20 if 'nte' not in dir(h): try: - print(' Warning: support/nte.mod not compiled; attempting to compile from %s via "nrnivmodl support"'%(root)) + logger.warning('support/nte.mod not compiled; attempting to compile from %s via 
"nrnivmodl support"'%(root)) os.system('cd ' + root + '; nrnivmodl support') from neuron import load_mechanisms load_mechanisms(root) - print(' Compilation of support folder mod files successful') + logger.info('Compilation of support folder mod files successful') except: - print(' Error compiling support folder mod files') + logger.warning('Error compiling support folder mod files') return h.load_file(root+'/support/nte.hoc') # nTE code (also requires support/net.mod) @@ -358,7 +357,7 @@ def plotGranger(cells1=None, cells2=None, spks1=None, spks2=None, label1=None, l try: from statsmodels.tsa.stattools import grangercausalitytests as gt except: - print('To test Granger results please install the statsmodel package: "pip install statsmodel"') + logger.warning('To test Granger results please install the statsmodel package: "pip install statsmodel"') exit() tests = gt(np.array([histoCount1, histoCount2]).T, maxlag=10) diff --git a/netpyne/analysis/interactive.py b/netpyne/analysis/interactive.py index 7febe6a12..a72b051af 100644 --- a/netpyne/analysis/interactive.py +++ b/netpyne/analysis/interactive.py @@ -4,7 +4,6 @@ """ from __future__ import unicode_literals -from __future__ import print_function from __future__ import division from __future__ import absolute_import @@ -32,6 +31,7 @@ from bokeh.palettes import Viridis256 from bokeh.models import HoverTool +from netpyne.logger import logger def applyTheme(kwargs): theme = None @@ -171,7 +171,7 @@ def iplotRaster(include=['allCells'], timeRange=None, maxSpikes=1e8, orderBy='gi from bokeh.colors import RGB from bokeh.models.annotations import Title - print('Plotting interactive raster ...') + logger.info('Plotting interactive raster...') theme = applyTheme(kwargs) @@ -218,7 +218,7 @@ def iplotRaster(include=['allCells'], timeRange=None, maxSpikes=1e8, orderBy='gi sel, spkts, spkgids = getSpktSpkid(cellGids=[] if include == ['allCells'] else cellGids, timeRange=timeRange) except: import sys - print((sys.exc_info())) 
+ logger.warning(sys.exc_info()) spkgids, spkts = [], [] sel = pd.DataFrame(columns=['spkt', 'spkid']) sel['spkgidColor'] = sel['spkid'].map(gidColors) @@ -252,14 +252,13 @@ def iplotRaster(include=['allCells'], timeRange=None, maxSpikes=1e8, orderBy='gi numNetStims += 1 else: pass - #print netStimLabel+' produced no spikes' if len(cellGids)>0 and numNetStims: ylabelText = ylabelText + ' and NetStims (at the end)' elif numNetStims: ylabelText = ylabelText + 'NetStims' if numCellSpks+numNetStims == 0: - print('No spikes available to plot raster') + logger.warning('No spikes available to plot raster') return None # Time Range @@ -272,7 +271,7 @@ def iplotRaster(include=['allCells'], timeRange=None, maxSpikes=1e8, orderBy='gi # Limit to max spikes if (len(sel)>maxSpikes): - print((' Showing only the first %i out of %i spikes' % (maxSpikes, len(sel)))) # Limit num of spikes + logger.info(' Showing only the first %i out of %i spikes' % (maxSpikes, len(sel))) # Limit num of spikes if numNetStims: # sort first if have netStims sel = sel.sort_values(by='spkt') sel = sel.iloc[:maxSpikes] @@ -321,7 +320,6 @@ def iplotRaster(include=['allCells'], timeRange=None, maxSpikes=1e8, orderBy='gi if syncLines: for spkt in sel['spkt'].tolist(): fig.line((spkt, spkt), (0, len(cells)+numNetStims), color='red', line_width=2) - print(syncMeasure()) t.text = 'cells=%i syns/cell=%0.1f rate=%0.1f Hz sync=%0.2f' % (numCells,connsPerCell,firingRate,syncMeasure()) else: t.text = 'cells=%i syns/cell=%0.1f rate=%0.1f Hz' % (numCells,connsPerCell,firingRate) @@ -967,7 +965,7 @@ def iplotSpikeHist(include = ['allCells', 'eachPop'], legendLabels = [], timeRan from bokeh.models import Legend from bokeh.colors import RGB - print('Plotting interactive spike histogram...') + logger.info('Plotting interactive spike histogram...') theme = applyTheme(kwargs) @@ -1085,7 +1083,7 @@ def iplotSpikeHist(include = ['allCells', 'eachPop'], legendLabels = [], timeRan fig.legend.click_policy='hide' 
fig.legend.location='top_right' - print(figs) + logger.info(figs) plot_layout = gridplot(figs, ncols=1, merge_tools=False, sizing_mode='stretch_both') html = file_html(plot_layout, CDN, title="Spike Histogram", theme=theme) @@ -1191,7 +1189,7 @@ def iplotRatePSD(include=['allCells', 'eachPop'], timeRange=None, binSize=5, max from bokeh.colors import RGB from bokeh.models import Legend - print('Plotting interactive firing rate power spectral density (PSD) ...') + logger.info('Plotting interactive firing rate power spectral density (PSD) ...') theme = applyTheme(kwargs) @@ -1407,7 +1405,7 @@ def iplotTraces(include=None, timeRange=None, overlay=False, oneFigPer='cell', r from bokeh.models import Legend from bokeh.colors import RGB - print('Plotting interactive recorded cell traces per', oneFigPer) + logger.info('Plotting interactive recorded cell traces per ' + oneFigPer) theme = applyTheme(kwargs) @@ -1736,7 +1734,7 @@ def iplotLFP(electrodes=['avg', 'all'], plots=['timeSeries', 'PSD', 'spectrogram from bokeh.layouts import layout, column, row from bokeh.colors import RGB - print('Plotting interactive LFP ...') + logger.info('Plotting interactive LFP ...') html = None theme = applyTheme(kwargs) @@ -2137,7 +2135,7 @@ def iplotConn(includePre=['all'], includePost=['all'], feature='strength', order from bokeh.layouts import layout from bokeh.colors import RGB - print('Plotting interactive connectivity matrix...') + logger.info('Plotting interactive connectivity matrix...') theme = applyTheme(kwargs) @@ -2148,7 +2146,7 @@ def iplotConn(includePre=['all'], includePost=['all'], feature='strength', order if connMatrix is None: - print(" Error calculating connMatrix in iplotConn()") + logger.info(" Error calculating connMatrix in iplotConn()") return None # TODO: set plot font size in Bokeh @@ -2253,11 +2251,11 @@ def iplotConn(includePre=['all'], includePost=['all'], feature='strength', order fig.yaxis.axis_label = feature elif groupBy == 'cell': - print(' Error: plotConn 
graphType="bar" with groupBy="cell" not yet implemented') + logger.warning(' Error: plotConn graphType="bar" with groupBy="cell" not yet implemented') return None elif graphType == 'pie': - print(' Error: plotConn graphType="pie" not yet implemented') + logger.warning(' Error: plotConn graphType="pie" not yet implemented') return None plot_layout = layout([fig], sizing_mode='stretch_both') @@ -2375,7 +2373,7 @@ def iplot2Dnet(include=['allCells'], view='xy', showConns=True, popColors=None, from bokeh.colors import RGB from bokeh.models.annotations import Title - print('Plotting interactive 2D representation of network cell locations and connections...') + logger.info('Plotting interactive 2D representation of network cell locations and connections...') theme = applyTheme(kwargs) @@ -2393,7 +2391,7 @@ def iplot2Dnet(include=['allCells'], view='xy', showConns=True, popColors=None, ycoord = 'z' if tagsFile: - print('Loading tags file...') + logger.info('Loading tags file...') import json with open(tagsFile, 'r') as fileObj: tagsTmp = json.load(fileObj)['tags'] tagsFormat = tagsTmp.pop('format', []) @@ -2407,8 +2405,8 @@ def iplot2Dnet(include=['allCells'], view='xy', showConns=True, popColors=None, yIndex = tagsFormat.index('y') if 'y' in tagsFormat else missing.append('y') zIndex = tagsFormat.index('z') if 'z' in tagsFormat else missing.append('z') if len(missing) > 0: - print("Missing:") - print(missing) + logger.info("Missing:") + logger.info(missing) return None, None, None # find pre and post cells @@ -2429,7 +2427,7 @@ def iplot2Dnet(include=['allCells'], view='xy', showConns=True, popColors=None, elif ycoord == 'z': posY = [tags[gid][zIndex] for gid in cellGids] # get all y positions else: - print('Error loading tags from file') + logger.warning('Error loading tags from file') return None else: @@ -2580,7 +2578,7 @@ def iplotRxDConcentration(speciesLabel, regionLabel, plane='xy', saveFig=None, s from bokeh.transform import linear_cmap from bokeh.models import 
ColorBar - print('Plotting interactive RxD concentration ...') + logger.info('Plotting interactive RxD concentration ...') theme = applyTheme(kwargs) @@ -2790,7 +2788,7 @@ def iplotSpikeStats(include=['eachPop', 'allCells'], statDataIn={}, timeRange=No from bokeh.palettes import Spectral6 from bokeh.models.mappers import CategoricalColorMapper - print('Plotting interactive spike statistics ...') + logger.info('Plotting interactive spike statistics ...') TOOLS = "pan,wheel_zoom,box_zoom,reset,save,box_select" @@ -2887,7 +2885,7 @@ def iplotSpikeStats(include=['eachPop', 'allCells'], statDataIn={}, timeRange=No try: import pyspike except: - print("Error: plotSpikeStats() requires the PySpike python package \ + logger.warning("Error: plotSpikeStats() requires the PySpike python package \ to calculate synchrony (try: pip install pyspike)") return 0 diff --git a/netpyne/analysis/lfp.py b/netpyne/analysis/lfp.py index f8c4df83c..6f680a2d1 100644 --- a/netpyne/analysis/lfp.py +++ b/netpyne/analysis/lfp.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import @@ -26,7 +25,7 @@ import numpy as np from numbers import Number from .utils import colorList, exception, _saveFigData, _showFigure, _smooth1d - +from netpyne.logger import logger # ------------------------------------------------------------------------------------------------------------------- ## Plot LFP (time-resolved, power spectral density, time-frequency and 3D locations) @@ -185,7 +184,7 @@ def plotLFP(timeRange=None, electrodes=['avg', 'all'], plots=['timeSeries', 'PSD from .. 
import sim from ..support.scalebar import add_scalebar - print('Plotting LFP ...') + logger.info('Plotting LFP ...') if not colors: colors = colorList @@ -530,7 +529,7 @@ def plotLFP(timeRange=None, electrodes=['avg', 'all'], plots=['timeSeries', 'PSD fontSize=fontSize, saveFig=saveFig, showFig=showFig, figSize=figSize)[0] figs.append(fig) except: - print(' Failed to plot LFP locations...') + logger.warning(' Failed to plot LFP locations...') diff --git a/netpyne/analysis/network.py b/netpyne/analysis/network.py index bf1957c93..823a8e956 100644 --- a/netpyne/analysis/network.py +++ b/netpyne/analysis/network.py @@ -29,6 +29,7 @@ from numbers import Number from .utils import colorList, exception, _roundFigures, getCellsInclude, getCellsIncludeTags from .utils import _saveFigData, _showFigure +from netpyne.logger import logger # ------------------------------------------------------------------------------------------------------------------- ## Support function for plotConn() - calculate conn using data from sim object @@ -45,7 +46,7 @@ def list_of_dict_unique_by_key(seq, key): try: return [x for x in seq if x[key] not in seen and not seen_add(x[key])] except: - print(' Error calculating list of dict unique by key...') + logger.warning(' Error calculating list of dict unique by key...') return [] # adapt indices/keys based on compact vs long conn format @@ -61,8 +62,8 @@ def list_of_dict_unique_by_key(seq, key): preLabelIndex = connsFormat.index('preLabel') if 'preLabel' in connsFormat else -1 if len(missing) > 0: - print(" Error: cfg.compactConnFormat missing:") - print(missing) + logger.warning(" Error: cfg.compactConnFormat missing:") + logger.warning(missing) return None, None, None else: # using long conn format (dict) @@ -87,7 +88,7 @@ def list_of_dict_unique_by_key(seq, key): connMatrix = np.zeros((len(cellGidsPre), len(cellGidsPost))) countMatrix = np.zeros((len(cellGidsPre), len(cellGidsPost))) else: - print(' Conn matrix with groupBy="cell" only supports 
features= "weight", "delay" or "numConns"') + logger.warning(' Conn matrix with groupBy="cell" only supports features= "weight", "delay" or "numConns"') return None, None, None cellIndsPre = {cell['gid']: ind for ind,cell in enumerate(cellsPre)} cellIndsPost = {cell['gid']: ind for ind,cell in enumerate(cellsPost)} @@ -250,7 +251,7 @@ def list_of_dict_unique_by_key(seq, key): # Calculate matrix if grouped by numeric tag (eg. 'y') elif groupBy in sim.net.allCells[0]['tags'] and isinstance(sim.net.allCells[0]['tags'][groupBy], Number): if not isinstance(groupByIntervalPre, Number) or not isinstance(groupByIntervalPost, Number): - print(' groupByIntervalPre or groupByIntervalPost not specified') + logger.warning(' groupByIntervalPre or groupByIntervalPost not specified') return None, None, None # group cells by 'groupBy' feature (eg. 'y') in intervals of 'groupByInterval') @@ -272,7 +273,7 @@ def list_of_dict_unique_by_key(seq, key): # only allow matrix sizes >= 2x2 [why?] # if len(groupsPre) < 2 or len(groupsPost) < 2: - # print 'groupBy %s with groupByIntervalPre %s and groupByIntervalPost %s results in <2 groups'%(str(groupBy), str(groupByIntervalPre), str(groupByIntervalPre)) + # logger.info 'groupBy %s with groupByIntervalPre %s and groupByIntervalPost %s results in <2 groups'%(str(groupBy), str(groupByIntervalPre), str(groupByIntervalPre)) # return # set indices for pre and post groups @@ -343,7 +344,7 @@ def list_of_dict_unique_by_key(seq, key): # no valid groupBy else: - print(' groupBy (%s) is not valid'%(str(groupBy))) + logger.warning(' groupBy (%s) is not valid'%(str(groupBy))) return None, None, None # normalize by number of postsyn cells @@ -395,19 +396,19 @@ def list_of_dict_unique_by_key(seq, index): start = time() tags, conns = None, None if tagsFile: - print('Loading tags file...') + logger.info('Loading tags file...') with open(tagsFile, 'r') as fileObj: tagsTmp = json.load(fileObj)['tags'] tagsFormat = tagsTmp.pop('format', []) tags = {int(k): v 
for k,v in tagsTmp.items()} # find method to load json with int keys? del tagsTmp if connsFile: - print('Loading conns file...') + logger.info('Loading conns file...') with open(connsFile, 'r') as fileObj: connsTmp = json.load(fileObj)['conns'] connsFormat = connsTmp.pop('format', []) conns = {int(k): v for k,v in connsTmp.items()} del connsTmp - print('Finished loading; total time (s): %.2f'%(time()-start)) + logger.info('Finished loading; total time (s): %.2f'%(time()-start)) # find pre and post cells if tags and conns: @@ -417,7 +418,7 @@ def list_of_dict_unique_by_key(seq, index): else: cellGidsPost = getCellsIncludeTags(includePost, tags, tagsFormat) else: - print('Error loading tags and conns from file') + logger.warning('Error loading tags and conns from file') return None, None, None @@ -431,22 +432,22 @@ def list_of_dict_unique_by_key(seq, index): preLabelIndex = connsFormat.index('preLabel') if 'preLabel' in connsFormat else -1 if len(missing) > 0: - print("Missing:") - print(missing) + logger.warning("Missing:") + logger.warning(missing) return None, None, None if isinstance(synMech, basestring): synMech = [synMech] # make sure synMech is a list # Calculate matrix if grouped by cell if groupBy == 'cell': - print(' plotConn from file for groupBy=cell not implemented yet') + logger.warning(' plotConn from file for groupBy=cell not implemented yet') return None, None, None # Calculate matrix if grouped by pop elif groupBy == 'pop': # get list of pops - print(' Obtaining list of populations ...') + logger.info(' Obtaining list of populations ...') popsPre = list(set([tags[gid][popIndex] for gid in cellGidsPre])) popIndsPre = {pop: ind for ind,pop in enumerate(popsPre)} netStimPopsPre = [] # netstims not yet supported @@ -467,7 +468,7 @@ def list_of_dict_unique_by_key(seq, index): countMatrix = np.zeros((len(popsPre), len(popsPost))) # calculate max num conns per pre and post pair of pops - print(' Calculating max num conns for each pair of population ...') + 
logger.info(' Calculating max num conns for each pair of population ...') numCellsPopPre = {} for pop in popsPre: if pop in netStimPopsPre: @@ -496,9 +497,9 @@ def list_of_dict_unique_by_key(seq, index): if feature == 'divergence': maxPreConnMatrix[popIndsPre[prePop], popIndsPost[postPop]] = numCellsPopPre[prePop] # Calculate conn matrix - print(' Calculating weights, strength, prob, delay etc matrices ...') + logger.info(' Calculating weights, strength, prob, delay etc matrices ...') for postGid in cellGidsPost: # for each postsyn cell - print(' cell %d'%(int(postGid))) + logger.info(' cell %d'%(int(postGid))) if synOrConn=='syn': cellConns = conns[postGid] # include all synapses else: @@ -525,12 +526,12 @@ def list_of_dict_unique_by_key(seq, index): # Calculate matrix if grouped by numeric tag (eg. 'y') elif groupBy in sim.net.allCells[0]['tags'] and isinstance(sim.net.allCells[0]['tags'][groupBy], Number): - print('plotConn from file for groupBy=[arbitrary property] not implemented yet') + logger.warning('plotConn from file for groupBy=[arbitrary property] not implemented yet') return None, None, None # no valid groupBy else: - print('groupBy (%s) is not valid'%(str(groupBy))) + logger.warning('groupBy (%s) is not valid'%(str(groupBy))) return None, None, None if groupBy != 'cell': @@ -557,7 +558,7 @@ def list_of_dict_unique_by_key(seq, index): elif feature == 'divergence': connMatrix = countMatrix / maxPreConnMatrix - print(' plotting ...') + logger.info(' plotting ...') return connMatrix, pre, post @@ -690,7 +691,7 @@ def plotConn(includePre=['all'], includePost=['all'], feature='strength', orderB from .. 
import sim - print('Plotting connectivity matrix...') + logger.info('Plotting connectivity matrix...') if groupBy == 'cell' and feature == 'strength': feature = 'weight' @@ -701,7 +702,7 @@ def plotConn(includePre=['all'], includePost=['all'], feature='strength', orderB connMatrix, pre, post = _plotConnCalculateFromSim(includePre, includePost, feature, orderBy, groupBy, groupByIntervalPre, groupByIntervalPost, synOrConn, synMech, removeWeightNorm, logPlot) if connMatrix is None: - print(" Error calculating connMatrix in plotConn()") + logger.warning(" Error calculating connMatrix in plotConn()") return None # set font size @@ -806,11 +807,11 @@ def plotConn(includePre=['all'], includePost=['all'], feature='strength', orderB plt.tight_layout() elif groupBy == 'cell': - print(' Error: plotConn graphType="bar" with groupBy="cell" not implemented') + logger.warning(' Error: plotConn graphType="bar" with groupBy="cell" not implemented') return None elif graphType == 'pie': - print(' Error: plotConn graphType="pie" not yet implemented') + logger.warning(' Error: plotConn graphType="pie" not yet implemented') return None @@ -919,7 +920,7 @@ def plot2Dnet(include=['allCells'], view='xy', showConns=True, popColors=None, t from .. 
import sim - print('Plotting 2D representation of network cell locations and connections...') + logger.info('Plotting 2D representation of network cell locations and connections...') fig = plt.figure(figsize=figSize) @@ -930,7 +931,7 @@ def plot2Dnet(include=['allCells'], view='xy', showConns=True, popColors=None, t ycoord = 'z' if tagsFile: - print('Loading tags file...') + logger.info('Loading tags file...') import json with open(tagsFile, 'r') as fileObj: tagsTmp = json.load(fileObj)['tags'] tagsFormat = tagsTmp.pop('format', []) @@ -944,8 +945,8 @@ def plot2Dnet(include=['allCells'], view='xy', showConns=True, popColors=None, t yIndex = tagsFormat.index('y') if 'y' in tagsFormat else missing.append('y') zIndex = tagsFormat.index('z') if 'z' in tagsFormat else missing.append('z') if len(missing) > 0: - print("Missing:") - print(missing) + logger.warning("Missing:") + logger.warning(missing) return None, None, None # find pre and post cells @@ -966,7 +967,7 @@ def plot2Dnet(include=['allCells'], view='xy', showConns=True, popColors=None, t elif ycoord == 'z': posY = [tags[gid][zIndex] for gid in cellGids] # get all y positions else: - print('Error loading tags from file') + logger.warning('Error loading tags from file') return None else: @@ -1109,7 +1110,7 @@ def plot2Dfiring(include=['allCells'], view='xy', popColors=None, timeRange=None from .. import sim from matplotlib import animation - print('Plotting 2D representation of network cell locations and connections...') + logger.info('Plotting 2D representation of network cell locations and connections...') fig = plt.figure(figsize=figSize) @@ -1326,7 +1327,7 @@ def plotShape(includePre=['all'], includePost=['all'], showSyns=False, showElect from .. 
import sim from neuron import h - print('Plotting 3D cell shape ...') + logger.info('Plotting 3D cell shape ...') cellsPreGids = [c.gid for c in sim.getCellsList(includePre)] if includePre else [] cellsPost = sim.getCellsList(includePost) @@ -1385,7 +1386,7 @@ def plotShape(includePre=['all'], includePost=['all'], showSyns=False, showElect cvals = np.array(cvals) if not isinstance(cellsPost[0].secs, dict): - print('Error: Cell sections not available') + logger.warning('Error: Cell sections not available') return -1 if not secs: secs = [s['hObj'] for cellPost in cellsPost for s in list(cellPost.secs.values())] @@ -1600,17 +1601,17 @@ def calculateDisynaptic(includePost = ['allCells'], includePre = ['allCells'], i start = time() if tagsFile: - print('Loading tags file...') + logger.info('Loading tags file...') with open(tagsFile, 'r') as fileObj: tagsTmp = json.load(fileObj)['tags'] tags = {int(k): v for k,v in tagsTmp.items()} del tagsTmp if connsFile: - print('Loading conns file...') + logger.info('Loading conns file...') with open(connsFile, 'r') as fileObj: connsTmp = json.load(fileObj)['conns'] conns = {int(k): v for k,v in connsTmp.items()} del connsTmp - print(' Calculating disynaptic connections...') + logger.info(' Calculating disynaptic connections...') # loading from json files if tags and conns: cellsPreGids = getCellsIncludeTags(includePre, tags) @@ -1632,7 +1633,7 @@ def calculateDisynaptic(includePost = ['allCells'], includePre = ['allCells'], i if 'preGid' in sim.cfg.compactConnFormat: preGidIndex = sim.cfg.compactConnFormat.index('preGid') # using compact conn format (list) else: - print(' Error: cfg.compactConnFormat does not include "preGid"') + logger.warning(' Error: cfg.compactConnFormat does not include "preGid"') return -1 else: preGidIndex = 'preGid' # using long conn format (dict) @@ -1642,7 +1643,6 @@ def calculateDisynaptic(includePost = ['allCells'], includePre = ['allCells'], i cellsPost, _, _ = getCellsInclude(includePost) for postCell 
in cellsPost: - print(postCell['gid']) preGidsAll = [conn[preGidIndex] for conn in postCell['conns'] if isinstance(conn[preGidIndex], Number) and conn[preGidIndex] in cellsPreGids+cellsPrePreGids] preGids = [gid for gid in preGidsAll if gid in cellsPreGids] for preGid in preGids: @@ -1652,13 +1652,13 @@ def calculateDisynaptic(includePost = ['allCells'], includePre = ['allCells'], i if not set(prePreGids).isdisjoint(preGidsAll): numDis += 1 - print(' Total disynaptic connections: %d / %d (%.2f%%)' % (numDis, totCon, float(numDis)/float(totCon)*100 if totCon>0 else 0.0)) + logger.info(' Total disynaptic connections: %d / %d (%.2f%%)' % (numDis, totCon, float(numDis)/float(totCon)*100 if totCon>0 else 0.0)) try: sim.allSimData['disynConns'] = numDis except: pass - print(' time ellapsed (s): ', time() - start) + logger.info(' time ellapsed (s): ', time() - start) return numDis diff --git a/netpyne/analysis/spikes.py b/netpyne/analysis/spikes.py index e9e0f0e4d..bcd0d0a9e 100644 --- a/netpyne/analysis/spikes.py +++ b/netpyne/analysis/spikes.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import @@ -32,7 +31,7 @@ import scipy from ..specs import Dict from .utils import colorList, exception, getCellsInclude, getSpktSpkid, _showFigure, _saveFigData, syncMeasure, _smooth1d - +from netpyne.logger import logger # ------------------------------------------------------------------------------------------------------------------- ## Calculate avg and peak rate of different subsets of cells for specific time period @@ -63,7 +62,7 @@ def calculateRate (include = ['allCells', 'eachPop'], peakBin = 5, timeRange = N from .. 
import sim - print('Calculating avg and peak firing rates ...') + logger.info('Calculating avg and peak firing rates ...') # Replace 'eachPop' with list of pops if 'eachPop' in include: @@ -329,9 +328,9 @@ def plotSyncs(include=['allCells', 'eachPop'], timeRanges=None, timeRangeLabels= timeRangeLabels = ['%f-%f ms'%(t[0], t[1]) for t in timeRanges] #['period '+i for i in range(len(timeRanges))] for i, timeRange in enumerate(timeRanges): - print(timeRange) - _, sync = sim.analysis.plotSpikeStats (include = include, timeRange = timeRange, stats = ['sync'], saveFig = False, showFig =False) - print(sync) + logger.info(timeRange) + _, sync = sim.analysis.plotSpikeStats(include = include, timeRange = timeRange, stats = ['sync'], saveFig = False, showFig = False) + logger.info(sync) sync = [s[0] for s in sync] syncs.append(sync) @@ -504,7 +503,7 @@ def plotRaster(include=['allCells'], timeRange=None, maxSpikes=1e8, orderBy='gid from .. import sim - print('Plotting raster...') + logger.info('Plotting raster...') # Select cells to include cells, cellGids, netStimLabels = getCellsInclude(include) @@ -545,7 +544,7 @@ def plotRaster(include=['allCells'], timeRange=None, maxSpikes=1e8, orderBy='gid sel, spkts, spkgids = getSpktSpkid(cellGids=[] if include == ['allCells'] else cellGids, timeRange=timeRange) # using [] is faster for all cells except: import sys - print((sys.exc_info())) + logger.warning(sys.exc_info()) spkgids, spkts = [], [] sel = pd.DataFrame(columns=['spkt', 'spkid']) sel['spkgidColor'] = sel['spkid'].map(gidColors) @@ -587,7 +586,7 @@ def plotRaster(include=['allCells'], timeRange=None, maxSpikes=1e8, orderBy='gid ylabelText = ylabelText + 'NetStims' if numCellSpks+numNetStims == 0: - print('No spikes available to plot raster') + logger.warning('No spikes available to plot raster') return None # Time Range @@ -601,7 +600,7 @@ def plotRaster(include=['allCells'], timeRange=None, maxSpikes=1e8, orderBy='gid # Limit to maxSpikes if (len(sel)>maxSpikes): - 
print((' Showing only the first %i out of %i spikes' % (maxSpikes, len(sel)))) # Limit num of spikes + logger.info(' Showing only the first %i out of %i spikes' % (maxSpikes, len(sel))) # Limit num of spikes if numNetStims: # sort first if have netStims sel = sel.sort_values(by='spkt') sel = sel.iloc[:maxSpikes] @@ -849,7 +848,7 @@ def plotSpikeHist(include=['eachPop', 'allCells'], timeRange=None, binSize=5, ov from .. import sim from ..support.scalebar import add_scalebar - print('Plotting spike histogram...') + logger.info('Plotting spike histogram...') # Replace 'eachPop' with list of pops if 'eachPop' in include: @@ -868,7 +867,7 @@ else: yaxisLabel = 'Spike count' else: - print('Invalid measure: %s', (measure)) + logger.warning('Invalid measure: %s' % measure) return # time range @@ -1151,7 +1150,7 @@ def plotSpikeStats(include=['eachPop', 'allCells'], statDataIn={}, timeRange=Non """ from .. import sim - print('Plotting spike stats...') + logger.info('Plotting spike stats...') # Set plot style colors = [] @@ -1268,7 +1267,7 @@ def plotSpikeStats(include=['eachPop', 'allCells'], statDataIn={}, timeRange=Non import pyspike import numpy as np except: - print("Error: plotSpikeStats() requires the PySpike python package \ + logger.warning("Error: plotSpikeStats() requires the PySpike python package \ to calculate synchrony (try: pip install pyspike)") return 0 @@ -1384,7 +1383,7 @@ def lognorm(meaninput, stdinput, binedges, n, popLabel, color): x = [(binedges[i]+binedges[i+1])/2.0 for i in range(len(binedges)-1)] #np.linspace(histmin, 30, num=400) # values for x-axis pdf = stats.lognorm.pdf(x, shape, loc=0, scale=scale) # probability distribution R, p = scipy.stats.pearsonr(n, pdf) - print(' Pop %s rate: mean=%f, std=%f, lognorm mu=%f, lognorm sigma=%f, R=%.2f (p-value=%.2f)' % (popLabel, M, s, mu, sigma, R, p)) + logger.info(' Pop %s rate: mean=%f, std=%f, lognorm
sigma=%f, R=%.2f (p-value=%.2f)' % (popLabel, M, s, mu, sigma, R, p)) plt.semilogx(x, pdf, color=color, ls='dashed') return pdf @@ -1396,7 +1395,7 @@ def lognorm(meaninput, stdinput, binedges, n, popLabel, color): # check normality of distribution #W, p = scipy.stats.shapiro(data) - #print 'Pop %s rate: mean = %f, std = %f, normality (Shapiro-Wilk test) = %f, p-value = %f' % (include[i], mu, sigma, W, p) + #logger.info 'Pop %s rate: mean = %f, std = %f, normality (Shapiro-Wilk test) = %f, p-value = %f' % (include[i], mu, sigma, W, p) plt.xlabel(xlabel, fontsize=fontsiz) @@ -1595,7 +1594,7 @@ def plotRatePSD(include=['eachPop', 'allCells'], timeRange=None, binSize=5, minF from .. import sim - print('Plotting firing rate power spectral density (PSD) ...') + logger.info('Plotting firing rate power spectral density (PSD) ...') # Replace 'eachPop' with list of pops if 'eachPop' in include: @@ -1857,7 +1856,7 @@ def plotRateSpectrogram(include=['allCells', 'eachPop'], timeRange=None, binSize from .. 
import sim - print('Plotting firing rate spectrogram ...') + logger.info('Plotting firing rate spectrogram ...') # Replace 'eachPop' with list of pops if 'eachPop' in include: @@ -1992,7 +1991,7 @@ def popAvgRates(tranges = None, show = True): avgRates = Dict() if not hasattr(sim, 'allSimData') or 'spkt' not in sim.allSimData: - print('Error: sim.allSimData not available; please call sim.gatherData()') + logger.warning('Error: sim.allSimData not available; please call sim.gatherData()') return None spktsAll = sim.allSimData['spkt'] @@ -2025,7 +2024,7 @@ def popAvgRates(tranges = None, show = True): for pop in sim.net.allPops: if len(tranges) > 1: - print(' %s ' % (pop)) + logger.info(' %s ' % (pop)) avgRates[pop] = {} for spkids, spkts, trange in zip(spkidsList, spktsList, tranges): @@ -2036,13 +2035,13 @@ def popAvgRates(tranges = None, show = True): if len(tranges) == 1: tsecs = float((trange[1]-trange[0]))/1000.0 avgRates[pop] = len([spkid for spkid in spkids if sim.net.allCells[int(spkid)]['tags']['pop']==pop])/numCells/tsecs - print(' %s : %.3f Hz'%(pop, avgRates[pop])) + logger.info(' %s : %.3f Hz'%(pop, avgRates[pop])) # multiple time intervals else: tsecs = float((trange[1]-trange[0]))/1000.0 avgRates[pop]['%d_%d'%(trange[0], trange[1])] = len([spkid for spkid in spkids if sim.net.allCells[int(spkid)]['tags']['pop']==pop])/numCells/tsecs - print(' (%d - %d ms): %.3f Hz'%(trange[0], trange[1], avgRates[pop]['%d_%d'%(trange[0], trange[1])])) + logger.info(' (%d - %d ms): %.3f Hz'%(trange[0], trange[1], avgRates[pop]['%d_%d'%(trange[0], trange[1])])) return avgRates @@ -2055,7 +2054,7 @@ def calculatefI(): from .. 
import sim - print('Calculating f-I features...') + logger.info('Calculating f-I features...') times = sim.cfg.analysis['plotfI'].get('times', [0, sim.cfg.duration]) dur = sim.cfg.analysis['plotfI'].get('dur', sim.cfg.duration) diff --git a/netpyne/analysis/traces.py b/netpyne/analysis/traces.py index 4e459d2d0..07521050e 100644 --- a/netpyne/analysis/traces.py +++ b/netpyne/analysis/traces.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import @@ -17,6 +16,7 @@ from future import standard_library standard_library.install_aliases() from netpyne import __gui__ +from netpyne.logger import logger if __gui__: import matplotlib.pyplot as plt @@ -131,7 +131,7 @@ def plotTraces(include=None, timeRange=None, oneFigPer='cell', rerun=False, titl from .. import sim from ..support.scalebar import add_scalebar - print('Plotting recorded cell traces ...', oneFigPer) + logger.info('Plotting recorded cell traces per ' + oneFigPer) if include is None: # if none, record from whatever was recorded if 'plotTraces' in sim.cfg.analysis and 'include' in sim.cfg.analysis['plotTraces']: @@ -187,7 +187,6 @@ def plotFigPerTrace(subGids): figs['_trace_'+str(trace)] = plt.figure(figsize=figSize) # Open a new figure for igid, gid in enumerate(subGids): - # print('recordStep',recordStep) if 'cell_'+str(gid) in sim.allSimData[trace]: fullTrace = sim.allSimData[trace]['cell_'+str(gid)] if isinstance(fullTrace, dict): @@ -433,7 +432,7 @@ def plotEPSPAmp(include=None, trace=None, start=0, interval=50, number=2, amp='a from .. 
import sim - print('Plotting EPSP amplitudes...') + logger.info('Plotting EPSP amplitudes...') if include is None: include = [] # If not defined, initialize as empty list @@ -441,7 +440,7 @@ def plotEPSPAmp(include=None, trace=None, start=0, interval=50, number=2, amp='a gidPops = {cell['gid']: cell['tags']['pop'] for cell in cells} if not trace: - print('Error: Missing trace to to plot EPSP amplitudes') + logger.warning('Error: Missing trace to to plot EPSP amplitudes') return step = sim.cfg.recordStep diff --git a/netpyne/analysis/utils.py b/netpyne/analysis/utils.py index 684de8b49..830e46fab 100644 --- a/netpyne/analysis/utils.py +++ b/netpyne/analysis/utils.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import @@ -30,6 +29,7 @@ import numpy as np import functools import sys +from netpyne.logger import logger # ------------------------------------------------------------------------------------------------------------------- # Define list of colors @@ -70,9 +70,8 @@ def wrapper(*args, **kwargs): try: return function(*args, **kwargs) except Exception as e: - # print err = "There was an exception in %s():"%(function.__name__) - print((" %s \n %s \n %s"%(err,e,sys.exc_info()))) + logger.warning(" %s \n %s \n %s"%(err,e,sys.exc_info())) return -1 return wrapper @@ -105,15 +104,14 @@ def _saveFigData(figData, fileName=None, type=''): if fileName.endswith('.pkl'): # save to pickle import pickle - print(('Saving figure data as %s ... ' % (fileName))) + logger.info('Saving figure data as %s ... ' % fileName) with open(fileName, 'wb') as fileObj: pickle.dump(figData, fileObj) - elif fileName.endswith('.json'): # save to json - print(('Saving figure data as %s ... ' % (fileName))) + logger.info('Saving figure data as %s ... 
' % fileName) sim.saveJSON(fileName, figData) else: - print('File extension to save figure data not recognized') + logger.warning('File extension to save figure data not recognized') # ------------------------------------------------------------------------------------------------------------------- @@ -167,7 +165,7 @@ def _smooth1d(x,window_len=11,window='hanning'): s=np.r_[x[window_len-1:0:-1],x,x[-1:-window_len:-1]] - #print(len(s)) + if window == 'flat': #moving average w=np.ones(window_len,'d') else: diff --git a/netpyne/analysis/wrapper.py b/netpyne/analysis/wrapper.py index 450164a30..acc6fcf07 100644 --- a/netpyne/analysis/wrapper.py +++ b/netpyne/analysis/wrapper.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import @@ -11,6 +10,7 @@ from future import standard_library standard_library.install_aliases() from netpyne import __gui__ +from netpyne.logger import logger try: from datetime import datetime @@ -38,20 +38,19 @@ def plotData(): if kwargs == True: kwargs = {} elif kwargs == False: continue func = getattr(sim.analysis, funcName) # get pointer to function - out = func(**kwargs) # call function with user arguments + out = func(**kwargs) # call function with user arguments # Print timings if sim.cfg.timing: - sim.timing('stop', 'plotTime') - print((' Done; plotting time = %0.2f s' % sim.timingData['plotTime'])) + logger.timing(' Done; plotting time = %0.2f s' % sim.timingData['plotTime']) sim.timing('stop', 'totalTime') sumTime = sum([t for k,t in sim.timingData.items() if k not in ['totalTime']]) - if sim.timingData['totalTime'] <= 1.2*sumTime: # Print total time (only if makes sense) - print(('\nTotal time = %0.2f s' % sim.timingData['totalTime'])) + if sim.timingData['totalTime'] <= 1.2*sumTime: # Print total time (only if makes sense) + logger.timing('\nTotal time = %0.2f s' % sim.timingData['totalTime']) try: - print('\nEnd time: ', 
datetime.now()) + logger.info('\nEnd time: %s', datetime.now()) except: pass diff --git a/netpyne/batch/__init__.py b/netpyne/batch/__init__.py index 6d9c1b5af..9518874b8 100644 --- a/netpyne/batch/__init__.py +++ b/netpyne/batch/__init__.py @@ -4,7 +4,6 @@ """ from __future__ import unicode_literals -from __future__ import print_function from __future__ import division from __future__ import absolute_import from future import standard_library diff --git a/netpyne/batch/asd_parallel.py b/netpyne/batch/asd_parallel.py index 54c765622..0a6ae47b4 100644 --- a/netpyne/batch/asd_parallel.py +++ b/netpyne/batch/asd_parallel.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import @@ -43,6 +42,7 @@ from .utils import createFolder from .utils import bashTemplate from .utils import dcp, sigfig +from netpyne.logger import logger pc = h.ParallelContext() # use bulletin board master/slave @@ -177,7 +177,7 @@ def asd(function, xPop, saveFile=None, args=None, stepsize=0.1, sinc=2, sdec=2, """ if randseed is not None: nr.seed(int(randseed)) # Don't reset it if not supplied - if verbose >= 3: print('ASD: Launching with random seed is %i; sample: %f' % (randseed, nr.random())) + logger.debug('ASD: Launching with random seed is %i; sample: %f' % (randseed, nr.random())) def consistentshape(userinput, origshape=False): """ @@ -201,14 +201,14 @@ def consistentshape(userinput, origshape=False): errormsg = 'ASD: The length of the input vector cannot be zero' raise Exception(errormsg) if sinc<1: - print('ASD: sinc cannot be less than 1; resetting to 2'); sinc = 2 + logger.warning('ASD: sinc cannot be less than 1; resetting to 2'); sinc = 2 if sdec<1: - print('ASD: sdec cannot be less than 1; resetting to 2'); sdec = 2 + logger.warning('ASD: sdec cannot be less than 1; resetting to 2'); sdec = 2 if pinc<1: - print('ASD: pinc cannot be less than 1; resetting to 2') +
logger.warning('ASD: pinc cannot be less than 1; resetting to 2') pinc = 2 if pdec<1: - print('ASD: pdec cannot be less than 1; resetting to 2') + logger.warning('ASD: pdec cannot be less than 1; resetting to 2') pdec = 2 # Set initial parameter selection probabilities -- uniform by default @@ -269,7 +269,7 @@ def consistentshape(userinput, origshape=False): # Loop count = 0 # Keep track of how many iterations have occurred start = time() # Keep track of when we begin looping - offset = ' ' * 4 # Offset the print statements + offset = ' ' * 4 # Offset the log statements exitreason = 'Unknown exit reason' # Catch everything else while True: count += 1 # Increment the count @@ -277,11 +277,11 @@ def consistentshape(userinput, origshape=False): xnewPop = [] for icand, (x, fval, fvalnew, probabilities, stepsizes) in enumerate(zip(xPop, fvalPop, fvalnewPop, probabilitiesPop, stepsizesPop)): - if verbose == 1: print(offset + label + 'Iteration %i; elapsed %0.1f s; objective: %0.3e' % (count, time() - start, fval)) # For more verbose, use other print statement below - if verbose >= 4: print('\n\n Count=%i \n x=%s \n probabilities=%s \n stepsizes=%s' % (count, x, probabilities, stepsizes)) + logger.info(offset + label + 'Iteration %i; elapsed %0.1f s; objective: %0.3e' % (count, time() - start, fval)) # For more verbose, use other print statement below + logger.debug('\n\n Count=%i \n x=%s \n probabilities=%s \n stepsizes=%s' % (count, x, probabilities, stepsizes)) if fvalnew == maxFitness: - print('Note: rerunning candidate %i since it did not complete in previous iteration ...\n' % (icand)) + logger.info('Note: rerunning candidate %i since it did not complete in previous iteration ...\n' % (icand)) xnew = dcp(x) # if maxFitness means error evaluating function (eg. 
preempted job on HPC) so rerun same param set xnewPop.append(xnew) else: @@ -297,7 +297,7 @@ def consistentshape(userinput, origshape=False): if newvalxmax[par]: newval = xmax[par] # Reset to the upper limit inrange = (newval != x[par]) - if verbose >= 4: print(offset*2 + 'count=%i r=%s, choice=%s, par=%s, x[par]=%s, pm=%s, step=%s, newval=%s, xmin=%s, xmax=%s, inrange=%s' % (count, r, choice, par, x[par], (-1)**pm, stepsizes[choice], newval, xmin[par], xmax[par], inrange)) + logger.debug(offset*2 + 'count=%i r=%s, choice=%s, par=%s, x[par]=%s, pm=%s, step=%s, newval=%s, xmin=%s, xmax=%s, inrange=%s' % (count, r, choice, par, x[par], (-1)**pm, stepsizes[choice], newval, xmin[par], xmax[par], inrange)) if inrange: # Proceed as long as they're not equal break if not inrange: # Treat it as a failure if a value in range can't be found @@ -315,7 +315,7 @@ def consistentshape(userinput, origshape=False): fvalnewPop = function(xnewPop, args) # Calculate the objective function for the new parameter sets - print('\n') + logger.debug('\n') for icand, (x, xnew, fval, fvalorig, fvalnew, fvalold, fvals, probabilities, stepsizes, abserrorhistory, relerrorhistory) in \ enumerate(zip(xPop, xnewPop, fvalPop, fvalorigPop, fvalnewPop, fvaloldPop, fvalsPop, probabilitiesPop, stepsizesPop, abserrorhistoryPop, relerrorhistoryPop)): @@ -336,7 +336,7 @@ def consistentshape(userinput, origshape=False): ratio = 1.0 abserrorhistory[np.mod(count, stalliters)] = max(0, fval-fvalnew) # Keep track of improvements in the error relerrorhistory[np.mod(count, stalliters)] = max(0, ratio-1.0) # Keep track of improvements in the error - if verbose >= 3: print(offset + 'candidate %d, step=%i choice=%s, par=%s, pm=%s, origval=%s, newval=%s' % (icand, count, choice, par, pm, x[par], xnew[par])) + logger.debug(offset + 'candidate %d, step=%i choice=%s, par=%s, pm=%s, origval=%s, newval=%s' % (icand, count, choice, par, pm, x[par], xnew[par])) # Check if this step was an improvement fvalold = float(fval) # 
Store old fval @@ -351,9 +351,9 @@ def consistentshape(userinput, origshape=False): stepsizes[choice] = stepsizes[choice] / sdec # Decrease size of step for next time flag = '--' # Marks no change if np.isnan(fvalnew): - if verbose >= 1: print('ASD: Warning, objective function returned NaN') + logger.info('ASD: Warning, objective function returned NaN') - if verbose >= 2: print(offset + label + 'candidate %d, step %i (%0.1f s) %s (orig: %s | best:%s | new:%s | diff:%s)' % ((icand, count, time() - start, flag) + sigfig([fvalorig, fvalold, fvalnew, fvalnew - fvalold]))) + logger.debug(offset + label + 'candidate %d, step %i (%0.1f s) %s (orig: %s | best:%s | new:%s | diff:%s)' % ((icand, count, time() - start, flag) + sigfig([fvalorig, fvalold, fvalnew, fvalnew - fvalold]))) # Store output information fvals[count] = float(fval) # Store objective function evaluations @@ -361,7 +361,7 @@ def consistentshape(userinput, origshape=False): xPop[icand], xnewPop[icand], fvalPop[icand], fvalorigPop[icand], fvalnewPop[icand], fvaloldPop[icand], fvalsPop[icand], probabilitiesPop[icand], stepsizesPop[icand], abserrorhistoryPop[icand], relerrorhistoryPop[icand], allstepsPop[icand] = x, xnew, fval, fvalorig, fvalnew, fvalold, fvals, probabilities, stepsizes, abserrorhistory, relerrorhistory, allsteps - print('\n') + logger.debug('\n') if saveFile: sim.saveJSON(saveFile, {'x': allstepsPop, 'fvals': fvalsPop}) @@ -385,10 +385,9 @@ def consistentshape(userinput, origshape=False): break # Return - if verbose >= 2: - print('\n=== %s %s (steps: %i) ===' % (label, exitreason, count)) - for icand, fvals in enumerate(fvalsPop): - print(' == candidate: %d | orig: %s | best: %s | ratio: %s ==' % ((icand,) + sigfig([fvals[0], fvals[-1], fvals[-1] / fvals[0]]))) + logger.debug('\n=== %s %s (steps: %i) ===' % (label, exitreason, count)) + for icand, fvals in enumerate(fvalsPop): + logger.debug(' == candidate: %d | orig: %s | best: %s | ratio: %s ==' % ((icand,) + sigfig([fvals[0], fvals[-1], 
fvals[-1] / fvals[0]]))) output = {} @@ -437,9 +436,9 @@ def runASDJob(script, cfgSavePath, netParamsSavePath, simDataPath): import os - print('\nJob in rank id: ',pc.id()) + logger.info('Job in rank id: ' + pc.id()) command = 'nrniv %s simConfig=%s netParams=%s' % (script, cfgSavePath, netParamsSavePath) - print(command) + logger.info(command) with open(simDataPath+'.run', 'w') as outf, open(simDataPath+'.err', 'w') as errf: pid = Popen(command.split(' '), stdout=outf, stderr=errf, preexec_fn=os.setsid).pid @@ -532,9 +531,9 @@ def evaluator(candidates, args): self.setCfgNestedParam(paramLabel, paramVal) # modify cfg instance with candidate values - print(paramLabels, candidate) + logger.info(paramLabels, candidate) for label, value in zip(paramLabels, candidate): - print('set %s=%s' % (label, value)) + logger.info('set %s=%s' % (label, value)) self.setCfgNestedParam(label, value) #self.setCfgNestedParam("filename", jobPath) @@ -551,7 +550,7 @@ def evaluator(candidates, args): # MPI master-slaves # ---------------------------------------------------------------------- pc.submit(runASDJob, script, cfgSavePath, netParamsSavePath, jobPath) - print('-'*80) + logger.info('-'*80) else: # ---------------------------------------------------------------------- @@ -586,9 +585,9 @@ def evaluator(candidates, args): # ---------------------------------------------------------------------- # save job and run # ---------------------------------------------------------------------- - print('Submitting job ', jobName) - print(jobString) - print('-'*80) + logger.info('Submitting job ' + jobName) + logger.info(jobString) + logger.info('-'*80) # save file batchfile = '%s.sbatch' % (jobPath) with open(batchfile, 'w') as text_file: @@ -611,11 +610,11 @@ def evaluator(candidates, args): else: with open(jobPath+'.jobid', 'r') as outf: read=outf.readline() - print(read) + logger.info(read) if len(read) > 0: jobid = int(read.split()[-1]) jobids[candidate_index] = jobid - print('jobids', 
jobids) + logger.info('jobids' + jobids) total_jobs += 1 sleep(0.1) @@ -635,9 +634,9 @@ def evaluator(candidates, args): num_iters = 0 jobs_completed = 0 fitness = [None for cand in candidates] - # print outfilestem - print("Waiting for jobs from generation %d/%d ..." %(ngen, args.get('maxiters'))) - # print "PID's: %r" %(pids) + # log outfilestem + logger.info("Waiting for jobs from generation %d/%d ..." %(ngen, args.get('maxiters'))) + # log "PID's: %r" %(pids) # start fitness calculation while jobs_completed < total_jobs: unfinished = [i for i, x in enumerate(fitness) if x is None ] @@ -649,17 +648,15 @@ def evaluator(candidates, args): simData = json.load(file)['simData'] fitness[candidate_index] = fitnessFunc(simData, **fitnessFuncArgs) jobs_completed += 1 - print(' Candidate %d fitness = %.1f' % (candidate_index, fitness[candidate_index])) + logger.info(' Candidate %d fitness = %.1f' % (candidate_index, fitness[candidate_index])) except Exception as e: - # print err = "There was an exception evaluating candidate %d:"%(candidate_index) - print(("%s \n %s"%(err,e))) - #pass + logger.warning("%s \n %s"%(err,e)) #print 'Error evaluating fitness of candidate %d'%(candidate_index) num_iters += 1 - print('completed: %d' %(jobs_completed)) + logger.info('completed: %d' %(jobs_completed)) if num_iters >= args.get('maxiter_wait', 5000): - print("Max iterations reached, the %d unfinished jobs will be canceled and set to default fitness" % (len(unfinished))) + logger.warning("Max iterations reached, the %d unfinished jobs will be canceled and set to default fitness" % (len(unfinished))) for canditade_index in unfinished: fitness[canditade_index] = maxFitness # rerun those that didn't complete; jobs_completed += 1 @@ -704,9 +701,9 @@ def evaluator(candidates, args): # don't want to to this for hpcs since jobs are running on compute nodes not master - print("-" * 80) - print(" Completed a generation ") - print("-" * 80) + logger.info("-" * 80) + logger.info(" Completed a 
generation ") + logger.info("-" * 80) return fitness # single candidate for now @@ -795,14 +792,14 @@ def evaluator(candidates, args): saveFile = '%s/%s_temp_output.json' % (self.saveFolder, self.batchLabel) output = asd(evaluator, x0, saveFile, **kwargs) - # print best and finish + # log best and finish bestFval = np.min(output['fval']) bestX = output['x'][np.argmin(output['fval'])] - print('\nBest Solution with fitness = %.4g: \n' % (bestFval), bestX) - print("-" * 80) - print(" Completed adaptive stochasitc parameter optimization ") - print("-" * 80) + logger.info('Best Solution with fitness = %.4g: \n' % (bestFval) + bestX) + logger.info("-" * 80) + logger.info(" Completed adaptive stochasitc parameter optimization ") + logger.info("-" * 80) sim.saveJSON('%s/%s_output.json' % (self.saveFolder, self.batchLabel), output) #sleep(1) diff --git a/netpyne/batch/batch.py b/netpyne/batch/batch.py index 059efa5d1..0e139b1d9 100644 --- a/netpyne/batch/batch.py +++ b/netpyne/batch/batch.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import @@ -42,12 +41,13 @@ from .grid import gridSearch from .evol import evolOptim from .asd_parallel import asdOptim +from netpyne.logger import logger try: from .optuna_parallel import optunaOptim except: pass - # print('Warning: Could not import "optuna" package...') + # logger.warning('Warning: Could not import "optuna" package...') pc = h.ParallelContext() # use bulletin board master/slave @@ -153,7 +153,7 @@ def save(self, filename): from .. import sim #from json import encoder #encoder.FLOAT_REPR = lambda o: format(o, '.12g') - print(('Saving batch to %s ... ' % (filename))) + logger.info('Saving batch to %s ... 
' % (filename)) sim.saveJSON(filename, dataSave) @@ -254,4 +254,4 @@ def run(self): try: optunaOptim(self, pc) except: - print(' Warning: an exception occurred when running Optuna optimization...') + logger.warning('An exception occurred when running Optuna optimization...') diff --git a/netpyne/batch/evol.py b/netpyne/batch/evol.py index 543d77c97..741196e7b 100644 --- a/netpyne/batch/evol.py +++ b/netpyne/batch/evol.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import @@ -40,6 +39,7 @@ from netpyne import specs from .utils import createFolder from .utils import bashTemplate +from netpyne.logger import logger pc = h.ParallelContext() # use bulletin board master/slave @@ -73,10 +73,10 @@ def runEvolJob(nrnCommand, script, cfgSavePath, netParamsSavePath, simDataPath): """ import os - print('\nJob in rank id: ',pc.id()) + logger.info('Job in rank id: ' + pc.id()) command = '%s %s simConfig=%s netParams=%s' % (nrnCommand, script, cfgSavePath, netParamsSavePath) - print(command) + logger.info(command) with open(simDataPath+'.run', 'w') as outf, open(simDataPath+'.err', 'w') as errf: pid = Popen(command.split(' '), stdout=outf, stderr=errf, preexec_fn=os.setsid).pid @@ -173,7 +173,7 @@ def evaluator(candidates, args): # modify cfg instance with candidate values for label, value in zip(paramLabels, candidate): - print('set %s=%s' % (label, value)) + logger.info('set %s=%s' % (label, value)) self.setCfgNestedParam(label, value) #self.setCfgNestedParam("filename", jobPath) @@ -190,7 +190,7 @@ def evaluator(candidates, args): # MPI master-slaves # ---------------------------------------------------------------------- pc.submit(runEvolJob, nrnCommand, script, cfgSavePath, netParamsSavePath, jobPath) - print('-'*80) + logger.info('-'*80) else: # ---------------------------------------------------------------------- @@ -228,9 +228,9 @@ def 
evaluator(candidates, args): # ---------------------------------------------------------------------- # save job and run # ---------------------------------------------------------------------- - print('Submitting job ', jobName) - print(jobString) - print('-'*80) + logger.info('Submitting job ', jobName) + logger.info(jobString) + logger.info('-'*80) # save file batchfile = '%s.sbatch' % (jobPath) with open(batchfile, 'w') as text_file: @@ -244,11 +244,11 @@ def evaluator(candidates, args): #read = proc.stdout.read() with open(jobPath+'.jobid', 'r') as outf: read=outf.readline() - print(read) + logger.info(read) if len(read) > 0: jobid = int(read.split()[-1]) jobids[candidate_index] = jobid - print('jobids', jobids) + logger.info('jobids', jobids) total_jobs += 1 sleep(0.1) @@ -268,9 +268,9 @@ def evaluator(candidates, args): num_iters = 0 jobs_completed = 0 fitness = [None for cand in candidates] - # print outfilestem - print("Waiting for jobs from generation %d/%d ..." %(ngen, args.get('max_generations'))) - # print "PID's: %r" %(pids) + # logger.info outfilestem + logger.info("Waiting for jobs from generation %d/%d ..." 
%(ngen, args.get('max_generations'))) + # logger.info "PID's: %r" %(pids) # start fitness calculation while jobs_completed < total_jobs: unfinished = [i for i, x in enumerate(fitness) if x is None ] @@ -282,23 +282,21 @@ def evaluator(candidates, args): simData = json.load(file)['simData'] fitness[candidate_index] = fitnessFunc(simData, **fitnessFuncArgs) jobs_completed += 1 - print(' Candidate %d fitness = %.1f' % (candidate_index, fitness[candidate_index])) + logger.info(' Candidate %d fitness = %.1f' % (candidate_index, fitness[candidate_index])) elif os.path.isfile(jobNamePath+'.pkl'): with open('%s.pkl'% (jobNamePath), 'rb') as file: simData = pickle.load(file)['simData'] fitness[candidate_index] = fitnessFunc(simData, **fitnessFuncArgs) jobs_completed += 1 - print(' Candidate %d fitness = %.1f' % (candidate_index, fitness[candidate_index])) + logger.info(' Candidate %d fitness = %.1f' % (candidate_index, fitness[candidate_index])) except Exception as e: - # print err = "There was an exception evaluating candidate %d:"%(candidate_index) - print(("%s \n %s"%(err,e))) - #pass - #print 'Error evaluating fitness of candidate %d'%(candidate_index) + logger.warning("%s \n %s"%(err,e)) + #logger.warning 'Error evaluating fitness of candidate %d'%(candidate_index) num_iters += 1 - print('completed: %d' %(jobs_completed)) + logger.info('completed: %d' %(jobs_completed)) if num_iters >= args.get('maxiter_wait', 5000): - print("Max iterations reached, the %d unfinished jobs will be canceled and set to default fitness" % (len(unfinished))) + logger.warning("Max iterations reached, the %d unfinished jobs will be canceled and set to default fitness" % (len(unfinished))) for canditade_index in unfinished: fitness[canditade_index] = defaultFitness jobs_completed += 1 @@ -333,9 +331,9 @@ def evaluator(candidates, args): # except: # pass # return - print("-"*80) - print(" Completed a generation ") - print("-"*80) + logger.info("-"*80) + logger.info(" Completed a generation ") + 
logger.info("-"*80) return fitness @@ -487,9 +485,9 @@ def nonuniform_bounds_mutation(random, candidate, args): stats_file.close() ind_stats_file.close() - # print best and finish - print(('Best Solution: \n{0}'.format(str(max(final_pop))))) - print("-"*80) - print(" Completed evolutionary algorithm parameter optimization ") - print("-"*80) + # log best and finish + logger.info('Best Solution: \n{0}'.format(str(max(final_pop)))) + logger.info("-"*80) + logger.info(" Completed evolutionary algorithm parameter optimization ") + logger.info("-"*80) sys.exit() diff --git a/netpyne/batch/grid.py b/netpyne/batch/grid.py index 9d7bd3ff8..664ee0f85 100644 --- a/netpyne/batch/grid.py +++ b/netpyne/batch/grid.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import @@ -39,6 +38,7 @@ from netpyne import specs from .utils import createFolder from .utils import bashTemplate +from netpyne.logger import logger pc = h.ParallelContext() # use bulletin board master/slave @@ -70,11 +70,11 @@ def runJob(script, cfgSavePath, netParamsSavePath, processes): """ - print('\nJob in rank id: ',pc.id()) + logger.info('Job in rank id: ' + pc.id()) command = 'nrniv %s simConfig=%s netParams=%s' % (script, cfgSavePath, netParamsSavePath) - print(command+'\n') + logger.info(command+'\n') proc = Popen(command.split(' '), stdout=PIPE, stderr=PIPE) - print(proc.stdout.read().decode()) + logger.info(proc.stdout.read().decode()) processes.append(proc) @@ -187,13 +187,13 @@ def gridSearch(self, pc): iComb = [] pComb = [] - print(iComb, pComb) + logger.info(iComb, pComb) for i, paramVal in enumerate(pComb): paramLabel = labelList[i] self.setCfgNestedParam(paramLabel, paramVal) - print(str(paramLabel)+' = '+str(paramVal)) + logger.info(str(paramLabel)+' = '+str(paramVal)) # set simLabel and jobName simLabel = self.batchLabel+''.join([''.join('_'+str(i)) for i in iComb]) @@ -203,11 +203,11 
@@ def gridSearch(self, pc): # skip if output file already exists if self.runCfg.get('skip', False) and glob.glob(jobName+'.json'): - print('Skipping job %s since output file already exists...' % (jobName)) + logger.warning('Skipping job %s since output file already exists...' % (jobName)) elif self.runCfg.get('skipCfg', False) and glob.glob(jobName+'_cfg.json'): - print('Skipping job %s since cfg file already exists...' % (jobName)) + logger.warning('Skipping job %s since cfg file already exists...' % (jobName)) elif self.runCfg.get('skipCustom', None) and glob.glob(jobName+self.runCfg['skipCustom']): - print('Skipping job %s since %s file already exists...' % (jobName, self.runCfg['skipCustom'])) + logger.warning('Skipping job %s since %s file already exists...' % (jobName, self.runCfg['skipCustom'])) else: # save simConfig json to saveFolder self.cfg.simLabel = simLabel @@ -248,8 +248,8 @@ def gridSearch(self, pc): """ % (jobName, walltime, queueName, nodesppn, jobName, jobName, custom, command) # Send job_string to qsub - print('Submitting job ',jobName) - print(jobString+'\n') + logger.info('Submitting job ' + jobName) + logger.info(jobString + '\n') batchfile = '%s.pbs'%(jobName) with open(batchfile, 'w') as text_file: @@ -304,8 +304,8 @@ def gridSearch(self, pc): # Send job_string to sbatch - print('Submitting job ',jobName) - print(jobString+'\n') + logger.info('Submitting job ' + jobName) + logger.info(jobString+'\n') batchfile = '%s.sbatch'%(jobName) with open(batchfile, 'w') as text_file: @@ -320,7 +320,7 @@ def gridSearch(self, pc): # eg. 
usage: python batch.py elif self.runCfg.get('type',None) == 'mpi_direct': jobName = self.saveFolder+'/'+simLabel - print('Running job ',jobName) + logger.info('Running job ' + jobName) cores = self.runCfg.get('cores', 1) folder = self.runCfg.get('folder', '.') script = self.runCfg.get('script', 'init.py') @@ -329,7 +329,7 @@ def gridSearch(self, pc): command = '%s -n %d nrniv -python -mpi %s simConfig=%s netParams=%s' % (mpiCommand, cores, script, cfgSavePath, netParamsSavePath) - print(command+'\n') + logger.info(command+'\n') proc = Popen(command.split(' '), stdout=open(jobName+'.run','w'), stderr=open(jobName+'.err','w')) processes.append(proc) processFiles.append(jobName+'.run') @@ -339,33 +339,34 @@ def gridSearch(self, pc): elif self.runCfg.get('type',None) == 'mpi_bulletin': jobName = self.saveFolder+'/'+simLabel printOutput = self.runCfg.get('printOutput', False) - print('Submitting job ',jobName) + logger.info('Submitting job ' + jobName) # master/slave bulletin board schedulling of jobs pc.submit(runJob, self.runCfg.get('script', 'init.py'), cfgSavePath, netParamsSavePath, processes) else: - print(self.runCfg) - print("Error: invalid runCfg 'type' selected; valid types are 'mpi_bulletin', 'mpi_direct', 'hpc_slurm', 'hpc_torque'") + logger.warning(self.runCfg) + logger.warning("Error: invalid runCfg 'type' selected; valid types are 'mpi_bulletin', 'mpi_direct', 'hpc_slurm', 'hpc_torque'") import sys sys.exit(0) sleep(sleepInterval) # avoid saturating scheduler - print("-"*80) - print(" Finished submitting jobs for grid parameter exploration ") - print("-" * 80) + logger.info("-"*80) + logger.info(" Finished submitting jobs for grid parameter exploration ") + logger.info("-" * 80) while pc.working(): sleep(sleepInterval) - + outfiles = [] for procFile in processFiles: outfiles.append(open(procFile, 'r')) while any([proc.poll() is None for proc in processes]): for i, proc in enumerate(processes): - newline = outfiles[i].readline() - if len(newline) > 1: - 
print(newline, end='') - + newline = outfiles[i].readline() + if len(newline) > 1: + # TODO this needs to be changed to logger - but check how to better do it + print(newline, end='') + #sleep(sleepInterval) # attempt to terminate completed processes diff --git a/netpyne/batch/optuna_parallel.py b/netpyne/batch/optuna_parallel.py index 1a5593dc2..73ea0652d 100644 --- a/netpyne/batch/optuna_parallel.py +++ b/netpyne/batch/optuna_parallel.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import @@ -45,6 +44,7 @@ from .utils import createFolder from .utils import bashTemplate from .utils import dcp, sigfig +from netpyne.logger import logger pc = h.ParallelContext() # use bulletin board master/slave @@ -82,9 +82,9 @@ def runJob(nrnCommand, script, cfgSavePath, netParamsSavePath, simDataPath): """ import os - print('\nJob in rank id: ',pc.id()) + logger.info('Job in rank id: ' + pc.id()) command = '%s %s simConfig=%s netParams=%s' % (nrnCommand, script, cfgSavePath, netParamsSavePath) - print(command) + logger.info(command) with open(simDataPath+'.run', 'w') as outf, open(simDataPath+'.err', 'w') as errf: pid = Popen(command.split(' '), stdout=outf, stderr=errf, preexec_fn=os.setsid).pid @@ -187,9 +187,9 @@ def objective(trial, args): self.setCfgNestedParam(paramLabel, paramVal) # modify cfg instance with candidate values - #print(paramLabels, candidate) + #logger.info(paramLabels, candidate) for label, value in zip(paramLabels, candidate): - print('set %s=%s' % (label, value)) + logger.info('set %s=%s' % (label, value)) self.setCfgNestedParam(label, value) #self.setCfgNestedParam("filename", jobPath) @@ -206,7 +206,7 @@ def objective(trial, args): # MPI master-slaves # ---------------------------------------------------------------------- pc.submit(runJob, nrnCommand, script, cfgSavePath, netParamsSavePath, jobPath) - print('-'*80) + logger.info('-'*80) 
else: # ---------------------------------------------------------------------- @@ -244,9 +244,9 @@ def objective(trial, args): # ---------------------------------------------------------------------- # save job and run # ---------------------------------------------------------------------- - print('Submitting job ', jobName) - print(jobString) - print('-'*80) + logger.info('Submitting job ' + jobName) + logger.info(jobString) + logger.info('-'*80) # save file batchfile = '%s.sbatch' % (jobPath) with open(batchfile, 'w') as text_file: @@ -269,11 +269,12 @@ def objective(trial, args): else: with open(jobPath+'.jobid', 'r') as outf: read=outf.readline() - print(read) + logger.info(read) if len(read) > 0: jobid = int(read.split()[-1]) jobids[candidate_index] = jobid - print('jobids', jobids) + logger.info('jobids') + logger.info(jobids) total_jobs += 1 sleep(0.1) @@ -293,9 +294,9 @@ def objective(trial, args): num_iters = 0 jobs_completed = 0 fitness = [None] # just 1 candidate - # print outfilestem - print("Waiting for jobs from generation %d/%d ..." %(ngen, args.get('maxiters'))) - # print "PID's: %r" %(pids) + # log outfilestem + logger.info("Waiting for jobs from generation %d/%d ..." 
%(ngen, args.get('maxiters'))) + # logger.info "PID's: %r" %(pids) # start fitness calculation while jobs_completed < total_jobs: unfinished = [i for i, x in enumerate(fitness) if x is None ] @@ -307,20 +308,20 @@ def objective(trial, args): simData = json.load(file)['simData'] fitness[candidate_index] = fitnessFunc(simData, **fitnessFuncArgs) jobs_completed += 1 - print(' Candidate %d fitness = %.1f' % (candidate_index, fitness[candidate_index])) + logger.info(' Candidate %d fitness = %.1f' % (candidate_index, fitness[candidate_index])) elif os.path.isfile(jobNamePath+'.pkl'): with open('%s.pkl'% (jobNamePath), 'rb') as file: simData = pickle.load(file)['simData'] fitness[candidate_index] = fitnessFunc(simData, **fitnessFuncArgs) jobs_completed += 1 - print(' Candidate %d fitness = %.1f' % (candidate_index, fitness[candidate_index])) + logger.info(' Candidate %d fitness = %.1f' % (candidate_index, fitness[candidate_index])) except Exception as e: err = "There was an exception evaluating candidate %d:"%(candidate_index) - print(("%s \n %s"%(err,e))) + logger.warning("%s \n %s"%(err,e)) num_iters += 1 - print('completed: %d' %(jobs_completed)) + logger.info('completed: %d' %(jobs_completed)) if num_iters >= args.get('maxiter_wait', 5000): - print("Max iterations reached, the %d unfinished jobs will be canceled and set to default fitness" % (len(unfinished))) + logger.warning("Max iterations reached, the %d unfinished jobs will be canceled and set to default fitness" % (len(unfinished))) for canditade_index in unfinished: fitness[canditade_index] = maxFitness # rerun those that didn't complete; jobs_completed += 1 @@ -365,9 +366,9 @@ def objective(trial, args): # don't want to to this for hpcs since jobs are running on compute nodes not master - print("-" * 80) - print(" Completed a generation ") - print("-" * 80) + logger.info("-" * 80) + logger.info(" Completed a generation ") + logger.info("-" * 80) return fitness[0] # single candidate for now @@ -435,29 +436,30 
@@ def objective(trial, args): try: study.optimize(lambda trial: objective(trial, args), n_trials=args['maxiters'], timeout=args['maxtime']) except Exception as e: - print(e) + logger.warning(e) - # print best and finish + # log best and finish if rank == size-1: df = study.trials_dataframe(attrs=('number', 'value', 'params', 'state')) importance = optuna.importance.get_param_importances(study=study) - print('\nBest trial: ', study.best_trial) - print('\nParameter importance: ', dict(importance)) + logger.info('Best trial: ' + study.best_trial) + logger.info('Parameter importance:') + logger.info(dict(importance)) - print('\nBest Solution with fitness = %.4g: \n' % (study.best_value), study.best_params) + logger.info('Best Solution with fitness = %.4g:' % (study.best_value)) + logger.info(study.best_params) - print('\nSaving to output.pkl...\n') + logger.info('Saving to output.pkl...') output = {'study': study, 'df': df, 'importance': importance} with open('%s/%s_output.pkl' % (self.saveFolder, self.batchLabel), 'wb') as f: pickle.dump(output, f) sleep(1) - print("-" * 80) - print(" Completed Optuna parameter optimization ") - print("-" * 80) - + logger.info("-" * 80) + logger.info(" Completed Optuna parameter optimization ") + logger.info("-" * 80) sys.exit() diff --git a/netpyne/batch/utils.py b/netpyne/batch/utils.py index 45ca1b0f9..c443a3b25 100644 --- a/netpyne/batch/utils.py +++ b/netpyne/batch/utils.py @@ -4,7 +4,6 @@ """ from __future__ import unicode_literals -from __future__ import print_function from __future__ import division from __future__ import absolute_import @@ -12,6 +11,7 @@ standard_library.install_aliases() import numpy as np +from netpyne.logger import logger # ------------------------------------------------------------------------------- # function to create a folder if it does not exist @@ -36,7 +36,7 @@ def createFolder(folder): try: os.mkdir(folder) except OSError: - print(' Could not create %s' %(folder)) + logger.warning('Could not 
create %s' % folder) # ------------------------------------------------------------------------------- @@ -118,9 +118,9 @@ def cp(obj, verbose=True, die=True): output = copy.copy(obj) except Exception as E: output = obj - errormsg = 'Warning: could not perform shallow copy, returning original object: %s' % str(E) + errormsg = 'Could not perform shallow copy, returning original object: %s' % str(E) if die: raise Exception(errormsg) - else: print(errormsg) + else: logger.warning(errormsg) return output def dcp(obj, verbose=True, die=False): @@ -150,9 +150,9 @@ def dcp(obj, verbose=True, die=False): output = copy.deepcopy(obj) except Exception as E: output = cp(obj) - errormsg = 'Warning: could not perform deep copy, performing shallow instead: %s' % str(E) + errormsg = 'Could not perform deep copy, performing shallow instead: %s' % str(E) if die: raise Exception(errormsg) - else: print(errormsg) + else: logger.warning(errormsg) return output diff --git a/netpyne/cell/NML2SpikeSource.py b/netpyne/cell/NML2SpikeSource.py index 96c488dae..6d9d8ba7b 100644 --- a/netpyne/cell/NML2SpikeSource.py +++ b/netpyne/cell/NML2SpikeSource.py @@ -49,4 +49,4 @@ def initRandom(self): self.secs['soma']['pointps'][self.tags['cellType']].hObj.noiseFromRandom(rand) # use random number generator sim._init_stim_randomizer(rand, self.tags['pop'], self.tags['cellLabel'], seed) randContainer['hRandom'].negexp(1) - #print("Created Random: %s with %s (%s)"%(rand,seed, sim.cfg.seeds)) + #logger.debug("Created Random: %s with %s (%s)"%(rand,seed, sim.cfg.seeds)) diff --git a/netpyne/cell/cell.py b/netpyne/cell/cell.py index 38744bbbb..c5f5291aa 100644 --- a/netpyne/cell/cell.py +++ b/netpyne/cell/cell.py @@ -4,7 +4,6 @@ """ from __future__ import division -from __future__ import print_function from __future__ import unicode_literals from __future__ import absolute_import @@ -22,7 +21,7 @@ from copy import deepcopy from neuron import h # Import NEURON from ..specs import Dict - +from netpyne.logger 
import logger ############################################################################### # @@ -135,7 +134,7 @@ def addNetStim (self, params, stimContainer=None): self.stims.append(Dict(params.copy())) # add new stim to Cell object stimContainer = self.stims[-1] - if sim.cfg.verbose: print((' Created %s NetStim for cell gid=%d'% (params['source'], self.gid))) + logger.debug(' Created %s NetStim for cell gid=%d'% (params['source'], self.gid)) if sim.cfg.createNEURONObj: rand = h.Random() @@ -148,9 +147,9 @@ def addNetStim (self, params, stimContainer=None): netstim.interval = 0.1**-1*1e3 # inverse of the frequency and then convert from Hz^-1 to ms (set very low) netstim.noise = params['noise'] except: - print('Error: tried to create variable rate NetStim but NSLOC mechanism not available') + logger.warning('Error: tried to create variable rate NetStim but NSLOC mechanism not available') else: - print('Error: Unknown stimulation rate type: %s'%(h.params['rate'])) + logger.warning('Error: Unknown stimulation rate type: %s'%(h.params['rate'])) else: netstim = h.NetStim() netstim.interval = params['rate']**-1*1e3 # inverse of the frequency and then convert from Hz^-1 to ms @@ -257,7 +256,7 @@ def recordTraces (self): ptr.extend([getattr(conn[params['mech']], '_ref_'+params['var'])]) secLocs.extend([params['sec']+'_conn_'+str(conn_idx)]) else: - print("Error recording conn trace, you need to specify the conn mech to record from.") + logger.warning("Error recording conn trace, you need to specify the conn mech to record from.") elif 'var' in params: # point process cell eg. 
cell._ref_v ptr = getattr(self.hPointp, '_ref_'+params['var']) @@ -301,15 +300,14 @@ def recordTraces (self): else: sim.simData[key]['cell_'+str(self.gid)] = h.Vector(sim.cfg.duration/sim.cfg.recordStep+1).resize(0) sim.simData[key]['cell_'+str(self.gid)].record(ptr, sim.cfg.recordStep) - if sim.cfg.verbose: - print(' Recording ', key, 'from cell ', self.gid, ' with parameters: ',str(params)) - print(sim.simData[key]['cell_'+str(self.gid)]) + logger.debug(' Recording ', key, 'from cell ', self.gid, ' with parameters: ',str(params)) + logger.debug(sim.simData[key]['cell_'+str(self.gid)]) except: - if sim.cfg.verbose: print(' Cannot record ', key, 'from cell ', self.gid) + logger.debug(' Cannot record ' + key + 'from cell ' + self.gid) else: - if sim.cfg.verbose: print(' Conditions preclude recording ', key, ' from cell ', self.gid) + logger.debug(' Conditions preclude recording ' + key + ' from cell ' + self.gid) #else: - # if sim.cfg.verbose: print ' NOT recording ', key, 'from cell ', self.gid, ' with parameters: ',str(params) + # logger.debug ' NOT recording ', key, 'from cell ', self.gid, ' with parameters: ',str(params) diff --git a/netpyne/cell/compartCell.py b/netpyne/cell/compartCell.py index 68331a154..d1d3bd6f4 100644 --- a/netpyne/cell/compartCell.py +++ b/netpyne/cell/compartCell.py @@ -4,7 +4,6 @@ """ from __future__ import division -from __future__ import print_function from __future__ import unicode_literals from __future__ import absolute_import @@ -29,7 +28,7 @@ from math import sin, cos from .cell import Cell from ..specs import Dict - +from netpyne.logger import logger ############################################################################### # @@ -267,14 +266,14 @@ def __dipoleInsert(self, secName, sec): try: sec['hObj'].insert('dipole') except: - print('Error inserting dipole mechanism') + logger.warning('Error inserting dipole mechanism') return -1 # insert Dipole point process (dipole_pp.mod) try: sec['hDipole_pp'] = h.Dipole(1.0, sec = 
sec['hObj']) except: - print('Error inserting Dipole point process') + logger.warning('Error inserting Dipole point process') return -1 dpp = sec['hDipole_pp'] @@ -366,8 +365,7 @@ def createNEURONObj (self, prop): sec['hObj'].insert(mechName) except: mechInsertError = True - if sim.cfg.verbose: - print('# Error inserting %s mechanims in %s section! (check mod files are compiled)'%(mechName, sectName)) + logger.debug('# Error inserting %s mechanims in %s section! (check mod files are compiled)'%(mechName, sectName)) continue for mechParamName,mechParamValue in mechParams.items(): # add params of the mechanism mechParamValueFinal = mechParamValue @@ -389,8 +387,7 @@ def createNEURONObj (self, prop): sec['hObj'].insert(ionName+'_ion') # insert mechanism except: mechInsertError = True - if sim.cfg.verbose: - print('# Error inserting %s ion in %s section!'%(ionName, sectName)) + logger.debug('# Error inserting %s ion in %s section!'%(ionName, sectName)) continue for ionParamName,ionParamValue in ionParams.items(): # add params of the mechanism ionParamValueFinal = ionParamValue @@ -406,7 +403,7 @@ def createNEURONObj (self, prop): setattr(seg, '%si'%ionName, ionParamValueFinal) h('%si0_%s_ion = %s'%(ionName,ionName,ionParamValueFinal)) # e.g. 
cai0_ca_ion, the default initial value - #if sim.cfg.verbose: print("Updated ion: %s in %s, e: %s, o: %s, i: %s" % \ + # logger.debug("Updated ion: %s in %s, e: %s, o: %s, i: %s" % \ # (ionName, sectName, seg.__getattribute__('e'+ionName), seg.__getattribute__(ionName+'o'), seg.__getattribute__(ionName+'i'))) # add synMechs (only used when loading because python synMechs already exist) @@ -454,9 +451,9 @@ def createNEURONObj (self, prop): if 'mechs' in sectParams and 'dipole' in sectParams['mechs']: self.__dipoleInsert(sectName, sec) # add dipole mechanisms to each section - # Print message about error inserting mechanisms + # Log message about error inserting mechanisms if mechInsertError: - print("ERROR: Some mechanisms and/or ions were not inserted (for details run with cfg.verbose=True). Make sure the required mod files are compiled.") + logger.warning("ERROR: Some mechanisms and/or ions were not inserted (for details run with cfg.verbose=True). Make sure the required mod files are compiled.") def addSynMechNEURONObj(self, synMech, synMechParams, sec, loc): @@ -536,9 +533,11 @@ def addConnsNEURONObj(self): try: postTarget = synMech['hObj'] except: - print('\nError: no synMech available for conn: ', conn) - print(' cell tags: ',self.tags) - print(' cell synMechs: ',self.secs[conn['sec']]['synMechs']) + logger.warning('Error: no synMech available for conn: ' + conn) + logger.warning('cell tags:') + logger.warning(self.tags) + logger.warning('cell synMechs:') + logger.warning(self.secs[conn['sec']]['synMechs']) import sys sys.exit() @@ -680,7 +679,7 @@ def modifySynMechs (self, params): try: setattr(synMech['hObj'], synParamName, synParamValue) except: - print('Error setting %s=%s on synMech' % (synParamName, str(synParamValue))) + logger.warning('Error setting %s=%s on synMech' % (synParamName, str(synParamValue))) @@ -703,9 +702,9 @@ def addConn (self, params, netStimParams = None): # Only allow self connections if option selected by user # !!!! 
AD HOC RULE FOR HNN!!! - or 'soma' in secLabels and not self.tags['cellType'] == 'L5Basket' (removed) if sim.cfg.allowSelfConns: - if sim.cfg.verbose: print(' Warning: creating self-connection on cell gid=%d, section=%s '%(self.gid, params.get('sec'))) + logger.debug(' Warning: creating self-connection on cell gid=%d, section=%s '%(self.gid, params.get('sec'))) else: - if sim.cfg.verbose: print(' Error: attempted to create self-connection on cell gid=%d, section=%s '%(self.gid, params.get('sec'))) + logger.debug(' Error: attempted to create self-connection on cell gid=%d, section=%s '%(self.gid, params.get('sec'))) return # if self-connection return # Weight @@ -850,15 +849,13 @@ def addConn (self, params, netStimParams = None): # Add plasticity self._addConnPlasticity(params, sec, netcon, weightIndex) - if sim.cfg.verbose: - sec = params['sec'] if pointp else synMechSecs[i] - loc = params['loc'] if pointp else synMechLocs[i] - preGid = netStimParams['source']+' NetStim' if netStimParams else params['preGid'] - try: - print((' Created connection preGid=%s, postGid=%s, sec=%s, loc=%.4g, synMech=%s, weight=%.4g, delay=%.2f' - % (preGid, self.gid, sec, loc, params['synMech'], weights[i], delays[i]))) - except: - print((' Created connection preGid=%s' % (preGid))) + sec = params['sec'] if pointp else synMechSecs[i] + loc = params['loc'] if pointp else synMechLocs[i] + preGid = netStimParams['source']+' NetStim' if netStimParams else params['preGid'] + try: + logger.debug(' Created connection preGid=%s, postGid=%s, sec=%s, loc=%.4g, synMech=%s, weight=%.4g, delay=%.2f' % (preGid, self.gid, sec, loc, params['synMech'], weights[i], delays[i])) + except: + logger.debug(' Created connection preGid=%s' % (preGid)) def modifyConns (self, params): @@ -923,7 +920,7 @@ def modifyConns (self, params): break except: pass - #print('Warning: modifyConns() does not yet support conditions of presynaptic cells when running parallel sims') + #logger.warning('modifyConns() does not yet 
support conditions of presynaptic cells when running parallel sims') if conditionsMet: # if all conditions are met, set values for this cell if sim.cfg.createPyStruct: @@ -937,7 +934,7 @@ def modifyConns (self, params): else: setattr(conn['hObj'], paramName, paramValue) except: - print('Error setting %s=%s on Netcon' % (paramName, str(paramValue))) + logger.warning('Error setting %s=%s on Netcon' % (paramName, str(paramValue))) def modifyStims (self, params): @@ -1003,7 +1000,7 @@ def modifyStims (self, params): else: setattr(stim['hObj'], paramName, paramValue) except: - print('Error setting %s=%s on stim' % (paramName, str(paramValue))) + logger.warning('Error setting %s=%s on stim' % (paramName, str(paramValue))) @@ -1011,13 +1008,13 @@ def addStim (self, params): from .. import sim if not params['sec'] or (isinstance(params['sec'], basestring) and not params['sec'] in list(self.secs.keys())+list(self.secLists.keys())): - if sim.cfg.verbose: print(' Warning: no valid sec specified for stim on cell gid=%d so using soma or 1st available. Existing secs: %s; params: %s'%(self.gid, list(self.secs.keys()),params)) + logger.debug(' Warning: no valid sec specified for stim on cell gid=%d so using soma or 1st available. 
Existing secs: %s; params: %s'%(self.gid, list(self.secs.keys()),params)) if 'soma' in self.secs: params['sec'] = 'soma' # use 'soma' if exists elif self.secs: params['sec'] = list(self.secs.keys())[0] # if no 'soma', use first sectiona available else: - if sim.cfg.verbose: print(' Error: no Section available on cell gid=%d to add stim'%(self.gid)) + logger.debug(' Error: no Section available on cell gid=%d to add stim'%(self.gid)) return if not 'loc' in params: params['loc'] = 0.5 # default stim location @@ -1069,22 +1066,22 @@ def addStim (self, params): self.stims.append(Dict(params)) # add to python structure self.stims[-1]['hObj'] = stim # add stim object to dict in stims list - if sim.cfg.verbose: print((' Added %s %s to cell gid=%d, sec=%s, loc=%.4g%s'% - (params['source'], params['type'], self.gid, params['sec'], params['loc'], stringParams))) + logger.debug(' Added %s %s to cell gid=%d, sec=%s, loc=%.4g%s'% + (params['source'], params['type'], self.gid, params['sec'], params['loc'], stringParams)) else: - if sim.cfg.verbose: print(('Adding exotic stim (NeuroML 2 based?): %s'% params)) + logger.debug('Adding exotic stim (NeuroML 2 based?): %s'% params) sec = self.secs[params['sec']] stim = getattr(h, params['type'])(sec['hObj'](params['loc'])) stimParams = {k:v for k,v in params.items() if k not in ['type', 'source', 'loc', 'sec', 'label']} stringParams = '' for stimParamName, stimParamValue in stimParams.items(): # set mechanism internal params if isinstance(stimParamValue, list): - print("Can't set point process paramaters of type vector eg. VClamp.amp[3]") + logger.info("Can't set point process paramaters of type vector eg. 
VClamp.amp[3]") pass #setattr(stim, stimParamName._ref_[0], stimParamValue[0]) elif 'originalFormat' in params and stimParamName=='originalFormat' and params['originalFormat']=='NeuroML2_stochastic_input': - if sim.cfg.verbose: print((' originalFormat: %s'%(params['originalFormat']))) + logger.debug(' originalFormat: %s'%(params['originalFormat'])) rand = h.Random() stim_ref = params['label'][:params['label'].rfind(self.tags['pop'])] @@ -1103,8 +1100,8 @@ def addStim (self, params): self.stims.append(params) # add to python structure self.stims[-1]['hObj'] = stim # add stim object to dict in stims list - if sim.cfg.verbose: print((' Added %s %s to cell gid=%d, sec=%s, loc=%.4g%s'% - (params['source'], params['type'], self.gid, params['sec'], params['loc'], stringParams))) + logger.debug(' Added %s %s to cell gid=%d, sec=%s, loc=%.4g%s'% + (params['source'], params['type'], self.gid, params['sec'], params['loc'], stringParams)) def _setConnSections (self, params): @@ -1112,13 +1109,13 @@ def _setConnSections (self, params): # if no section specified or single section specified does not exist if not params.get('sec') or (isinstance(params.get('sec'), basestring) and not params.get('sec') in list(self.secs.keys())+list(self.secLists.keys())): - if sim.cfg.verbose: print(' Warning: no valid sec specified for connection to cell gid=%d so using soma or 1st available'%(self.gid)) + logger.debug(' Warning: no valid sec specified for connection to cell gid=%d so using soma or 1st available'%(self.gid)) if 'soma' in self.secs: params['sec'] = 'soma' # use 'soma' if exists elif self.secs: params['sec'] = list(self.secs.keys())[0] # if no 'soma', use first sectiona available else: - if sim.cfg.verbose: print(' Error: no Section available on cell gid=%d to add connection'%(self.gid)) + logger.debug(' Error: no Section available on cell gid=%d to add connection'%(self.gid)) sec = -1 # if no Sections available print error and exit return sec @@ -1130,7 +1127,7 @@ def 
_setConnSections (self, params): secLabels = [] for i,section in enumerate(secList): if section not in self.secs: # remove sections that dont exist; and corresponding weight and delay - if sim.cfg.verbose: print(' Error: Section %s not available so removing from list of sections for connection to cell gid=%d'%(section, self.gid)) + logger.debug(' Error: Section %s not available so removing from list of sections for connection to cell gid=%d'%(section, self.gid)) secList.remove(section) if isinstance(params['weight'], list): params['weight'].remove(params['weight'][i]) if isinstance(params['delay'], list): params['delay'].remove(params['delay'][i]) @@ -1181,7 +1178,7 @@ def _setConnPointP(self, params, secLabels, weightIndex): weightIndex = pointpParams['synList'].index(params.get('synMech')) # udpate weight index based pointp synList if pointp and params['synsPerConn'] > 1: # only single synapse per connection rule allowed - if sim.cfg.verbose: print(' Error: Multiple synapses per connection rule not allowed for cells where V is not in section (cell gid=%d) '%(self.gid)) + logger.debug(' Error: Multiple synapses per connection rule not allowed for cells where V is not in section (cell gid=%d) '%(self.gid)) return -1, weightIndex return pointp, weightIndex @@ -1195,10 +1192,10 @@ def _setConnSynMechs (self, params, secLabels): if sim.net.params.synMechParams: # if no synMech specified, but some synMech params defined synLabel = list(sim.net.params.synMechParams.keys())[0] # select first synMech from net params and add syn params['synMech'] = synLabel - if sim.cfg.verbose: print(' Warning: no synaptic mechanisms specified for connection to cell gid=%d so using %s '%(self.gid, synLabel)) + logger.debug(' Warning: no synaptic mechanisms specified for connection to cell gid=%d so using %s '%(self.gid, synLabel)) else: # if no synaptic mechanism specified and no synMech params available - if sim.cfg.verbose: print(' Error: no synaptic mechanisms available to add conn on 
cell gid=%d '%(self.gid)) - return -1 # if no Synapse available print error and exit + logger.debug(' Error: no synaptic mechanisms available to add conn on cell gid=%d '%(self.gid)) + return -1 # if no Synapse available log error and exit # if desired synaptic mechanism specified in conn params if synsPerConn > 1: # if more than 1 synapse @@ -1208,7 +1205,7 @@ def _setConnSynMechs (self, params, secLabels): if len(params['loc']) == synsPerConn: synMechLocs = params['loc'] else: - print("Error: The length of the list of locations does not match synsPerConn (distributing uniformly)") + logger.warning("Error: The length of the list of locations does not match synsPerConn (distributing uniformly)") synMechSecs, synMechLocs = self._distributeSynsUniformly(secList=secLabels, numSyns=synsPerConn) else: synMechLocs = [i*(1.0/synsPerConn)+1.0/synsPerConn/2 for i in range(synsPerConn)] @@ -1223,7 +1220,7 @@ def _setConnSynMechs (self, params, secLabels): if len(params['loc']) == synsPerConn: # list of locs matches num syns synMechLocs = params['loc'] else: # list of locs does not match num syns - print("Error: The length of the list of locations does not match synsPerConn (with cfg.distributeSynsUniformly = False") + logger.warning("Error: The length of the list of locations does not match synsPerConn (with cfg.distributeSynsUniformly = False") return else: # single loc synMechLocs = [params['loc']] * synsPerConn @@ -1248,8 +1245,8 @@ def _setConnSynMechs (self, params, secLabels): randLoc = rand.uniform(0, 1) synMechLocs = [rand.uniform(0, 1) for i in range(synsPerConn)] else: - print("\nError: The length of the list of sections needs to be greater or equal to the synsPerConn (with cfg.connRandomSecFromList = True") - return + logger.warning("Error: The length of the list of sections needs to be greater or equal to the synsPerConn (with cfg.connRandomSecFromList = True") + return else: # if 1 synapse # by default place on 1st section of list and location available @@ 
-1282,8 +1279,7 @@ def _distributeSynsUniformly (self, secList, numSyns): secLengths = [self.secs[s]['hObj'].L for s in secList] else: secLengths = [1.0 for s in secList] - if sim.cfg.verbose: - print((' Section lengths not available to distribute synapses in cell %d'%self.gid)) + logger.debug(' Section lengths not available to distribute synapses in cell %d'%self.gid) secLengths = [x for x in secLengths if isinstance(x, Number)] totLength = sum(secLengths) @@ -1315,9 +1311,9 @@ def _addConnPlasticity (self, params, sec, netcon, weightIndex): self.conns[-1]['hSTDPprecon'] = precon self.conns[-1]['hSTDPpstcon'] = pstcon self.conns[-1]['STDPdata'] = {'preGid':params['preGid'], 'postGid': self.gid, 'receptor': weightIndex} # Not used; FYI only; store here just so it's all in one place - if sim.cfg.verbose: print(' Added STDP plasticity to synaptic mechanism') + logger.debug(' Added STDP plasticity to synaptic mechanism') except: - print('Error: exception when adding plasticity using %s mechanism' % (plasticity['mech'])) + logger.warning('Error: exception when adding plasticity using %s mechanism' % (plasticity['mech'])) diff --git a/netpyne/cell/inputs.py b/netpyne/cell/inputs.py index bb77040e7..8b876a6d0 100644 --- a/netpyne/cell/inputs.py +++ b/netpyne/cell/inputs.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import @@ -18,6 +17,7 @@ from neuron import h import numpy as np +from netpyne.logger import logger def createRhythmicPattern(params, rand): """ @@ -50,7 +50,7 @@ def createRhythmicPattern(params, rand): distribution = params.get('distribution', 'normal') if eventsPerCycle > 2 or eventsPerCycle <= 0: - print("eventsPerCycle should be either 1 or 2, trying 2") + logger.warning("eventsPerCycle should be either 1 or 2, trying 2") eventsPerCycle = 2 # If frequency is 0, create empty vector if input times if not freq: @@ -98,7 +98,7 @@ def 
createRhythmicPattern(params, rand): t_input = t_input[t_input > 0] t_input.sort() else: - print("Indicated distribution not recognized. Not making any alpha feeds.") + logger.warning("Indicated distribution not recognized. Not making any alpha feeds.") t_input = [] return np.array(t_input) diff --git a/netpyne/cell/pointCell.py b/netpyne/cell/pointCell.py index 69d12ceb7..6abe58563 100644 --- a/netpyne/cell/pointCell.py +++ b/netpyne/cell/pointCell.py @@ -7,7 +7,6 @@ Contributors: salvadordura@gmail.com, samnemo@gmail.com """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import @@ -23,7 +22,7 @@ import numpy as np from .cell import Cell from ..specs import Dict - +from netpyne.logger import logger ############################################################################### # @@ -78,7 +77,7 @@ def createNEURONObj (self): try: self.hPointp = getattr(h, self.tags['cellModel'])() except: - print("Error creating point process mechanism %s in cell with gid %d" % (self.tags['cellModel'], self.gid)) + logger.warning("Error creating point process mechanism %s in cell with gid %d" % (self.tags['cellModel'], self.gid)) return # if rate is list with 2 items generate random value from uniform @@ -109,7 +108,7 @@ def createNEURONObj (self): # add random num generator, and set number and seed for NetStims if self.tags['cellModel'] == 'NetStim': - if sim.cfg.verbose: print("Creating a NetStim pointcell") + logger.debug("Creating a NetStim pointcell") rand = h.Random() self.hRandom = rand if 'number' not in self.params: @@ -157,7 +156,6 @@ def createNEURONObj (self): # rand.negexp(noise*interval) # vec.setrand(rand) # negexpInterval= np.array(vec) - # #print negexpInterval # spkTimes = np.cumsum(fixedInterval + negexpInterval) + (start - interval*(1-noise)) if numSpks < 100: @@ -179,7 +177,7 @@ def createNEURONObj (self): spkTimes = np.cumsum(fixedInterval + negexpInterval) + (start 
- interval*(1-noise)) else: - print('\nError: exceeded the maximum number of VecStim spikes per cell (%d > %d)' % (numSpks, maxReproducibleSpks)) + logger.warning('Error: exceeded the maximum number of VecStim spikes per cell (%d > %d)' % (numSpks, maxReproducibleSpks)) return # spikePattern @@ -207,7 +205,7 @@ def createNEURONObj (self): from .inputs import createGaussPattern spkTimes = createGaussPattern(self.params['spikePattern'], rand) else: - print('\nError: invalid spikePattern type %s' % (patternType)) + logger.warning('Error: invalid spikePattern type %s' % (patternType)) return vec = h.Vector(len(spkTimes)) @@ -216,7 +214,7 @@ def createNEURONObj (self): elif 'spkTimes' in self.params: spkTimes = self.params['spkTimes'] if type(spkTimes) not in (list,tuple,np.array): - print('\nError: VecStim "spkTimes" needs to be a list, tuple or numpy array') + logger.warning('Error: VecStim "spkTimes" needs to be a list, tuple or numpy array') return spkTimes = np.array(spkTimes) vec = h.Vector(len(spkTimes)) @@ -225,14 +223,14 @@ def createNEURONObj (self): elif 'spkTimes' in self.tags: spkTimes = self.tags['spkTimes'] if type(spkTimes) not in (list,tuple,np.array): - print('\nError: VecStim "spkTimes" needs to be a list, tuple or numpy array') + logger.warning('Error: VecStim "spkTimes" needs to be a list, tuple or numpy array') return spkTimes = np.array(spkTimes) vec = h.Vector(len(spkTimes)) # missing params else: - print('\nError: VecStim requires interval, rate or spkTimes') + logger.warning('Error: VecStim requires interval, rate or spkTimes') return # pulse list: start, end, rate, noise @@ -251,12 +249,12 @@ def createNEURONObj (self): elif 'rate' in pulse: interval = 1000.0/pulse['rate'] else: - print('Error: Vecstim pulse missing "rate" or "interval" parameter') + logger.warning('Error: Vecstim pulse missing "rate" or "interval" parameter') return # check start,end and noise params if any([x not in pulse for x in ['start', 'end']]): - print('Error: Vecstim 
pulse missing "start" and/or "end" parameter') + logger.warning('Error: Vecstim pulse missing "start" and/or "end" parameter') return else: noise = pulse['noise'] if 'noise' in pulse else 0.0 @@ -349,7 +347,7 @@ def addConn (self, params, netStimParams = None): # Avoid self connections if params['preGid'] == self.gid: - if sim.cfg.verbose: print(' Error: attempted to create self-connection on cell gid=%d, section=%s '%(self.gid, params.get('sec'))) + logger.debug(' Error: attempted to create self-connection on cell gid=%d, section=%s '%(self.gid, params.get('sec'))) return # if self-connection return # Weight @@ -438,15 +436,14 @@ def addConn (self, params, netStimParams = None): self.conns[-1]['shapeWeightVec'].play(netcon._ref_weight[weightIndex], self.conns[-1]['shapeTimeVec']) - if sim.cfg.verbose: - sec = params['sec'] - loc = params['loc'] - preGid = netStimParams['source']+' NetStim' if netStimParams else params['preGid'] - try: - print((' Created connection preGid=%s, postGid=%s, sec=%s, loc=%.4g, synMech=%s, weight=%.4g, delay=%.2f' - % (preGid, self.gid, sec, loc, params['synMech'], weights[i], delays[i]))) - except: - print((' Created connection preGid=%s' % (preGid))) + sec = params['sec'] + loc = params['loc'] + preGid = netStimParams['source']+' NetStim' if netStimParams else params['preGid'] + try: + logger.debug(' Created connection preGid=%s, postGid=%s, sec=%s, loc=%.4g, synMech=%s, weight=%.4g, delay=%.2f' + % (preGid, self.gid, sec, loc, params['synMech'], weights[i], delays[i])) + except: + logger.debug(' Created connection preGid=%s' % (preGid)) def initV (self): @@ -458,8 +455,7 @@ def wrapper(*args, **kwargs): try: name(*args,**kwargs) except: - if sim.cfg.verbose: - print("Error: Function '%s' not yet implemented for Point Neurons" % name) + logger.debug("Error: Function '%s' not yet implemented for Point Neurons" % name) return wrapper def _addConnPlasticity (self, params, sec, netcon, weightIndex): @@ -479,14 +475,14 @@ def 
_addConnPlasticity (self, params, sec, netcon, weightIndex): self.conns[-1]['hSTDPprecon'] = precon self.conns[-1]['hSTDPpstcon'] = pstcon self.conns[-1]['STDPdata'] = {'preGid':params['preGid'], 'postGid': self.gid, 'receptor': weightIndex} # Not used; FYI only; store here just so it's all in one place - if sim.cfg.verbose: print(' Added STDP plasticity to synaptic mechanism') + logger.debug(' Added STDP plasticity to synaptic mechanism') except: - print('Error: exception when adding plasticity using %s mechanism' % (plasticity['mech'])) + logger.warning('Error: exception when adding plasticity using %s mechanism' % (plasticity['mech'])) # def modify (self): - # print 'Error: Function not yet implemented for Point Neurons' + # logger.warning 'Error: Function not yet implemented for Point Neurons' # def addSynMechsNEURONObj (self): - # print 'Error: Function not yet implemented for Point Neurons' + # logger.warning 'Error: Function not yet implemented for Point Neurons' diff --git a/netpyne/conversion/excel.py b/netpyne/conversion/excel.py index 742101c00..4fb9d5f9a 100644 --- a/netpyne/conversion/excel.py +++ b/netpyne/conversion/excel.py @@ -3,17 +3,17 @@ """ -from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import - from builtins import open from builtins import range from builtins import str from future import standard_library standard_library.install_aliases() +from netpyne.logger import logger + def importConnFromExcel (fileName, sheetName): """ Function for/to @@ -54,7 +54,7 @@ def importConnFromExcel (fileName, sheetName): f.write(connText) # write starting text for row in range(1,numRows+1): if sheet.cell(row=row, column=colProb).value: # if not empty row - print('Creating conn rule for row ' + str(row)) + logger.info('Creating conn rule for row ' + str(row)) # read row values pre = sheet.cell(row=row, column=colPreTags).value post = sheet.cell(row=row, 
column=colPostTags).value diff --git a/netpyne/conversion/neuromlFormat.py b/netpyne/conversion/neuromlFormat.py index 55b773cad..d110a4aec 100644 --- a/netpyne/conversion/neuromlFormat.py +++ b/netpyne/conversion/neuromlFormat.py @@ -3,14 +3,12 @@ """ -from __future__ import print_function from __future__ import division from __future__ import absolute_import - from builtins import str - from builtins import range +from netpyne.logger import logger try: import neuroml @@ -23,7 +21,7 @@ from neuron import h pc = h.ParallelContext() # MPI: Initialize the ParallelContext class if int(pc.id()) == 0: - print('Note: pyNeuroML version %s is installed but at least v%s is required'%(pynml_ver,min_pynml_ver_required)) + logger.warning('pyNeuroML version %s is installed but at least v%s is required'%(pynml_ver,min_pynml_ver_required)) neuromlExists = False else: neuromlExists = True @@ -31,8 +29,8 @@ except ImportError: from neuron import h pc = h.ParallelContext() # MPI: Initialize the ParallelContext class - if False and int(pc.id()) == 0: # only print for master node - print('Warning: NeuroML import failed; import/export functions for NeuroML will not be available. \n To install the pyNeuroML & libNeuroML Python packages visit: https://www.neuroml.org/getneuroml') + if False and int(pc.id()) == 0: # only log for master node + logger.warning('NeuroML import failed; import/export functions for NeuroML will not be available. 
\n To install the pyNeuroML & libNeuroML Python packages visit: https://www.neuroml.org/getneuroml') neuromlExists = False import pprint; pp = pprint.PrettyPrinter(depth=6) @@ -50,12 +48,12 @@ def _convertNetworkRepresentation(net, gids_vs_pop_indices): nn = {} for np_pop in list(net.pops.values()): - print("Adding conns for: %s"%np_pop.tags) + logger.info("Adding conns for: %s"%np_pop.tags) if 'cellModel' in np_pop.tags and not np_pop.tags['cellModel'] == 'NetStim': for cell in net.cells: if cell.gid in np_pop.cellGids: popPost, indexPost = gids_vs_pop_indices[cell.gid] - #print("Cell %s: %s\n %s[%i]\n"%(cell.gid,cell.tags,popPost, indexPost)) + #logger.info("Cell %s: %s\n %s[%i]\n"%(cell.gid,cell.tags,popPost, indexPost)) for conn in cell.conns: preGid = conn['preGid'] if not preGid == 'NetStim': @@ -67,7 +65,7 @@ def _convertNetworkRepresentation(net, gids_vs_pop_indices): synMech = conn['synMech'] #threshold = conn['threshold'] - if sim.cfg.verbose: print(" Conn %s[%i]->%s[%i] with %s, w: %s, d: %s"%(popPre, indexPre,popPost, indexPost, synMech, weight, delay)) + logger.debug(" Conn %s[%i]->%s[%i] with %s, w: %s, d: %s"%(popPre, indexPre,popPost, indexPost, synMech, weight, delay)) projection_info = (popPre,popPost,synMech) if not projection_info in list(nn.keys()): @@ -75,7 +73,7 @@ def _convertNetworkRepresentation(net, gids_vs_pop_indices): nn[projection_info].append({'indexPre':indexPre,'indexPost':indexPost,'weight':weight,'delay':delay}) else: - #print(" Conn NetStim->%s[%s] with %s"%(popPost, indexPost, '??')) + #logger.info(" Conn NetStim->%s[%s] with %s"%(popPost, indexPost, '??')) pass return nn @@ -90,15 +88,15 @@ def _convertStimulationRepresentation(net,gids_vs_pop_indices, nml_doc, populati for np_pop in list(net.pops.values()): if 'cellModel' in np_pop.tags and not np_pop.tags['cellModel'] == 'NetStim': - print("Adding stims for: %s"%np_pop.tags) + logger.info("Adding stims for: %s"%np_pop.tags) for cell in net.cells: if cell.gid in 
np_pop.cellGids: pop, index = gids_vs_pop_indices[cell.gid] - #print(" Cell %s:\n Tags: %s\n Pop: %s[%i]\n Stims: %s\n Conns: %s\n"%(cell.gid,cell.tags,pop, index,cell.stims,cell.conns)) + #logger.info(" Cell %s:\n Tags: %s\n Pop: %s[%i]\n Stims: %s\n Conns: %s\n"%(cell.gid,cell.tags,pop, index,cell.stims,cell.conns)) for stim in cell.stims: if stim['type']=='IClamp': il_id = '%s__%s'%(stim['label'],pop) - #print(' adding IClamp stim %s: %s '%(il_id,stim)) + #logger.info(' adding IClamp stim %s: %s '%(il_id,stim)) input_list = None for ii in nml_doc.networks[0].input_lists: if ii.id == il_id: @@ -115,7 +113,7 @@ def _convertStimulationRepresentation(net,gids_vs_pop_indices, nml_doc, populati input_list.input.append(input) elif stim['type']=='NetStim': - #print(' adding NetStim stim: %s'%stim) + #logger.info(' adding NetStim stim: %s'%stim) ref = stim['source'] rate = stim['rate'] @@ -141,7 +139,6 @@ def _convertStimulationRepresentation(net,gids_vs_pop_indices, nml_doc, populati stims[stim_info].append({'index':index,'weight':weight,'delay':delay}) #stims[stim_info].append({'index':index,'weight':weight,'delay':delay,'threshold':threshold}) - #print(stims) return stims # @@ -182,7 +179,7 @@ def _export_synapses(net, nml_doc): syn_types = {} for id,syn in net.params.synMechParams.items(): syn_types[id]=syn['mod'] - if sim.cfg.verbose: print('Exporting details of syn: %s'%syn) + logger.debug('Exporting details of syn: %s'%syn) if syn['mod'] == 'Exp2Syn': syn0 = neuroml.ExpTwoSynapse(id=id, gbase='1uS', @@ -293,7 +290,7 @@ def exportNeuroML2(reference, connections=True, stimulations=True, format='xml', import random myrandom = random.Random(12345) - print("Exporting the network to NeuroML 2, reference: %s, connections: %s, stimulations: %s, format: %s"%(reference,connections, stimulations, format)) + logger.info("Exporting the network to NeuroML 2, reference: %s, connections: %s, stimulations: %s, format: %s"%(reference, connections, stimulations, format)) import 
neuroml import neuroml.writers as writers @@ -316,7 +313,7 @@ def exportNeuroML2(reference, connections=True, stimulations=True, format='xml', for np_pop_id in net.pops: np_pop = net.pops[np_pop_id] - if sim.cfg.verbose: print("-- Adding a population %s: %s"%(np_pop_id,np_pop.tags)) + logger.debug("-- Adding a population %s: %s"%(np_pop_id,np_pop.tags)) cell_param_set = {} @@ -324,9 +321,9 @@ def exportNeuroML2(reference, connections=True, stimulations=True, format='xml', 'cellType' in np_pop.tags and \ np_pop.tags['cellType'] in net.params.cellParams.keys(): ## SIMPLE POP/CELLTYPE FORMAT - if sim.cfg.verbose: print("Assuming simple pop/cell type format...") + logger.debug("Assuming simple pop/cell type format...") cell_param_set = net.params.cellParams[np_pop.tags['cellType']] - if sim.cfg.verbose: print(" -- Simple format for populations being used for pop %s with cell %s: %s"%(np_pop_id,np_pop.tags['cellType'],cell_param_set)) + logger.debug(" -- Simple format for populations being used for pop %s with cell %s: %s"%(np_pop_id,np_pop.tags['cellType'],cell_param_set)) np_pop.tags['cellModel'] = np_pop.tags['cellType'] else: @@ -335,35 +332,33 @@ def exportNeuroML2(reference, connections=True, stimulations=True, format='xml', cell_param_set0 = net.params.cellParams[cell_name] someMatches = False someMisMatches = False - if sim.cfg.verbose: print(" -- Checking whether pop %s matches %s: %s"%(np_pop_id,cell_name,cell_param_set0)) + logger.debug(" -- Checking whether pop %s matches %s: %s"%(np_pop_id,cell_name,cell_param_set0)) if 'conds' in cell_param_set0: for cond in cell_param_set0['conds']: if len(cell_param_set0['conds'][cond])>0: if cond in np_pop.tags and cell_param_set0['conds'][cond] == np_pop.tags[cond]: - if sim.cfg.verbose: print(" Cond: %s matches..."%cond) + logger.debug(" Cond: %s matches..."%cond) someMatches = True else: - if sim.cfg.verbose: print(" Cond: %s DOESN'T match (%s != %s)..."%(cond,cell_param_set0['conds'][cond],np_pop.tags[cond] if cond 
in np_pop.tags else "???")) + logger.debug(" Cond: %s DOESN'T match (%s != %s)..."%(cond,cell_param_set0['conds'][cond],np_pop.tags[cond] if cond in np_pop.tags else "???")) someMisMatches = True if someMatches and not someMisMatches: - if sim.cfg.verbose: print(" Matches: %s"%cell_param_set0) + logger.debug(" Matches: %s"%cell_param_set0) cell_param_set.update(cell_param_set0) if 'cellModel' in np_pop.tags and not np_pop.tags['cellModel'] == 'NetStim' and len(cell_param_set)==0: - print('Is %s in %s...?'%(np_pop_id, net.params.cellParams.keys())) + logger.info('Is %s in %s...?'%(np_pop_id, net.params.cellParams.keys())) if np_pop_id in net.params.cellParams: - print('Proceeding with assumption %s defines which cellParams...'%np_pop) + logger.info('Proceeding with assumption %s defines which cellParams...'%np_pop) cell_param_set0 = net.params.cellParams[np_pop_id] cell_param_set.update(cell_param_set0) cell_param_set['conds'] = {} cell_param_set['conds']['cellType'] = np_pop.tags['cellType'] cell_param_set['conds']['cellModel'] = np_pop.tags['cellModel'] - print('Now cell params for %s are: %s...'%(np_pop_id,cell_param_set)) - + logger.info('Now cell params for %s are: %s...'%(np_pop_id,cell_param_set)) else: - - print("Error, could not find cellParams for %s"%np_pop.tags) + logger.warning("Error, could not find cellParams for %s"%np_pop.tags) exit(-1) if not np_pop.tags['cellModel'] == 'NetStim': @@ -381,12 +376,11 @@ def exportNeuroML2(reference, connections=True, stimulations=True, format='xml', populations_vs_components[np_pop.tags['pop']]=cell_id - if sim.cfg.verbose: print("Checking whether to add cell: %s; already added: %s"%(cell_param_set,cells_added)) + logger.debug("Checking whether to add cell: %s; already added: %s"%(cell_param_set,cells_added)) if 'cellModel' in np_pop.tags and not np_pop.tags['cellModel'] == 'NetStim' and not cell_id in cells_added: - if sim.cfg.verbose: print("--------------- Adding a cell from pop %s: 
\n%s"%(np_pop.tags,cell_param_set)) - - # print("===== Adding the cell %s: \n%s"%(cell_name,pp.pprint(cell_param_set))) + logger.debug("--------------- Adding a cell from pop %s: \n%s"%(np_pop.tags,cell_param_set)) + # logger.debug("===== Adding the cell %s: \n%s"%(cell_name,pp.pprint(cell_param_set))) # Single section; one known mechanism... soma = cell_param_set['secs']['soma'] @@ -398,7 +392,7 @@ def exportNeuroML2(reference, connections=True, stimulations=True, format='xml', pproc = list(soma['pointps'].values())[0] - print("Assuming abstract cell with behaviour set by single point process: %s!"%pproc) + logger.info("Assuming abstract cell with behaviour set by single point process: %s!"%pproc) if pproc['mod'] == 'Izhi2007b': izh = neuroml.Izhikevich2007Cell(id=cell_id) @@ -416,10 +410,10 @@ def exportNeuroML2(reference, connections=True, stimulations=True, format='xml', nml_doc.izhikevich2007_cells.append(izh) else: - print("Unknown point process: %s; can't convert to NeuroML 2 equivalent!"%pproc['mod']) + logger.warning("Unknown point process: %s; can't convert to NeuroML 2 equivalent!"%pproc['mod']) exit(1) else: - print("Assuming normal cell with behaviour set by ion channel mechanisms!") + logger.info("Assuming normal cell with behaviour set by ion channel mechanisms!") cell = neuroml.Cell(id=cell_id) cell.notes = "Cell exported from NetPyNE:\n%s"%cell_param_set @@ -459,11 +453,11 @@ def exportNeuroML2(reference, connections=True, stimulations=True, format='xml', if np_sec['topol']['parentX'] == 0: nml_seg.fract_along = 0 if not ((np_sec['topol']['parentX'] == 1.0 or np_sec['topol']['parentX'] == 0.0) and np_sec['topol']['childX'] == 0.0): - print("Currently only support cell topol with (parentX == 1 or 0) and childX == 0") + logger.warning("Currently only support cell topol with (parentX == 1 or 0) and childX == 0") exit(1) if not ( ('pt3d' not in np_sec['geom']) or len(np_sec['geom']['pt3d'])==0 or len(np_sec['geom']['pt3d'])==2 ): - print("Currently 
only support cell geoms with 2 pt3ds (or 0 and diam/L specified): %s"%np_sec['geom']) + logger.warning("Currently only support cell geoms with 2 pt3ds (or 0 and diam/L specified): %s"%np_sec['geom']) exit(1) if ('pt3d' not in np_sec['geom'] or len(np_sec['geom']['pt3d'])==0): @@ -535,13 +529,13 @@ def exportNeuroML2(reference, connections=True, stimulations=True, format='xml', for mech_name in list(np_sec['mechs'].keys()): mech = np_sec['mechs'][mech_name] if mech_name in mechs_to_ignore: - print('Ignoring mechanism: %s'%mechs_to_ignore) + logger.info('Ignoring mechanism: %s'%mechs_to_ignore) elif mech_name == 'hh' or mech_name == 'hh2': for chan in chans_doc.ion_channel_hhs: if (chan.id == 'leak_hh' or chan.id == 'na_hh' or chan.id == 'k_hh'): if not chan.id in chans_added: - print(" > Adding %s since it's not in %s"%(chan.id, chans_added)) + logger.info(" > Adding %s since it's not in %s"%(chan.id, chans_added)) nml_doc.ion_channel_hhs.append(chan) chans_added.append(chan.id) @@ -582,7 +576,7 @@ def exportNeuroML2(reference, connections=True, stimulations=True, format='xml', ion='non_specific') mp.channel_densities.append(leak_cd) else: - print("Currently NML2 export only supports mech hh, not: %s"%mech_name) + logger.warning("Currently NML2 export only supports mech hh, not: %s"%mech_name) exit(1) @@ -593,18 +587,17 @@ def exportNeuroML2(reference, connections=True, stimulations=True, format='xml', for np_pop in list(net.pops.values()): index = 0 - print("Adding population: %s"%np_pop.tags) + logger.info("Adding population: %s"%np_pop.tags) type = 'populationList' if 'cellModel' in np_pop.tags and not np_pop.tags['cellModel'] == 'NetStim': comp_id = populations_vs_components[np_pop.tags['pop']] - #print(net.params.cellParams) if np_pop.tags['pop'] in net.params.cellParams: cell_param_set = net.params.cellParams[np_pop.tags['pop']] else: cell_param_set = net.params.cellParams[np_pop.tags['cellType']] - print('Population (%s) has comp: %s 
(%s)'%(np_pop.tags,comp_id, cell_param_set)) + logger.info('Population (%s) has comp: %s (%s)'%(np_pop.tags,comp_id, cell_param_set)) pop = neuroml.Population(id=np_pop.tags['pop'],component=comp_id, type=type) @@ -636,7 +629,7 @@ def exportNeuroML2(reference, connections=True, stimulations=True, format='xml', prefix = "NetConn" popPre,popPost,synMech = proj_info - if sim.cfg.verbose: print("Adding proj: %s->%s (%s)"%(popPre,popPost,synMech)) + logger.debug("Adding proj: %s->%s (%s)"%(popPre,popPost,synMech)) if syn_types[synMech]!='ElectSyn': projection = neuroml.Projection(id="%s_%s_%s_%s"%(prefix,popPre, popPost,synMech), @@ -657,7 +650,7 @@ def exportNeuroML2(reference, connections=True, stimulations=True, format='xml', for conn in nn[proj_info]: - if sim.cfg.verbose: print("Adding conn %s"%conn) + logger.debug("Adding conn %s"%conn) if syn_types[synMech]!='ElectSyn': connection = neuroml.ConnectionWD(id=index, \ @@ -714,7 +707,7 @@ def exportNeuroML2(reference, connections=True, stimulations=True, format='xml', if stimulations: for ssp in net.params.stimSourceParams: ss = net.params.stimSourceParams[ssp] - print('Adding the stim source: %s = %s'%(ssp,ss)) + logger.info('Adding the stim source: %s = %s'%(ssp,ss)) if ss['type']=='IClamp': pg = neuroml.PulseGenerator(id=ssp, delay="%sms"%ss['del'], @@ -729,7 +722,7 @@ def exportNeuroML2(reference, connections=True, stimulations=True, format='xml', for stim_info in list(stims.keys()): name_stim, post_pop, rate, noise, synMech = stim_info - if sim.cfg.verbose: print("Adding a NetStim stim: %s"%[stim_info]) + logger.debug("Adding a NetStim stim: %s"%[stim_info]) if noise==0: source = neuroml.SpikeGenerator(id=name_stim,period="%ss"%(1./rate)) @@ -754,7 +747,7 @@ def exportNeuroML2(reference, connections=True, stimulations=True, format='xml', count = 0 for stim in stims[stim_info]: - #print(" Adding stim: %s"%stim) + #logger.debug(" Adding stim: %s"%stim) connection = neuroml.ConnectionWD(id=count, \ 
pre_cell_id="../%s[%i]"%(stim_pop.id, count), \ @@ -773,7 +766,7 @@ def exportNeuroML2(reference, connections=True, stimulations=True, format='xml', nml_file_name = '%s.net.nml'%reference if format=='xml': - print("Writing %s to %s (%s)"%(nml_doc, nml_file_name, nml_file_name.__class__)) + logger.info("Writing %s to %s (%s)"%(nml_doc, nml_file_name, nml_file_name.__class__)) writers.NeuroMLWriter.write(nml_doc, nml_file_name) elif format=='hdf5': nml_file_name+='.h5' @@ -886,7 +879,7 @@ def _get_prox_dist(self, seg, seg_ids_vs_segs): def handle_network(self, network_id, notes, temperature=None): if temperature: self.simConfig.hParams['celsius'] = pynml.convert_to_units(temperature,'degC') - print("Setting global temperature to %s"%self.simConfig.hParams['celsius']) + logger.info("Setting global temperature to %s"%self.simConfig.hParams['celsius']) # @@ -894,7 +887,7 @@ def handle_network(self, network_id, notes, temperature=None): # def handle_population(self, population_id, component, size, component_obj, properties={}): - if self.verbose: print("A population: %s with %i of %s (%s)"%(population_id,size,component,component_obj)) + if self.verbose: logger.info("A population: %s with %i of %s (%s)"%(population_id,size,component,component_obj)) self.pop_ids_vs_components[population_id] = component_obj @@ -911,7 +904,7 @@ def handle_population(self, population_id, component, size, component_obj, prope popInfo['numCells'] = size if population_id=='pop': - print("\n\n*****************************\nReconsider calling your population 'pop'; it leads to some errors in NetPyNE!\nGiving up...\n*****************************\n\n") + logger.warning("*****************************\nReconsider calling your population 'pop'; it leads to some errors in NetPyNE!\nGiving up...\n*****************************\n\n") quit() self.popParams[population_id] = popInfo @@ -983,7 +976,7 @@ def handle_population(self, population_id, component, size, component_obj, prope 
self.pop_ids_vs_cumulative_lengths[population_id] = cumulative_lengths for section in list(cellRule['secs'].keys()): - #print("ggg %s: %s"%(section,ordered_segs[section])) + #logger.info("ggg %s: %s"%(section,ordered_segs[section])) for seg in ordered_segs[section]: prox, dist = self._get_prox_dist(seg, seg_ids_vs_segs) @@ -1027,7 +1020,7 @@ def handle_population(self, population_id, component, size, component_obj, prope cellRule['secLists'][seg_grp.id] = seg_grps_vs_nrn_sections[seg_grp.id] for ip in seg_grp.inhomogeneous_parameters: - #print("=====================\ninhomogeneousParameter: %s"%ip) + #logger.info("=====================\ninhomogeneousParameter: %s"%ip) inhomogeneous_parameters[seg_grp.id] = {} @@ -1047,7 +1040,7 @@ def handle_population(self, population_id, component, size, component_obj, prope last = sec_segs[nrn_sec][-1] start_len = path_prox[seg_grp.id][first.id] end_len = path_dist[seg_grp.id][last.id] - #print(" Seg: %s (%s) -> %s (%s)"%(first,start_len,last,end_len)) + #logger.info(" Seg: %s (%s) -> %s (%s)"%(first,start_len,last,end_len)) inhomogeneous_parameters[seg_grp.id][nrn_sec] = (start_len,end_len) @@ -1150,7 +1143,7 @@ def handle_population(self, population_id, component, size, component_obj, prope grp = vp.segment_groups path_vals = inhomogeneous_parameters[grp] expr = iv.value.replace('exp(','math.exp(') - #print("variable_parameter: %s, %s, %s"%(grp,iv, expr)) + #logger.info("variable_parameter: %s, %s, %s"%(grp,iv, expr)) for section_name in seg_grps_vs_nrn_sections[grp]: path_start, path_end = inhomogeneous_parameters[grp][section_name] @@ -1161,7 +1154,7 @@ def handle_population(self, population_id, component, size, component_obj, prope nseg = cellRule['secs'][section_name]['geom']['nseg'] if 'nseg' in cellRule['secs'][section_name]['geom'] else 1 - #print(" Cond dens %s: %s S_per_cm2 (%s um) -> %s S_per_cm2 (%s um); nseg = %s"%(section_name,gmax_start,path_start,gmax_end,path_end, nseg)) + #logger.info(" Cond dens %s: %s 
S_per_cm2 (%s um) -> %s S_per_cm2 (%s um); nseg = %s"%(section_name,gmax_start,path_start,gmax_end,path_end, nseg)) gmax = [] for fract in [(2*i+1.0)/(2*nseg) for i in range(nseg)]: @@ -1170,7 +1163,7 @@ def handle_population(self, population_id, component, size, component_obj, prope gmax_i = pynml.convert_to_units('%s S_per_m2'%eval(expr),'S_per_cm2') - #print(" Point %s at %s = %s"%(p,fract, gmax_i)) + #logger.info(" Point %s at %s = %s"%(p,fract, gmax_i)) gmax.append(gmax_i) if cm.ion_channel=='pas': @@ -1239,7 +1232,7 @@ def handle_population(self, population_id, component, size, component_obj, prope #popInfo['cellType'] = component - if self.verbose: print("Abstract cell: %s"%(isinstance(component_obj,BaseCell))) + logger.debug("Abstract cell: %s"%(isinstance(component_obj,BaseCell))) if hasattr(component_obj,'thresh'): threshold = pynml.convert_to_units(component_obj.thresh,'mV') @@ -1266,7 +1259,7 @@ def handle_population(self, population_id, component, size, component_obj, prope area = math.pi * default_diam * default_diam specCapNeu = 10e13 * capTotSI / area - #print("c: %s, area: %s, sc: %s"%(capTotSI, area, specCapNeu)) + #logger.info("c: %s, area: %s, sc: %s"%(capTotSI, area, specCapNeu)) soma['geom']['cm'] = specCapNeu # PyNN cells @@ -1279,7 +1272,7 @@ def handle_population(self, population_id, component, size, component_obj, prope else: soma['geom']['cm'] = 318.319 - #print("sc: %s"%(soma['geom']['cm'])) + #logger.info("sc: %s"%(soma['geom']['cm'])) soma['pointps'][component] = {'mod':component} cellRule['secs'] = {'soma': soma} # add sections to dict @@ -1322,11 +1315,10 @@ def _convert_to_nrn_section_location(self, population_id, seg_id, fract_along): to_start = 0.0 if ind==0 else lens[ind-1] to_end = lens[ind] tot = lens[-1] - #print to_start, to_end, tot, ind, seg, seg_id fract_sec = (to_start + fract_along *(to_end-to_start))/(tot) ind+=1 - #print("============= Converted %s:%s on pop %s to %s on %s"%(seg_id, fract_along, population_id, nrn_sec, 
fract_sec)) + #logger.info("============= Converted %s:%s on pop %s to %s on %s"%(seg_id, fract_along, population_id, nrn_sec, fract_sec)) return nrn_sec, fract_sec # @@ -1346,7 +1338,7 @@ def handle_location(self, id, population_id, component, x, y, z): # def handle_projection(self, projName, prePop, postPop, synapse, hasWeights=False, hasDelays=False, type="projection", synapse_obj=None, pre_synapse_obj=None): - if self.verbose: print("A projection: %s (%s) from %s -> %s with syn: %s" % (projName, type, prePop, postPop, synapse)) + logger.debug("A projection: %s (%s) from %s -> %s with syn: %s" % (projName, type, prePop, postPop, synapse)) self.projection_infos[projName] = (projName, prePop, postPop, synapse, type) self.connections[projName] = [] @@ -1400,7 +1392,7 @@ def handle_input_list(self, inputListId, population_id, component, size, input_c if component=='IClamp': - print("\n\n*****************************\nReconsider calling your input 'IClamp' in NeuroML; it leads to some errors due to clash with native NEURON IClamp!\n*****************************\n\n") + logger.warning("*****************************\nReconsider calling your input 'IClamp' in NeuroML; it leads to some errors due to clash with native NEURON IClamp!\n*****************************\n\n") exit() # TODO: build just one stimLists/stimSources entry for the inputList @@ -1441,7 +1433,7 @@ def handle_single_input(self, inputListId, id, cellId, segId = 0, fract = 0.5, w if weight!=1: self.stimLists[stimId]['weight'] = weight - if self.verbose: print("Input: %s[%s] on %s, cellId: %i, seg: %i (nrn: %s), fract: %f (nrn: %f); ref: %s; weight: %s" % (inputListId,id,pop_id,cellId,segId,nrn_sec,fract,nrn_fract,stimId, weight)) + logger.debug("Input: %s[%s] on %s, cellId: %i, seg: %i (nrn: %s), fract: %f (nrn: %f); ref: %s; weight: %s" % (inputListId,id,pop_id,cellId,segId,nrn_sec,fract,nrn_fract,stimId, weight)) # TODO: build just one stimLists/stimSources entry for the inputList # Issue: how to specify
the sec/loc per individual stim?? @@ -1485,7 +1477,7 @@ def importNeuroML2(fileName, simConfig, simulate=True, analyze=True): import pprint pp = pprint.PrettyPrinter(indent=4) - print("Importing NeuroML 2 network from: %s"%fileName) + logger.info("Importing NeuroML 2 network from: %s"%fileName) nmlHandler = None @@ -1506,11 +1498,11 @@ def importNeuroML2(fileName, simConfig, simulate=True, analyze=True): nmlHandler.finalise() - print('Finished import of NeuroML2; populations vs gids NML has calculated: ') + logger.info('Finished import of NeuroML2; populations vs gids NML has calculated: ') for pop in nmlHandler.gids: g = nmlHandler.gids[pop] - print(' %s: %s'%(pop, g if len(g)<10 else str(g[:8]).replace(']',', ..., %s]'%g[-1]))) - #print('Connections: %s'%nmlHandler.connections) + logger.info(' %s: %s'%(pop, g if len(g)<10 else str(g[:8]).replace(']',', ..., %s]'%g[-1]))) + #logger.info('Connections: %s'%nmlHandler.connections) if fileName.endswith(".h5"): @@ -1527,8 +1519,8 @@ def importNeuroML2(fileName, simConfig, simulate=True, analyze=True): nmlHandler.finalise() - print('Finished import: %s'%nmlHandler.gids) - #print('Connections: %s'%nmlHandler.connections) + logger.info('Finished import: %s'%nmlHandler.gids) + #logger.info('Connections: %s'%nmlHandler.connections) sim.initialize(netParams, simConfig) # create network object and set cfg and net params @@ -1541,13 +1533,13 @@ def importNeuroML2(fileName, simConfig, simulate=True, analyze=True): # Check gids equal.... 
for popLabel,pop in sim.net.pops.items(): - if sim.cfg.verbose: print("gid: %s: %s, %s"%(popLabel,pop, pop.cellGids)) + logger.debug("gid: %s: %s, %s"%(popLabel,pop, pop.cellGids)) for gid in pop.cellGids: assert gid in nmlHandler.gids[popLabel] for proj_id in list(nmlHandler.projection_infos.keys()): projName, prePop, postPop, synapse, ptype = nmlHandler.projection_infos[proj_id] - if sim.cfg.verbose: print("Creating connections for %s (%s): %s->%s via %s"%(projName, ptype, prePop, postPop, synapse)) + logger.debug("Creating connections for %s (%s): %s->%s via %s"%(projName, ptype, prePop, postPop, synapse)) preComp = nmlHandler.pop_ids_vs_components[prePop] @@ -1608,7 +1600,7 @@ def importNeuroML2(fileName, simConfig, simulate=True, analyze=True): cell = sim.net.cells[sim.net.gid2lid[preGapParams['gid']]] cell.addConn(preGapParams) - print(' Number of connections on node %i: %i ' % (sim.rank, sum([len(cell.conns) for cell in sim.net.cells]))) + logger.info(' Number of connections on node %i: %i ' % (sim.rank, sum([len(cell.conns) for cell in sim.net.cells]))) @@ -1625,13 +1617,13 @@ def importNeuroML2(fileName, simConfig, simulate=True, analyze=True): sim.analysis.plotData() # plot spike raster ''' h('forall psection()') - h('forall if (ismembrane("na_ion")) { print "Na ions: ", secname(), ": ena: ", ena, ", nai: ", nai, ", nao: ", nao } ') - h('forall if (ismembrane("k_ion")) { print "K ions: ", secname(), ": ek: ", ek, ", ki: ", ki, ", ko: ", ko } ') - h('forall if (ismembrane("ca_ion")) { print "Ca ions: ", secname(), ": eca: ", eca, ", cai: ", cai, ", cao: ", cao } ')''' + h('forall if (ismembrane("na_ion")) { print "Na ions: ", secname(), ": ena: ", ena, ", nai: ", nai, ", nao: ", nao } ') + h('forall if (ismembrane("k_ion")) { print "K ions: ", secname(), ": ek: ", ek, ", ki: ", ki, ", ko: ", ko } ') + h('forall if (ismembrane("ca_ion")) { print "Ca ions: ", secname(), ": eca: ", eca, ", cai: ", cai, ", cao: ", cao } ')''' return
nmlHandler.gids except: pass - #print(' Warning: An Exception occurred when loading NeuroML ...') + #logger.warning('An Exception occurred when loading NeuroML ...') diff --git a/netpyne/conversion/neuronPyHoc.py b/netpyne/conversion/neuronPyHoc.py index 36fbcf5e4..f3031c854 100644 --- a/netpyne/conversion/neuronPyHoc.py +++ b/netpyne/conversion/neuronPyHoc.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import @@ -18,6 +17,7 @@ from numbers import Number from neuron import h import importlib +from netpyne.logger import logger #h.load_file("stdrun.hoc") @@ -105,9 +105,9 @@ def importCellParams(fileName, labels, values, key = None): params = dict(list(zip(paramLabels, paramValues))) if removeFilePath: sys.path.remove(filePath) except: - print("Error loading cell parameter values from " + fileName) + logger.warning("Error loading cell parameter values from " + fileName) else: - print("Trying to import izhi params from a file without the .py extension") + logger.info("Trying to import izhi params from a file without the .py extension") return params @@ -304,7 +304,7 @@ def importCell(fileName, cellName, cellArgs = None, cellInstance = False): from netpyne.support.morphology import load cell = load(fileName) else: - print("File name should end in '.hoc', '.py', or '.swc'") + logger.warning("File name should end in '.hoc', '.py', or '.swc'") return secDic, secListDic, synMechs, globs = getCellParams(cell, varList, origGlob) @@ -363,7 +363,7 @@ def importCellsFromNet(netParams, fileName, labelList, condsList, cellNamesList, h.initnrn() if fileName.endswith('.hoc') or fileName.endswith('.tem'): - print('Importing from .hoc network not yet supported') + logger.warning('Importing from .hoc network not yet supported') return # h.load_file(fileName) # for cellName in cellNames: @@ -380,18 +380,18 @@ def importCellsFromNet(netParams, fileName, labelList, 
condsList, cellNamesList, removeFilePath = False moduleName = fileNameOnly.split('.py')[0] # remove .py to obtain module name os.chdir(filePath) - print('\nRunning network in %s to import cells into NetPyNE ...\n'%(fileName)) + logger.info('Running network in %s to import cells into NetPyNE ...\n'%fileName) from neuron import load_mechanisms load_mechanisms(filePath) tempModule = importlib.import_module(moduleName) modulePointer = tempModule if removeFilePath: sys.path.remove(filePath) else: - print("File name should be either .hoc or .py file") + logger.warning("File name should be either .hoc or .py file") return for label, conds, cellName in zip(labelList, condsList, cellNamesList): - print('\nImporting %s from %s ...'%(cellName, fileName)) + logger.info('\nImporting %s from %s ...'%(cellName, fileName)) exec('cell = tempModule' + '.' + cellName) #cell = getattr(modulePointer, cellName) # get cell object varList = mechVarList() @@ -568,7 +568,7 @@ def getCellParams(cell, varList={}, origGlob={}): try: synMech[varName] = point.__getattribute__(varName) except: - print('Could not read variable %s from synapse %s'%(varName,synMech['label'])) + logger.warning('Could not read variable %s from synapse %s'%(varName,synMech['label'])) if not any([_equal_dicts(synMech, synMech2, ignore_keys=['label']) for synMech2 in synMechs]): synMechs.append(synMech) @@ -584,7 +584,7 @@ def getCellParams(cell, varList={}, origGlob={}): # special condition for Izhi model, to set vinit=vr # if varName == 'vr': secDic[secName]['vinit'] = point.__getattribute__(varName) except: - print('Could not read %s variable from point process %s'%(varName,pointpName)) + logger.warning('Could not read %s variable from point process %s'%(varName,pointpName)) if pointps: secDic[secName]['pointps'] = pointps diff --git a/netpyne/conversion/pythonScript.py b/netpyne/conversion/pythonScript.py index 4f91d4058..3bf810b6a 100644 --- a/netpyne/conversion/pythonScript.py +++ 
b/netpyne/conversion/pythonScript.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import @@ -12,6 +11,7 @@ from future import standard_library standard_library.install_aliases() from netpyne import __version__ +from netpyne.logger import logger def createPythonScript(fname, netParams, simConfig): """ @@ -108,7 +108,7 @@ def header(title, spacer='-'): file.write(' sim.createExportNeuroML2(netParams=netParams, simConfig=simConfig, reference = nml_reference)\n') file.write(header('end script', spacer='=')) - print(("script saved on " + fname)) + logger.info("script saved on " + fname) except: - print(('error saving file: %s' %(sys.exc_info()[1]))) + logger.warning('error saving file: %s' %(sys.exc_info()[1])) diff --git a/netpyne/conversion/sonataImport.py b/netpyne/conversion/sonataImport.py index daa9cfc2f..c27fd5864 100644 --- a/netpyne/conversion/sonataImport.py +++ b/netpyne/conversion/sonataImport.py @@ -17,7 +17,7 @@ # soft-fail and suggest which packages to install from neuron import h pc = h.ParallelContext() # MPI: Initialize the ParallelContext class - if int(pc.id()) == 0: # only print for master node + if int(pc.id()) == 0: # only log for master node needed = [error.name] for pkg in ['tables', 'pyneuroml', 'neuroml']: try: @@ -25,19 +25,22 @@ __import__(pkg) except ModuleNotFoundError as error: needed.append(error.name) - print('Note: SONATA import failed; import/export functions for SONATA will not be available.\n' + - ' To use this feature install these Python packages: ', needed) + logger.warning('SONATA import failed; import/export functions for SONATA will not be available.\n' + + ' To use this feature install these Python packages: ') + logger.warning(needed) except ImportError as error: from neuron import h pc = h.ParallelContext() # MPI: Initialize the ParallelContext class - if int(pc.id()) == 0: # only print for master node - 
print('Note: SONATA import failed; import/export functions for SONATA will not be available.\n', error) + if int(pc.id()) == 0: # only log for master node + logger.warning('SONATA import failed; import/export functions for SONATA will not be available.\n') + logger.warning(error) from . import neuronPyHoc from .. import sim, specs import neuron from neuron import h +from netpyne.logger import logger h.load_file('stdgui.hoc') h.load_file('import3d.hoc') @@ -112,8 +115,7 @@ def _distributeCells(numCellsPop): if sim.nextHost>=sim.nhosts: sim.nextHost=0 - if sim.cfg.verbose: - print(("Distributed population of %i cells on %s hosts: %s, next: %s"%(numCellsPop,sim.nhosts,hostCells,sim.nextHost))) + logger.debug(("Distributed population of %i cells on %s hosts: %s, next: %s"%(numCellsPop,sim.nhosts,hostCells,sim.nextHost))) return hostCells @@ -198,7 +200,7 @@ class SONATAImporter(): # ------------------------------------------------------------------------------------------------------------ def __init__(self, **parameters): - print("Creating SONATAImporter %s..."%parameters) + logger.info("Creating SONATAImporter %s..."%parameters) self.parameters = parameters self.current_node = None self.current_node_group = None @@ -286,14 +288,14 @@ def importNet(self, configFile, replaceAxon=True, setdLNseg=True, swapSomaXY=Tru # create connections self.createConns() - #print('STOP HERE TO AVOID SIMULATING') + #logger.info('STOP HERE TO AVOID SIMULATING') #from IPython import embed; embed() # ------------------------------------------------------------------------------------------------------------ # create simulation config # ------------------------------------------------------------------------------------------------------------ def createSimulationConfig(self): - print("\nCreating simulation configuration from %s"%(self.config['simulation'])) + logger.info("\nCreating simulation configuration from %s"%(self.config['simulation'])) # set conditions required to replicate SONATA 
imported models sim.cfg.pt3dRelativeToCellLocation = False # Make cell 3d points relative to the cell x,y,z location @@ -316,14 +318,14 @@ def createSimulationConfig(self): #try: if 'node_sets_file' in self.simulation_config: - #print(self.substitutes) - #print(self.subs(self.rootFolder + '/' + self.simulation_config['node_sets_file'])) + #logger.info(self.substitutes) + #logger.info(self.subs(self.rootFolder + '/' + self.simulation_config['node_sets_file'])) # TEMPORARY FIX - FIX! sim.cfg.node_sets = load_json(self.subs(self.rootFolder+'/'+self.simulation_config['node_sets_file']).replace('$BASE_DIR','')) elif 'node_sets' in self.simulation_config: sim.cfg.node_sets = self.simulation_config['node_sets'] # except: - # print('Could not load node_sets...') + # logger.warning('Could not load node_sets...') # sim.cfg.node_sets = {} # inputs - add as 'spkTimes' to external population @@ -354,7 +356,7 @@ def createPops(self): nodes_file = self.subs(n['nodes_file']) node_types_file = self.subs(n['node_types_file']) - print("\nLoading nodes from %s and %s"%(nodes_file, node_types_file)) + logger.info("\nLoading nodes from %s and %s"%(nodes_file, node_types_file)) h5file = tables.open_file(nodes_file,mode='r') @@ -384,7 +386,7 @@ def createPops(self): self.pop_id_from_type[(sonata_pop, type)] = pop_id - print(" - Adding population: %s which has model info: %s"%(pop_id, info)) + logger.info(" - Adding population: %s which has model info: %s"%(pop_id, info)) size = self.cell_info[sonata_pop]['type_numbers'][type] @@ -544,7 +546,7 @@ def createCells(self): cellTags['params']['spkTimes'] = pop.tags['spkTimes'] # 1D list (same for all) sim.net.cells.append(pop.cellModelClass(gid, cellTags)) # instantiate Cell object - print(('Cell %d/%d (gid=%d) of pop %s, on node %d, ' % (icell, numCells, gid, pop_id, sim.rank))) + logger.info('Cell %d/%d (gid=%d) of pop %s, on node %d, ' % (icell, numCells, gid, pop_id, sim.rank)) sim.net.lastGid = sim.net.lastGid + numCells @@ -575,11 +577,11 
@@ def createConns(self): edges_file = self.subs(e['edges_file']) edge_types_file = self.subs(e['edge_types_file']) - print("\nLoading edges from %s and %s"%(edges_file,edge_types_file)) + logger.info("\nLoading edges from %s and %s"%(edges_file,edge_types_file)) h5file=tables.open_file(edges_file,mode='r') - print("Opened HDF5 file: %s"%(h5file.filename)) + logger.info("Opened HDF5 file: %s"%(h5file.filename)) self.parse_group(h5file.root.edges) h5file.close() self.edges_info[self.current_edge] = load_csv_props(edge_types_file) @@ -590,7 +592,7 @@ def createConns(self): pre_node = self.conn_info[conn]['pre_node'] post_node = self.conn_info[conn]['post_node'] - print(' Adding projection %s: %s -> %s '%(conn, pre_node, post_node)) + logger.info(' Adding projection %s: %s -> %s '%(conn, pre_node, post_node)) # add all synMechs in this projection to netParams.synMechParams for type in self.edges_info[conn]: @@ -604,7 +606,7 @@ def createConns(self): synMechParams[synMechSubs[k]] = synMechParams.pop(k) synMechParams['mod'] = self.edges_info[conn][type]['model_template'] sim.net.params.synMechParams[syn_label] = synMechParams - print(' Added synMech %s '%(syn_label)) + logger.info(' Added synMech %s '%(syn_label)) # add individual connections in this projection for i in range(len(self.conn_info[conn]['pre_id'])): @@ -618,8 +620,8 @@ def createConns(self): type = self.conn_info[conn]['edge_type_id'][i] - print(' Conn: type %s pop %s (id %s) -> pop %s (id %s) MAPPED TO: cell gid %s -> cell gid %s'%(type,pre_node,pre_id,post_node,post_id, pre_gid,post_gid)) - #print(self.edges_info[conn][type]) + logger.info(' Conn: type %s pop %s (id %s) -> pop %s (id %s) MAPPED TO: cell gid %s -> cell gid %s'%(type,pre_node,pre_id,post_node,post_id, pre_gid,post_gid)) + #logger.info(self.edges_info[conn][type]) connParams = {} postCell = sim.net.cells[sim.net.gid2lid[post_gid]] @@ -665,7 +667,7 @@ def createNetStims(self): if info['input_type'] == 'spikes': - print(" - Adding input: %s 
which has info: %s"%(input, info)) + logger.info(" - Adding input: %s which has info: %s"%(input, info)) node_set = info['node_set'] # get cell type and pop_id cellType = self.cell_info[node_set]['types'][0] @@ -691,7 +693,7 @@ def createIClamps(self): info = self.simulation_config['inputs'][input] if info['input_type'] == 'current_clamp': - print(" - Adding input: %s which has info: %s"%(input, info)) + logger.info(" - Adding input: %s which has info: %s"%(input, info)) node_set = info['node_set'] sim.net.params.stimSourceParams[input] = { @@ -869,7 +871,7 @@ def setCellRuleDynamicParamsFromNeuroml_old(self, cell, cellRule): grp = vp.segment_groups path_vals = inhomogeneous_parameters[grp] expr = iv.value.replace('exp(','math.exp(') - #print("variable_parameter: %s, %s, %s"%(grp,iv, expr)) + #logger.info("variable_parameter: %s, %s, %s"%(grp,iv, expr)) for section_name in seg_grps_vs_nrn_sections[grp]: path_start, path_end = inhomogeneous_parameters[grp][section_name] @@ -880,7 +882,7 @@ def setCellRuleDynamicParamsFromNeuroml_old(self, cell, cellRule): nseg = cellRule['secs'][section_name]['geom']['nseg'] if 'nseg' in cellRule['secs'][section_name]['geom'] else 1 - #print(" Cond dens %s: %s S_per_cm2 (%s um) -> %s S_per_cm2 (%s um); nseg = %s"%(section_name,gmax_start,path_start,gmax_end,path_end, nseg)) + #logger.info(" Cond dens %s: %s S_per_cm2 (%s um) -> %s S_per_cm2 (%s um); nseg = %s"%(section_name,gmax_start,path_start,gmax_end,path_end, nseg)) gmax = [] for fract in [(2*i+1.0)/(2*nseg) for i in range(nseg)]: @@ -888,7 +890,7 @@ def setCellRuleDynamicParamsFromNeuroml_old(self, cell, cellRule): p = path_start + fract*(path_end-path_start) gmax_i = pynml.convert_to_units('%s S_per_m2'%eval(expr),'S_per_cm2') - #print(" Point %s at %s = %s"%(p,fract, gmax_i)) + #logger.info(" Point %s at %s = %s"%(p,fract, gmax_i)) gmax.append(gmax_i) if cm.ion_channel=='pas': @@ -961,8 +963,8 @@ def setCellRuleDynamicParamsFromNeuroml_old(self, cell, cellRule): 
cellRule['secs'][section_name]['ions'][specie.ion]['o'] = pynml.convert_to_units(specie.initial_ext_concentration,'mM') cellRule['secs'][section_name]['ions'][specie.ion]['i'] = pynml.convert_to_units(specie.initial_concentration,'mM') #cellRule['secs'][section_name]['mechs'][cell.concentratrionModel] = concentrationModelParams - #print(cell.concentratrionModel) - print(concentrationModelParams) + #logger.info(cell.concentratrionModel) + logger.info(concentrationModelParams) return cellRule @@ -1014,7 +1016,6 @@ def setCellRuleDynamicParamsFromJson(self, cell_dynamic_params, cellRule): for eion in erev: if eion.startswith('e'): if 'ions' not in cellRule['secs'][sec]: - print(sec, eion) cellRule['secs'][sec]['ions'] = {} cellRule['secs'][sec]['ions'][eion[1:]] = {'e': erev[eion]} @@ -1030,16 +1031,16 @@ def setCellRuleDynamicParamsFromJson(self, cell_dynamic_params, cellRule): # Parse SONATA hdf5 # ------------------------------------------------------------------------------------------------------------ def parse_group(self, g): - print("+++++++++++++++Parsing group: "+ str(g)+", name: "+g._v_name) + logger.info("+++++++++++++++Parsing group: "+ str(g)+", name: "+g._v_name) for node in g: - print(" ------Sub node: %s, class: %s, name: %s (parent: %s)" % (node,node._c_classid,node._v_name, g._v_name)) + logger.info(" ------Sub node: %s, class: %s, name: %s (parent: %s)" % (node,node._c_classid,node._v_name, g._v_name)) if node._c_classid == 'GROUP': if g._v_name=='nodes': node_id = node._v_name.replace('-','_') self.current_node = node_id - print('# CURRENT NODE: %s'%(self.current_node)) + logger.info('# CURRENT NODE: %s'%(self.current_node)) self.cell_info[self.current_node] = {} self.cell_info[self.current_node]['types'] = {} self.cell_info[self.current_node]['type_numbers'] = {} @@ -1051,20 +1052,20 @@ def parse_group(self, g): if g._v_name==self.current_node: node_group = node._v_name self.current_node_group = node_group - print('# CURRENT NODE GROUP: 
%s'%(self.current_node)) + logger.info('# CURRENT NODE GROUP: %s'%(self.current_node)) self.cell_info[self.current_node][self.current_node_group] = {} self.cell_info[self.current_node][self.current_node_group]['locations'] = {} if g._v_name=='edges': edge_id = node._v_name.replace('-','_') - print(' Found edge: %s'%edge_id) + logger.info(' Found edge: %s'%edge_id) self.current_edge = edge_id self.conn_info[self.current_edge] = {} if g._v_name==self.current_edge: self.current_pre_node = g._v_name.split('_to_')[0] self.current_post_node = g._v_name.split('_to_')[1] - print(' Found edge %s -> %s'%(self.current_pre_node, self.current_post_node)) + logger.info(' Found edge %s -> %s'%(self.current_pre_node, self.current_post_node)) self.conn_info[self.current_edge]['pre_node'] = self.current_pre_node self.conn_info[self.current_edge]['post_node'] = self.current_post_node @@ -1083,7 +1084,7 @@ def _is_dataset(self, node): def parse_dataset(self, d): - print("Parsing dataset/array: %s; at node: %s, node_group %s"%(str(d), self.current_node, self.current_node_group)) + logger.info("Parsing dataset/array: %s; at node: %s, node_group %s"%(str(d), self.current_node, self.current_node_group)) if self.current_node_group: for i in range(0, d.shape[0]): @@ -1124,7 +1125,7 @@ def parse_dataset(self, d): elif d.name=='syn_weight': self.conn_info[self.current_edge]['syn_weight'] = [i for i in d] else: - print("Unhandled dataset: %s"%d.name) + logger.warning("Unhandled dataset: %s"%d.name) # ------------------------------------------------------------------------------------------------------------ diff --git a/netpyne/logger.py b/netpyne/logger.py new file mode 100644 index 000000000..b0ca99711 --- /dev/null +++ b/netpyne/logger.py @@ -0,0 +1,55 @@ +import logging +import sys + +class Logger(): + ''' + The class we use to print out our logs throughout netpyne. 
+ + The default level of logging verbosity python sets is logging.WARNING, + but netpyne users are likely to want it more verbose - thus we set it to logging.INFO. + + The user can change the default logging levels with: + import logging + logging.getLogger('netpyne').setLevel(logging.WARNING) + Meaningful levels are: logging.DEBUG, logging.INFO, logging.WARNING and logging.ERROR. + + The user can separately control whether additional timing statements are printed out via the + specs.SimConfig({ timing: True/False }) + option. + ''' + + def __init__(self): + self.netpyne_logger = logging.getLogger('netpyne') + self.netpyne_logger.setLevel(logging.INFO) + + console_handler = logging.StreamHandler(sys.stdout) + console_handler.setFormatter(logging.Formatter('%(levelname)-8s %(message)s')) + + self.netpyne_logger.addHandler(console_handler) + + def debug(self, *args, **kwargs): + self.netpyne_logger.debug(*args, **kwargs) + + def info(self, *args, **kwargs): + self.netpyne_logger.info(*args, **kwargs) + + def warning(self, *args, **kwargs): + self.netpyne_logger.warning(*args, **kwargs) + + def timing(self, *args, **kwargs): + from . 
import sim if sim.cfg.timing: self.netpyne_logger.info(*args, **kwargs) # The methods we don't yet use: # # def error(self, *args, **kwargs): # self.netpyne_logger.error(*args, **kwargs) # # def exception(self, *args, **kwargs): # self.netpyne_logger.exception(*args, **kwargs) # # def critical(self, *args, **kwargs): # self.netpyne_logger.critical(*args, **kwargs) +logger = Logger() diff --git a/netpyne/network/conn.py b/netpyne/network/conn.py index 1fa4b9926..73faeb16e 100644 --- a/netpyne/network/conn.py +++ b/netpyne/network/conn.py @@ -4,7 +4,6 @@ """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import @@ -23,7 +22,7 @@ from array import array as arrayFast from numbers import Number from numpy import array, sin, cos, tan, exp, sqrt, mean, inf, dstack, unravel_index, argsort, zeros, ceil, copy - +from netpyne.logger import logger # ----------------------------------------------------------------------------- # Connect Cells @@ -47,7 +46,7 @@ def connectCells(self): # Instantiate network connections based on the connectivity rules defined in params sim.timing('start', 'connectTime') if sim.rank==0: - print('Making connections...') + logger.info('Making connections...') if sim.nhosts > 1: # Gather tags from all cells allCellTags = sim._gatherAllCellTags() @@ -93,7 +92,7 @@ def connectCells(self): if sim.cfg.printSynsAfterRule: nodeSynapses = sum([len(cell.conns) for cell in sim.net.cells]) - print((' Number of synaptic contacts on node %i after conn rule %s: %i ' % (sim.rank, connParamLabel, nodeSynapses))) + logger.info(' Number of synaptic contacts on node %i after conn rule %s: %i ' % (sim.rank, connParamLabel, nodeSynapses)) # add presynaptoc gap junctions @@ -133,12 +132,12 @@ def connectCells(self): else: nodeConnections = nodeSynapses - print((' Number of connections on node %i: %i ' % (sim.rank, nodeConnections))) + logger.info(' Number of connections
on node %i: %i ' % (sim.rank, nodeConnections)) if nodeSynapses != nodeConnections: - print((' Number of synaptic contacts on node %i: %i ' % (sim.rank, nodeSynapses))) + logger.info(' Number of synaptic contacts on node %i: %i ' % (sim.rank, nodeSynapses)) sim.pc.barrier() sim.timing('stop', 'connectTime') - if sim.rank == 0 and sim.cfg.timing: print((' Done; cell connection time = %0.2f s.' % sim.timingData['connectTime'])) + if sim.rank == 0: logger.timing(' Done; cell connection time = %0.2f s.' % sim.timingData['connectTime']) return [cell.conns for cell in self.cells] @@ -269,7 +268,7 @@ def _disynapticBiasProb(self, origProbability, bias, prePreGids, postPreGids, di elif disynCounter > -maxImbalance: probability = max(origProbability - (min(origProbability + bias, 1.0) - origProbability), 0.0) disynCounter -= 1 - #print disynCounter, origProbability, probability + #logger.info disynCounter, origProbability, probability return probability, disynCounter @@ -343,7 +342,7 @@ def fullConn(self, preCellsTags, postCellsTags, connParam): from .. import sim - if sim.cfg.verbose: print('Generating set of all-to-all connections (rule: %s) ...' % (connParam['label'])) + logger.debug('Generating set of all-to-all connections (rule: %s) ...' % (connParam['label'])) # get list of params that have a lambda function paramsStrFunc = [param for param in [p+'Func' for p in self.connStringFuncParams] if param in connParam] @@ -431,7 +430,7 @@ def probConn(self, preCellsTags, postCellsTags, connParam): from .. import sim - if sim.cfg.verbose: print('Generating set of probabilistic connections (rule: %s) ...' % (connParam['label'])) + logger.debug('Generating set of probabilistic connections (rule: %s) ...' 
% (connParam['label'])) allRands = self.generateRandsPrePost(preCellsTags, postCellsTags) @@ -463,8 +462,8 @@ def probConn(self, preCellsTags, postCellsTags, connParam): # standard probabilistic conenctions else: - # print('rank %d'%(sim.rank)) - # print(connParam) + # logger.info('rank %d'%(sim.rank)) + # logger.info(connParam) # calculate the conn preGids of the each pre and post cell # for postCellGid,postCellTags in sorted(postCellsTags.items()): # for each postsyn cell for postCellGid,postCellTags in postCellsTags.items(): # for each postsyn cell # for each postsyn cell @@ -550,7 +549,7 @@ def convConn(self, preCellsTags, postCellsTags, connParam): from .. import sim - if sim.cfg.verbose: print('Generating set of convergent connections (rule: %s) ...' % (connParam['label'])) + logger.debug('Generating set of convergent connections (rule: %s) ...' % (connParam['label'])) # get list of params that have a lambda function paramsStrFunc = [param for param in [p+'Func' for p in self.connStringFuncParams] if param in connParam] @@ -619,7 +618,7 @@ def divConn(self, preCellsTags, postCellsTags, connParam): from .. import sim - if sim.cfg.verbose: print('Generating set of divergent connections (rule: %s) ...' % (connParam['label'])) + logger.debug('Generating set of divergent connections (rule: %s) ...' % (connParam['label'])) # get list of params that have a lambda function paramsStrFunc = [param for param in [p+'Func' for p in self.connStringFuncParams] if param in connParam] @@ -686,7 +685,7 @@ def fromListConn(self, preCellsTags, postCellsTags, connParam): from .. import sim - if sim.cfg.verbose: print('Generating set of connections from list (rule: %s) ...' % (connParam['label'])) + logger.debug('Generating set of connections from list (rule: %s) ...' 
% (connParam['label'])) orderedPreGids = sorted(preCellsTags) orderedPostGids = sorted(postCellsTags) diff --git a/netpyne/network/modify.py b/netpyne/network/modify.py index c9a48f0f1..0c26b20f1 100644 --- a/netpyne/network/modify.py +++ b/netpyne/network/modify.py @@ -4,11 +4,10 @@ """ -from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import - +from netpyne.logger import logger # ----------------------------------------------------------------------------- # Modify cell params @@ -43,7 +42,7 @@ def modifyCells(self, params, updateMasterAllCells=False): # Instantiate network connections based on the connectivity rules defined in params sim.timing('start', 'modifyCellsTime') if sim.rank==0: - print('Modfying cell parameters...') + logger.info('Modifying cell parameters...') for cell in self.cells: cell.modify(params) @@ -52,7 +51,7 @@ def modifyCells(self, params, updateMasterAllCells=False): sim._gatherCells() # update allCells sim.timing('stop', 'modifyCellsTime') - if sim.rank == 0 and sim.cfg.timing: print((' Done; cells modification time = %0.2f s.' % sim.timingData['modifyCellsTime'])) + if sim.rank == 0: logger.timing(' Done; cells modification time = %0.2f s.' 
% sim.timingData['modifyCellsTime']) # ----------------------------------------------------------------------------- @@ -86,7 +85,7 @@ def modifySynMechs(self, params, updateMasterAllCells=False): # Instantiate network connections based on the connectivity rules defined in params sim.timing('start', 'modifySynMechsTime') if sim.rank==0: - print('Modfying synaptic mech parameters...') + logger.info('Modifying synaptic mech parameters...') for cell in self.cells: cell.modifySynMechs(params) @@ -95,7 +94,7 @@ def modifySynMechs(self, params, updateMasterAllCells=False): sim._gatherCells() # update allCells sim.timing('stop', 'modifySynMechsTime') - if sim.rank == 0 and sim.cfg.timing: print((' Done; syn mechs modification time = %0.2f s.' % sim.timingData['modifySynMechsTime'])) + if sim.rank == 0: logger.timing(' Done; syn mechs modification time = %0.2f s.' % sim.timingData['modifySynMechsTime']) # ----------------------------------------------------------------------------- @@ -129,7 +128,7 @@ def modifyConns(self, params, updateMasterAllCells=False): # Instantiate network connections based on the connectivity rules defined in params sim.timing('start', 'modifyConnsTime') if sim.rank==0: - print('Modfying connection parameters...') + logger.info('Modifying connection parameters...') for cell in self.cells: cell.modifyConns(params) @@ -138,7 +137,7 @@ def modifyConns(self, params, updateMasterAllCells=False): sim._gatherCells() # update allCells sim.timing('stop', 'modifyConnsTime') - if sim.rank == 0 and sim.cfg.timing: print((' Done; connections modification time = %0.2f s.' % sim.timingData['modifyConnsTime'])) + if sim.rank == 0: logger.timing(' Done; connections modification time = %0.2f s.' 
% sim.timingData['modifyConnsTime']) # ----------------------------------------------------------------------------- @@ -172,7 +171,7 @@ def modifyStims(self, params, updateMasterAllCells=False): # Instantiate network connections based on the connectivity rules defined in params sim.timing('start', 'modifyStimsTime') if sim.rank==0: - print('Modfying stimulation parameters...') + logger.info('Modifying stimulation parameters...') for cell in self.cells: cell.modifyStims(params) @@ -181,4 +180,4 @@ def modifyStims(self, params, updateMasterAllCells=False): sim._gatherCells() # update allCells sim.timing('stop', 'modifyStimsTime') - if sim.rank == 0 and sim.cfg.timing: print((' Done; stims modification time = %0.2f s.' % sim.timingData['modifyStimsTime'])) + if sim.rank == 0: logger.timing(' Done; stims modification time = %0.2f s.' % sim.timingData['modifyStimsTime']) diff --git a/netpyne/network/netrxd.py b/netpyne/network/netrxd.py index 24a2cdd74..4c90a6bd0 100644 --- a/netpyne/network/netrxd.py +++ b/netpyne/network/netrxd.py @@ -4,11 +4,12 @@ """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import +from netpyne.logger import logger + from builtins import dict from builtins import range @@ -23,7 +24,7 @@ try: from neuron.crxd import rxdmath except: - print('Warning: Could not import rxdmath module') + logger.warning('Could not import rxdmath module') # ----------------------------------------------------------------------------- # Add RxD @@ -52,9 +53,9 @@ def addRxD (self, nthreads=None): sim.net.rxd = {'species': {}, 'regions': {}} # dictionary for rxd if nthreads: rxd.nthread(nthreads) - print('Using %d threads for RxD' % (nthreads)) + logger.info('Using %d threads for RxD' % (nthreads)) except: - print('cRxD module not available') + logger.warning('cRxD module not available') return -1 else: return -1 @@ -63,7 +64,7 @@ def addRxD (self, nthreads=None): # 
Instantiate network connections based on the connectivity rules defined in params sim.timing('start', 'rxdTime') if sim.rank==0: - print('Adding RxD...') + logger.info('Adding RxD...') # make copy of Python structure #if sim.cfg.createPyStruct: -- don't make conditional since need to have Python structure @@ -91,7 +92,7 @@ def addRxD (self, nthreads=None): sim.pc.barrier() sim.timing('stop', 'rxdTime') - if sim.rank == 0 and sim.cfg.timing: print((' Done; RxD setup time = %0.2f s.' % sim.timingData['rxdTime'])) + if sim.rank == 0: logger.timing(' Done; RxD setup time = %0.2f s.' % sim.timingData['rxdTime']) return sim.net.rxd @@ -134,7 +135,7 @@ def _addRegions(self, params): geometry['hObj'] = getattr(rxd, param['geometry']['class'])(**param['geometry']['args']) geometry = geometry['hObj'] except: - print(' Error creating %s Region geometry using %s class'%(label, param['geometry']['class'])) + logger.warning(' Error creating %s Region geometry using %s class'%(label, param['geometry']['class'])) elif isinstance(param['geometry'], str): geometry = getattr(rxd, param['geometry'])() @@ -172,7 +173,7 @@ def _addRegions(self, params): name=label) else: self.rxd['regions'][label]['hObj'] = None - print(' Created Region %s'%(label)) + logger.info(' Created Region %s'%(label)) # ----------------------------------------------------------------------------- @@ -183,7 +184,7 @@ def _addExtracellularRegion(self, label, param): try: rxd.options.enable.extracellular = True except: - print('Error enabling extracellular rxd') + logger.warning('Error enabling extracellular rxd') return -1 # (xlo, ylo, zlo, xhi, yhi, zhi, dx, volume_fraction=1, tortuosity=1) @@ -191,7 +192,7 @@ def _addExtracellularRegion(self, label, param): requiredArgs = ['xlo', 'ylo', 'zlo', 'xhi', 'yhi', 'zhi', 'dx'] for arg in requiredArgs: if arg not in param: - print(' Error creating Extracellular object %s: %s parameter was missing'%(label, arg)) + logger.warning(' Error creating Extracellular object %s: 
%s parameter was missing'%(label, arg)) if 'volume_fraction' not in param: param['volume_fraction'] = 1 @@ -202,7 +203,7 @@ def _addExtracellularRegion(self, label, param): # call rxd method to create Region self.rxd['regions'][label]['hObj'] = rxd.Extracellular(**{k:v for k,v in param.items() if k != 'extracellular'}) - print(' Created Extracellular Region %s'%(label)) + logger.info(' Created Extracellular Region %s'%(label)) # ----------------------------------------------------------------------------- @@ -214,7 +215,7 @@ def _addSpecies(self, params): for label, param in params.items(): # regions if 'regions' not in param: - print(' Error creating Species %s: "regions" parameter was missing'%(label)) + logger.warning(' Error creating Species %s: "regions" parameter was missing'%(label)) continue if not isinstance(param['regions'], list): param['regions'] = [param['regions']] @@ -222,7 +223,7 @@ def _addSpecies(self, params): nrnRegions = [self.rxd['regions'][region]['hObj'] for region in param['regions'] if self.rxd['regions'][region]['hObj'] != None] except: - print(' Error creating Species %s: could not find regions %s'%(label, param['regions'])) + logger.warning(' Error creating Species %s: could not find regions %s'%(label, param['regions'])) # d if 'd' not in param: @@ -246,7 +247,7 @@ def _addSpecies(self, params): exec(funcStr, {'rxd': rxd}, {'sim': sim}) initial = sim.net.rxd["species"][label]["initialFunc"] except: - print(' Error creating Species %s: cannot evaluate "initial" expression -- "%s"'%(label, param['initial'])) + logger.warning(' Error creating Species %s: cannot evaluate "initial" expression -- "%s"'%(label, param['initial'])) continue else: initial = param['initial'] @@ -274,7 +275,7 @@ def _addSpecies(self, params): ecs_boundary_conditions=param['ecs_boundary_conditions']) else: self.rxd['species'][label]['hObj'] = None - print(' Created Species %s'%(label)) + logger.info(' Created Species %s'%(label)) # 
----------------------------------------------------------------------------- @@ -286,14 +287,14 @@ def _addStates(self, params): for label, param in params.items(): # regions if 'regions' not in param: - print(' Error creating State %s: "regions" parameter was missing'%(label)) + logger.warning(' Error creating State %s: "regions" parameter was missing'%(label)) continue if not isinstance(param['regions'], list): param['regions'] = [param['regions']] try: nrnRegions = [self.rxd['regions'][region]['hObj'] for region in param['regions'] if self.rxd['regions'][region]['hObj'] != None] except: - print(' Error creating State %s: could not find regions %s'%(label, param['regions'])) + logger.warning(' Error creating State %s: could not find regions %s'%(label, param['regions'])) # initial if 'initial' not in param: @@ -309,7 +310,7 @@ def _addStates(self, params): exec(funcStr, {'rxd': rxd}, {'sim': sim}) initial = sim.net.rxd["species"][label]["initialFunc"] except: - print(' Error creating State %s: cannot evaluate "initial" expression -- "%s"'%(label, param['initial'])) + logger.warning(' Error creating State %s: cannot evaluate "initial" expression -- "%s"'%(label, param['initial'])) continue else: initial = param['initial'] @@ -324,7 +325,7 @@ def _addStates(self, params): initial=initial, name=name) else: self.rxd['states'][label]['hObj'] = None - print(' Created State %s'%(label)) + logger.info(' Created State %s'%(label)) # ----------------------------------------------------------------------------- # Add RxD parameters @@ -335,14 +336,14 @@ def _addParameters(self, params): for label, param in params.items(): # regions if 'regions' not in param: - print(' Error creating State %s: "regions" parameter was missing'%(label)) + logger.warning(' Error creating State %s: "regions" parameter was missing'%(label)) continue if not isinstance(param['regions'], list): param['regions'] = [param['regions']] try: nrnRegions = [self.rxd['regions'][region]['hObj'] for region 
in param['regions']] except: - print(' Error creating State %s: could not find regions %s'%(label, param['regions'])) + logger.warning(' Error creating State %s: could not find regions %s'%(label, param['regions'])) if 'name' not in param: param['name'] = None @@ -363,7 +364,7 @@ def _addParameters(self, params): exec(funcStr, {'rxd': rxd}, {'sim': sim}) value = sim.net.rxd["parameters"][label]["initialFunc"] except: - print(' Error creating Parameter %s: cannot evaluate "value" expression -- "%s"'%(label, param['value'])) + logger.warning(' Error creating Parameter %s: cannot evaluate "value" expression -- "%s"'%(label, param['value'])) continue else: value = param['value'] @@ -373,7 +374,7 @@ def _addParameters(self, params): value=value, charge=param['charge'], name=param['name']) - print(' Created Parameter %s'%(label)) + logger.info(' Created Parameter %s'%(label)) # ----------------------------------------------------------------------------- # Add RxD reactions @@ -390,7 +391,7 @@ def _addReactions(self, params, multicompartment=False): # reactant if 'reactant' not in param: - print(' Error creating %s %s: "reactant" parameter was missing'%(reactionStr,label)) + logger.warning(' Error creating %s %s: "reactant" parameter was missing'%(reactionStr,label)) continue reactantStr = self._replaceRxDStr(param['reactant']) try: @@ -401,7 +402,7 @@ def _addReactions(self, params, multicompartment=False): # product if 'product' not in param: - print(' Error creating %s %s: "product" parameter was missing'%(reactionStr,label)) + logger.warning(' Error creating %s %s: "product" parameter was missing'%(reactionStr,label)) continue productStr = self._replaceRxDStr(param['product']) #from IPython import embed @@ -411,7 +412,7 @@ def _addReactions(self, params, multicompartment=False): # rate_f if 'rate_f' not in param: - print(' Error creating %s %s: "scheme" parameter was missing'%(reactionStr,label)) + logger.warning(' Error creating %s %s: "scheme" parameter was 
missing'%(reactionStr,label)) continue if isinstance(param['rate_f'], basestring): rate_fStr = self._replaceRxDStr(param['rate_f']) @@ -440,7 +441,7 @@ def _addReactions(self, params, multicompartment=False): try: nrnRegions = [self.rxd['regions'][region]['hObj'] for region in param['regions'] if region is not None and self.rxd['regions'][region]['hObj'] != None] except: - print(' Error creating %s %s: could not find regions %s'%(reactionStr, label, param['regions'])) + logger.warning(' Error creating %s %s: could not find regions %s'%(reactionStr, label, param['regions'])) # membrane if 'membrane' not in param: @@ -482,7 +483,7 @@ def _addReactions(self, params, multicompartment=False): membrane=nrnMembraneRegion) - print(' Created %s %s'%(reactionStr, label)) + logger.info(' Created %s %s'%(reactionStr, label)) # ----------------------------------------------------------------------------- # Add RxD reactions @@ -496,19 +497,19 @@ def _addRates(self, params): # species if 'species' not in param: - print(' Error creating Rate %s: "species" parameter was missing'%(label)) + logger.warning(' Error creating Rate %s: "species" parameter was missing'%(label)) continue if isinstance(param['species'], basestring): speciesStr = self._replaceRxDStr(param['species']) exec('species = ' + speciesStr, dynamicVars) if 'species' not in dynamicVars: dynamicVars['species'] # fix for python 2 else: - print(' Error creating Rate %s: "species" parameter should be a string'%(param['species'])) + logger.warning(' Error creating Rate %s: "species" parameter should be a string'%(param['species'])) continue # rate if 'rate' not in param: - print(' Error creating Rate %s: "rate" parameter was missing'%(label)) + logger.warning(' Error creating Rate %s: "rate" parameter was missing'%(label)) continue if isinstance(param['rate'], basestring): rateStr = self._replaceRxDStr(param['rate']) @@ -523,7 +524,7 @@ def _addRates(self, params): try: nrnRegions = [self.rxd['regions'][region]['hObj'] 
for region in param['regions'] if region is not None and self.rxd['regions'][region]['hObj'] != None] except: - print(' Error creating Rate %s: could not find regions %s'%(label, param['regions'])) + logger.warning(' Error creating Rate %s: could not find regions %s'%(label, param['regions'])) # membrane_flux if 'membrane_flux' not in param: @@ -534,7 +535,7 @@ def _addRates(self, params): regions=nrnRegions, membrane_flux=param['membrane_flux']) - print(' Created Rate %s'%(label)) + logger.info(' Created Rate %s'%(label)) # ----------------------------------------------------------------------------- # Replace RxD param strings with expression diff --git a/netpyne/network/network.py b/netpyne/network/network.py index 2773d078b..ce14f65f2 100644 --- a/netpyne/network/network.py +++ b/netpyne/network/network.py @@ -4,7 +4,6 @@ """ -from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import @@ -13,6 +12,7 @@ standard_library.install_aliases() from ..specs import ODict from neuron import h # import NEURON +from netpyne.logger import logger class Network(object): """ @@ -78,7 +78,7 @@ def createCells(self): sim.pc.barrier() sim.timing('start', 'createTime') if sim.rank==0: - print(("\nCreating network of %i cell populations on %i hosts..." % (len(self.pops), sim.nhosts))) + logger.info("\nCreating network of %i cell populations on %i hosts..." 
% (len(self.pops), sim.nhosts)) self._setDiversityRanges() # update fractions for rules @@ -86,14 +86,14 @@ def createCells(self): newCells = ipop.createCells() # create cells for this pop using Pop method self.cells.extend(newCells) # add to list of cells sim.pc.barrier() - if sim.rank==0 and sim.cfg.verbose: print(('Instantiated %d cells of population %s'%(len(newCells), ipop.tags['pop']))) + if sim.rank==0: logger.debug('Instantiated %d cells of population %s'%(len(newCells), ipop.tags['pop'])) if self.params.defineCellShapes: self.defineCellShapes() - print((' Number of cells on node %i: %i ' % (sim.rank,len(self.cells)))) + logger.info(' Number of cells on node %i: %i ' % (sim.rank,len(self.cells))) sim.pc.barrier() sim.timing('stop', 'createTime') - if sim.rank == 0 and sim.cfg.timing: print((' Done; cell creation time = %0.2f s.' % sim.timingData['createTime'])) + if sim.rank == 0: logger.timing(' Done; cell creation time = %0.2f s.' % sim.timingData['createTime']) return self.cells diff --git a/netpyne/network/pop.py b/netpyne/network/pop.py index 5c49b1683..3bc476578 100644 --- a/netpyne/network/pop.py +++ b/netpyne/network/pop.py @@ -4,7 +4,6 @@ """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import @@ -20,7 +19,7 @@ from numpy import pi, sqrt, sin, cos, arccos import numpy as np from neuron import h # Import NEURON - +from netpyne.logger import logger ############################################################################### # @@ -60,8 +59,7 @@ def _distributeCells(self, numCellsPop): if sim.nextHost>=sim.nhosts: sim.nextHost=0 - if sim.cfg.verbose: - print(("Distributed population of %i cells on %s hosts: %s, next: %s"%(numCellsPop,sim.nhosts,hostCells,sim.nextHost))) + logger.debug("Distributed population of %i cells on %s hosts: %s, next: %s"%(numCellsPop,sim.nhosts,hostCells,sim.nextHost)) return hostCells @@ -89,7 +87,7 @@ def createCells(self):
# not enough tags to create cells else: self.tags['numCells'] = 1 - print('Warninig: number or density of cells not specified for population %s; defaulting to numCells = 1' % (self.tags['pop'])) + logger.warning('Number or density of cells not specified for population %s; defaulting to numCells = 1' % (self.tags['pop'])) cells = self.createCellsFixedNum() return cells @@ -174,7 +172,7 @@ def createCellsFixedNum (self): cellTags['params']['rates'] = [self.tags['dynamicRates']['rates'], self.tags['dynamicRates']['times']] # 1D list (same for all) cells.append(self.cellModelClass(gid, cellTags)) # instantiate Cell object - if sim.cfg.verbose: print(('Cell %d/%d (gid=%d) of pop %s, on node %d, '%(i, sim.net.params.scale * self.tags['numCells']-1, gid, self.tags['pop'], sim.rank))) + logger.debug('Cell %d/%d (gid=%d) of pop %s, on node %d, '%(i, sim.net.params.scale * self.tags['numCells']-1, gid, self.tags['pop'], sim.rank)) sim.net.lastGid = sim.net.lastGid + self.tags['numCells'] return cells @@ -214,7 +212,7 @@ def createCellsDensity (self): strFunc = self.tags['density'] # string containing function strVars = [var for var in ['xnorm', 'ynorm', 'znorm'] if var in strFunc] # get list of variables used if not len(strVars) == 1: - print('Error: density function (%s) for population %s does not include "xnorm", "ynorm" or "znorm"'%(strFunc,self.tags['pop'])) + logger.warning('Error: density function (%s) for population %s does not include "xnorm", "ynorm" or "znorm"'%(strFunc,self.tags['pop'])) return coordFunc = strVars[0] lambdaStr = 'lambda ' + coordFunc +': ' + strFunc # convert to lambda function @@ -234,9 +232,9 @@ def createCellsDensity (self): makethiscell = locsProb>allrands # perform test to see whether or not this cell should be included (pruning based on density func) funcLocs = [locsAll[i] for i in range(len(locsAll)) if i in np.array(makethiscell.nonzero()[0],dtype='int')] # keep only subset of yfuncLocs based on density func self.tags['numCells'] = 
len(funcLocs) # final number of cells after pruning of location values based on density func - if sim.cfg.verbose: print('Volume=%.2f, maxDensity=%.2f, maxCells=%.0f, numCells=%.0f'%(volume, maxDensity, maxCells, self.tags['numCells'])) + logger.debug('Volume=%.2f, maxDensity=%.2f, maxCells=%.0f, numCells=%.0f'%(volume, maxDensity, maxCells, self.tags['numCells'])) else: - print('Error: Density functions are only implemented for cuboid shaped networks') + logger.warning('Error: Density functions are only implemented for cuboid shaped networks') exit(0) else: # NO ynorm-dep self.tags['numCells'] = int(self.tags['density'] * volume) # = density (cells/mm^3) * volume (mm^3) @@ -278,7 +276,7 @@ def createCellsDensity (self): if funcLocs and coordFunc == coord+'norm': # if locations for this coordinate calculated using density function randLocs[:,icoord] = funcLocs - if sim.cfg.verbose and not funcLocs: print('Volume=%.4f, density=%.2f, numCells=%.0f'%(volume, self.tags['density'], self.tags['numCells'])) + if not funcLocs: logger.debug('Volume=%.4f, density=%.2f, numCells=%.0f'%(volume, self.tags['density'], self.tags['numCells'])) for i in self._distributeCells(self.tags['numCells'])[sim.rank]: gid = sim.net.lastGid+i @@ -292,8 +290,7 @@ def createCellsDensity (self): cellTags['y'] = sizeY * randLocs[i,1] # calculate y location (um) cellTags['z'] = sizeZ * randLocs[i,2] # calculate z location (um) cells.append(self.cellModelClass(gid, cellTags)) # instantiate Cell object - if sim.cfg.verbose: - print(('Cell %d/%d (gid=%d) of pop %s, pos=(%2.f, %2.f, %2.f), on node %d, '%(i, self.tags['numCells']-1, gid, self.tags['pop'],cellTags['x'], cellTags['y'], cellTags['z'], sim.rank))) + logger.debug('Cell %d/%d (gid=%d) of pop %s, pos=(%2.f, %2.f, %2.f), on node %d, '%(i, self.tags['numCells']-1, gid, self.tags['pop'],cellTags['x'], cellTags['y'], cellTags['z'], sim.rank)) sim.net.lastGid = sim.net.lastGid + self.tags['numCells'] return cells @@ -325,7 +322,7 @@ def 
createCellsList (self): if 'cellModel' in self.tags.keys() and self.tags['cellModel'] == 'Vecstim': # if VecStim, copy spike times to params cellTags['params']['spkTimes'] = self.tags['cellsList'][i]['spkTimes'] cells.append(self.cellModelClass(gid, cellTags)) # instantiate Cell object - if sim.cfg.verbose: print(('Cell %d/%d (gid=%d) of pop %d, on node %d, '%(i, self.tags['numCells']-1, gid, i, sim.rank))) + logger.debug('Cell %d/%d (gid=%d) of pop %d, on node %d, '%(i, self.tags['numCells']-1, gid, i, sim.rank)) sim.net.lastGid = sim.net.lastGid + len(self.tags['cellsList']) return cells @@ -375,7 +372,7 @@ def createCellsGrid (self): cellTags['y'] = gridLocs[i][1] # set y location (um) cellTags['z'] = gridLocs[i][2] # set z location (um) cells.append(self.cellModelClass(gid, cellTags)) # instantiate Cell object - if sim.cfg.verbose: print(('Cell %d/%d (gid=%d) of pop %s, on node %d, '%(i, numCells, gid, self.tags['pop'], sim.rank))) + logger.debug('Cell %d/%d (gid=%d) of pop %s, on node %d, '%(i, numCells, gid, self.tags['pop'], sim.rank)) sim.net.lastGid = sim.net.lastGid + numCells return cells @@ -416,7 +413,7 @@ def _setCellClass (self): sim.net.params.popTagsCopiedToCells.append('params') except: if getattr(self.tags, 'cellModel', None) in ['NetStim', 'DynamicNetStim', 'VecStim', 'IntFire1', 'IntFire2', 'IntFire4']: - print('Warning: could not find %s point process mechanism required for population %s' % (cellModel, self.tags['pop'])) + logger.warning('Could not find %s point process mechanism required for population %s' % (cellModel, self.tags['pop'])) self.cellModelClass = sim.CompartCell # otherwise assume has sections and some cellParam rules apply to it; use CompartCell diff --git a/netpyne/network/stim.py b/netpyne/network/stim.py index 6e4bd6e85..4eb603a00 100644 --- a/netpyne/network/stim.py +++ b/netpyne/network/stim.py @@ -4,7 +4,6 @@ """ -from __future__ import print_function from __future__ import unicode_literals from __future__ import division 
from __future__ import absolute_import @@ -12,6 +11,7 @@ from future import standard_library standard_library.install_aliases() from numbers import Number +from netpyne.logger import logger try: basestring except NameError: @@ -40,7 +40,7 @@ def addStims(self): sim.timing('start', 'stimsTime') if self.params.stimSourceParams and self.params.stimTargetParams: if sim.rank==0: - print('Adding stims...') + logger.info('Adding stims...') if sim.nhosts > 1: # Gather tags from all cells allCellTags = sim._gatherAllCellTags() @@ -115,10 +115,10 @@ def addStims(self): else: postCell.addStim(params) # call cell method to add connection - print((' Number of stims on node %i: %i ' % (sim.rank, sum([len(cell.stims) for cell in self.cells])))) + logger.info(' Number of stims on node %i: %i ' % (sim.rank, sum([len(cell.stims) for cell in self.cells]))) sim.pc.barrier() sim.timing('stop', 'stimsTime') - if sim.rank == 0 and sim.cfg.timing: print((' Done; cell stims creation time = %0.2f s.' % sim.timingData['stimsTime'])) + if sim.rank == 0: logger.timing(' Done; cell stims creation time = %0.2f s.' 
% sim.timingData['stimsTime']) return [cell.stims for cell in self.cells] diff --git a/netpyne/network/subconn.py b/netpyne/network/subconn.py index 2fa543f60..68f14a4ae 100644 --- a/netpyne/network/subconn.py +++ b/netpyne/network/subconn.py @@ -4,7 +4,6 @@ """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import @@ -19,6 +18,7 @@ standard_library.install_aliases() import numpy as np from neuron import h +from netpyne.logger import logger # ----------------------------------------------------------------------------- # Calculate distance between 2 segments @@ -61,7 +61,7 @@ def _posFromLoc(self, sec, x): if h.arc3d(ii) >= s: b = ii break - if b == -1: print("an error occurred in pointFromLoc, SOMETHING IS NOT RIGHT") + if b == -1: logger.warning("An error occurred in pointFromLoc, SOMETHING IS NOT RIGHT") if h.arc3d(b) == s: # shortcut x, y, z = h.x3d(b), h.y3d(b), h.z3d(b) @@ -99,7 +99,7 @@ def _interpolateSegmentSigma(self, cell, secList, gridX, gridY, gridSigma): sigma_x2_y2 = gridSigma[i2][j2] if x1 == x2 or y1 == y2: - print("ERROR in closest grid points: ", secName, x1, x2, y1, y2) + logger.warning("ERROR in closest grid points: " + secName + " " + str(x1) + " " + str(x2) + " " + str(y1) + " " + str(y2)) else: # bilinear interpolation, see http://en.wikipedia.org/wiki/Bilinear_interpolation (fixed bug from Ben Suter's code) sigma = ((sigma_x1_y1*abs(x2-x)*abs(y2-y) + sigma_x2_y1*abs(x-x1)*abs(y2-y) + sigma_x1_y2*abs(x2-x)*abs(y-y1) + sigma_x2_y2*abs(x-x1)*abs(y-y1))/(abs(x2-x1)*abs(y2-y1))) @@ -115,7 +115,7 @@ def _interpolateSegmentSigma(self, cell, secList, gridX, gridY, gridSigma): sigma_y2 = gridSigma[j2] if y1 == y2: - print("ERROR in closest grid points: ", secName, y1, y2) + logger.warning("ERROR in closest grid points: " + secName + " " + str(y1) + " " + str(y2)) else: # linear interpolation, see http://en.wikipedia.org/wiki/Bilinear_interpolation sigma = 
((sigma_y1*abs(y2-y) + sigma_y2*abs(y-y1)) / abs(y2-y1)) @@ -154,7 +154,7 @@ def subcellularConn(self, allCellTags, allPopTags): from .. import sim sim.timing('start', 'subConnectTime') - print(' Distributing synapses based on subcellular connectivity rules...') + logger.info(' Distributing synapses based on subcellular connectivity rules...') for subConnParamTemp in list(self.params.subConnParams.values()): # for each conn rule or parameter set subConnParam = subConnParamTemp.copy() @@ -188,7 +188,7 @@ def subcellularConn(self, allCellTags, allPopTags): connGroup['synMech'] = '__grouped__'+connGroup['synMech'] connsGroup[connGroupLabel] = connGroup except: - print(' Warning: Grouped synMechs %s not found' % (str(connGroup))) + logger.warning(' Grouped synMechs %s not found' % (str(connGroup))) else: conns = allConns @@ -265,14 +265,14 @@ def subcellularConn(self, allCellTags, allPopTags): else: secOrig = list(postCell.secs.keys())[0] - #print self.fromtodistance(postCell.secs[secOrig](0.5), postCell.secs['secs'][conn['sec']](conn['loc'])) + #logger.info self.fromtodistance(postCell.secs[secOrig](0.5), postCell.secs['secs'][conn['sec']](conn['loc'])) # different case if has vs doesn't have 3d points # h.distance(sec=h.soma[0], seg=0) # for sec in apical: - # print h.secname() + # logger.info h.secname() # for seg in sec: - # print seg.x, h.distance(seg.x) + # logger.info seg.x, h.distance(seg.x) for i,(conn, newSec, newLoc) in enumerate(zip(conns, newSecs, newLocs)): diff --git a/netpyne/sim/gather.py b/netpyne/sim/gather.py index d5c663551..8d363a318 100644 --- a/netpyne/sim/gather.py +++ b/netpyne/sim/gather.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import @@ -15,8 +14,7 @@ import numpy as np from ..specs import Dict, ODict from . 
import setup - - +from netpyne.logger import logger #------------------------------------------------------------------------------ # Gather data from nodes @@ -41,7 +39,8 @@ def gatherData(gatherLFP=True): sim.timing('start', 'gatherTime') ## Pack data from all hosts if sim.rank==0: - print('\nGathering data...') + logger.info('') + logger.info('Gathering data...') # flag to avoid saving sections data for each cell (saves gather time and space; cannot inspect cell secs or re-simulate) if not sim.cfg.saveCellSecs: @@ -94,7 +93,7 @@ def gatherData(gatherLFP=True): sim.pc.barrier() if sim.rank == 0: # simData - print(' Gathering only sim data...') + logger.info(' Gathering only sim data...') sim.allSimData = Dict() for k in list(gather[0]['simData'].keys()): # initialize all keys of allSimData dict if gatherLFP and k == 'LFP': @@ -243,9 +242,11 @@ def gatherData(gatherLFP=True): sim.pc.barrier() if sim.rank == 0: sim.timing('stop', 'gatherTime') - if sim.cfg.timing: print((' Done; gather time = %0.2f s.' % sim.timingData['gatherTime'])) + if sim.cfg.timing: logger.info(' Done; gather time = %0.2f s.' 
% sim.timingData['gatherTime']) + + logger.info('') + logger.info('Analyzing...') - print('\nAnalyzing...') sim.totalSpikes = len(sim.allSimData['spkt']) sim.totalSynapses = sum([len(cell['conns']) for cell in sim.net.allCells]) if sim.cfg.createPyStruct: @@ -269,15 +270,15 @@ def gatherData(gatherLFP=True): sim.connsPerCell = 0 sim.synsPerCell = 0 - print((' Cells: %i' % (sim.numCells) )) - print((' Connections: %i (%0.2f per cell)' % (sim.totalConnections, sim.connsPerCell))) + logger.info(' Cells: %i' % (sim.numCells)) + logger.info(' Connections: %i (%0.2f per cell)' % (sim.totalConnections, sim.connsPerCell)) if sim.totalSynapses != sim.totalConnections: - print((' Synaptic contacts: %i (%0.2f per cell)' % (sim.totalSynapses, sim.synsPerCell))) + logger.info(' Synaptic contacts: %i (%0.2f per cell)' % (sim.totalSynapses, sim.synsPerCell)) if 'runTime' in sim.timingData: - print((' Spikes: %i (%0.2f Hz)' % (sim.totalSpikes, sim.firingRate))) - print((' Simulated time: %0.1f s; %i workers' % (sim.cfg.duration/1e3, sim.nhosts))) - print((' Run time: %0.2f s' % (sim.timingData['runTime']))) + logger.info(' Spikes: %i (%0.2f Hz)' % (sim.totalSpikes, sim.firingRate)) + logger.info(' Simulated time: %0.1f s; %i workers' % (sim.cfg.duration/1e3, sim.nhosts)) + logger.info(' Run time: %0.2f s' % (sim.timingData['runTime'])) if sim.cfg.printPopAvgRates and not sim.cfg.gatherOnlySimData: @@ -337,7 +338,8 @@ def gatherDataFromFiles(gatherLFP=True, saveFolder=None, simLabel=None, sim=None allCells = [] allPops = ODict() - print('\nGathering data from files for simulation: %s ...' % (simLabel)) + logger.info('') + logger.info('Gathering data from files for simulation: %s ...' 
% (simLabel)) simDataVecs = ['spkt', 'spkid', 'stims'] + list(sim.cfg.recordTraces.keys()) singleNodeVecs = ['t'] @@ -355,8 +357,8 @@ def gatherDataFromFiles(gatherLFP=True, saveFolder=None, simLabel=None, sim=None for file in fileList: - print(' Merging data file: %s' % (file)) - + logger.info(' Merging data file: %s' % (file)) + if fileType == 'pkl': with open(os.path.join(nodeDataDir, file), 'rb') as openFile: @@ -407,7 +409,7 @@ def gatherDataFromFiles(gatherLFP=True, saveFolder=None, simLabel=None, sim=None allPopsCellGids[popLabel].extend(popCellGids) elif fileType == 'json': - print('JSON loading not implemented yet.') + logger.warning('JSON loading not implemented yet.') return False if len(allSimData['spkt']) > 0: @@ -427,9 +429,10 @@ def gatherDataFromFiles(gatherLFP=True, saveFolder=None, simLabel=None, sim=None sim.pc.barrier() else: sim.timing('stop', 'gatherTime') - if sim.cfg.timing: print((' Done; gather time = %0.2f s.' % sim.timingData['gatherTime'])) + logger.timing(' Done; gather time = %0.2f s.' 
% sim.timingData['gatherTime']) - print('\nAnalyzing...') + logger.info('') + logger.info('Analyzing...') sim.totalSpikes = len(sim.allSimData['spkt']) sim.totalSynapses = sum([len(cell['conns']) for cell in sim.net.allCells]) @@ -454,15 +457,15 @@ def gatherDataFromFiles(gatherLFP=True, saveFolder=None, simLabel=None, sim=None sim.connsPerCell = 0 sim.synsPerCell = 0 - print((' Cells: %i' % (sim.numCells) )) - print((' Connections: %i (%0.2f per cell)' % (sim.totalConnections, sim.connsPerCell))) + logger.info(' Cells: %i' % (sim.numCells)) + logger.info(' Connections: %i (%0.2f per cell)' % (sim.totalConnections, sim.connsPerCell)) if sim.totalSynapses != sim.totalConnections: - print((' Synaptic contacts: %i (%0.2f per cell)' % (sim.totalSynapses, sim.synsPerCell))) - print((' Spikes: %i (%0.2f Hz)' % (sim.totalSpikes, sim.firingRate))) + logger.info(' Synaptic contacts: %i (%0.2f per cell)' % (sim.totalSynapses, sim.synsPerCell)) + logger.info(' Spikes: %i (%0.2f Hz)' % (sim.totalSpikes, sim.firingRate)) if 'runTime' in sim.timingData: - print((' Simulated time: %0.1f s; %i workers' % (sim.cfg.duration/1e3, sim.nhosts))) - print((' Run time: %0.2f s' % (sim.timingData['runTime']))) + logger.info(' Simulated time: %0.1f s; %i workers' % (sim.cfg.duration/1e3, sim.nhosts)) + logger.info(' Run time: %0.2f s' % (sim.timingData['runTime'])) if sim.cfg.printPopAvgRates and not sim.cfg.gatherOnlySimData: trange = sim.cfg.printPopAvgRates if isinstance(sim.cfg.printPopAvgRates, list) else None @@ -534,7 +537,7 @@ def _gatherCells(): ## Pack data from all hosts if sim.rank==0: - print('\nUpdating sim.net.allCells...') + logger.info('\nUpdating sim.net.allCells...') if sim.nhosts > 1: # only gather if >1 nodes nodeData = {'netCells': [c.__getstate__() for c in sim.net.cells]} diff --git a/netpyne/sim/load.py b/netpyne/sim/load.py index f7ce69e98..85487e56c 100644 --- a/netpyne/sim/load.py +++ b/netpyne/sim/load.py @@ -3,7 +3,6 @@ """ -from __future__ import 
print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import @@ -29,6 +28,7 @@ from .. import specs from . import utils from . import setup +from netpyne.logger import logger #------------------------------------------------------------------------------ # Load data from file @@ -60,7 +60,7 @@ def _byteify(data, ignore_dicts = False): # load pickle file if ext == 'pkl': import pickle - print(('Loading file %s ... ' % (filename))) + logger.info('Loading file %s ... ' % (filename)) with open(filename, 'rb') as fileObj: if sys.version_info[0] == 2: data = pickle.load(fileObj) @@ -70,65 +70,65 @@ def _byteify(data, ignore_dicts = False): # load dpk file elif ext == 'dpk': import gzip - print(('Loading file %s ... ' % (filename))) + logger.warning('Loading file %s ... ' % (filename)) #fn=sim.cfg.filename #.split('.') #gzip.open(fn, 'wb').write(pk.dumps(dataSave)) # write compressed string - print('NOT IMPLEMENTED!') + logger.warning('NOT IMPLEMENTED!') # load json file elif ext == 'json': import json - print(('Loading file %s ... ' % (filename))) + logger.info('Loading file %s ... ' % (filename)) with open(filename, 'r') as fileObj: data = json.load(fileObj) # works with py2 and py3 # load mat file elif ext == 'mat': from scipy.io import loadmat - print(('Loading file %s ... ' % (filename))) + logger.info('Loading file %s ... ' % (filename)) dataraw = loadmat(filename, struct_as_record=False, squeeze_me=True) data = utils._mat2dict(dataraw) #savemat(sim.cfg.filename+'.mat', replaceNoneObj(dataSave)) # replace None and {} with [] so can save in .mat format - print('Finished saving!') + logger.info('Finished saving!') # load HDF5 file (uses very inefficient hdf5storage module which supports dicts) elif ext == 'saveHDF5': #dataSaveUTF8 = _dict2utf8(replaceNoneObj(dataSave)) # replace None and {} with [], and convert to utf import hdf5storage - print(('Loading file %s ... 
' % (filename))) + logger.warning('Loading file %s ... ' % (filename)) #hdf5storage.writes(dataSaveUTF8, filename=sim.cfg.filename+'.hdf5') - print('NOT IMPLEMENTED!') + logger.warning('NOT IMPLEMENTED!') # load CSV file (currently only saves spikes) elif ext == 'csv': import csv - print(('Loading file %s ... ' % (filename))) + logger.warning('Loading file %s ... ' % (filename)) writer = csv.writer(open(sim.cfg.filename+'.csv', 'wb')) #for dic in dataSave['simData']: # for values in dic: # writer.writerow(values) - print('NOT IMPLEMENTED!') + logger.warning('NOT IMPLEMENTED!') # load Dat file(s) elif ext == 'dat': - print(('Loading file %s ... ' % (filename))) - print('NOT IMPLEMENTED!') + logger.warning('Loading file %s ... ' % (filename)) + logger.warning('NOT IMPLEMENTED!') # traces = sim.cfg.recordTraces # for ref in traces.keys(): # for cellid in sim.allSimData[ref].keys(): # dat_file_name = '%s_%s.dat'%(ref,cellid) # dat_file = open(dat_file_name, 'w') # trace = sim.allSimData[ref][cellid] - # print("Saving %i points of data on: %s:%s to %s"%(len(trace),ref,cellid,dat_file_name)) + # logger.info("Saving %i points of data on: %s:%s to %s"%(len(trace),ref,cellid,dat_file_name)) # for i in range(len(trace)): # dat_file.write('%s\t%s\n'%((i*sim.cfg.dt/1000),trace[i]/1000)) else: - print(('Format not recognized for file %s'%(filename))) + logger.warning('Format not recognized for file %s'%(filename)) return if hasattr(sim, 'rank') and sim.rank == 0 and hasattr(sim, 'cfg') and sim.cfg.timing: sim.timing('stop', 'loadFileTime') - print((' Done; file loading time = %0.2f s' % sim.timingData['loadFileTime'])) + logger.info(' Done; file loading time = %0.2f s' % sim.timingData['loadFileTime']) return data @@ -163,14 +163,14 @@ def loadSimCfg(filename, data=None, setLoaded=True): if not data: data = _loadFile(filename) - print('Loading simConfig...') + logger.info('Loading simConfig...') if 'simConfig' in data: if setLoaded: setup.setSimCfg(data['simConfig']) else: 
return specs.SimConfig(data['simConfig']) else: - print((' simConfig not found in file %s'%(filename))) + logger.warning(' simConfig not found in file %s'%(filename)) pass @@ -202,14 +202,14 @@ def loadNetParams(filename, data=None, setLoaded=True): if not data: data = _loadFile(filename) - print('Loading netParams...') + logger.info('Loading netParams...') if 'net' in data and 'params' in data['net']: if setLoaded: setup.setNetParams(data['net']['params']) else: return specs.NetParams(data['net']['params']) else: - print(('netParams not found in file %s'%(filename))) + logger.warning('netParams not found in file %s'%(filename)) pass @@ -258,7 +258,7 @@ def loadNet(filename, data=None, instantiate=True, compactConnFormat=False): loadNow = False if loadNow: sim.timing('start', 'loadNetTime') - print('Loading net...') + logger.info('Loading net...') if compactConnFormat: compactToLongConnFormat(data['net']['cells'], compactConnFormat) # convert loaded data to long format sim.net.allPops = data['net']['pops'] @@ -277,7 +277,7 @@ def loadNet(filename, data=None, instantiate=True, compactConnFormat=False): else: cellsNode = [data['net']['cells'][i] for i in range(0, len(data['net']['cells']), 1)] except: - print('Unable to instantiate network...') + logger.warning('Unable to instantiate network...') try: if sim.cfg.createPyStruct: @@ -298,29 +298,29 @@ def loadNet(filename, data=None, instantiate=True, compactConnFormat=False): cell.create() sim.cfg.createNEURONObj = createNEURONObjorig except: - if sim.cfg.verbose: print(' Unable to load cell secs') + logger.debug(' Unable to load cell secs') try: cell.conns = [Dict(conn) for conn in cellLoad['conns']] except: - if sim.cfg.verbose: print(' Unable to load cell conns') + logger.debug(' Unable to load cell conns') try: cell.stims = [Dict(stim) for stim in cellLoad['stims']] except: - if sim.cfg.verbose: print(' Unable to load cell stims') + logger.debug(' Unable to load cell stims') sim.net.cells.append(cell) - print((' 
Created %d cells' % (len(sim.net.cells)))) - print((' Created %d connections' % (sum([len(c.conns) for c in sim.net.cells])))) - print((' Created %d stims' % (sum([len(c.stims) for c in sim.net.cells])))) + logger.info(' Created %d cells' % (len(sim.net.cells))) + logger.info(' Created %d connections' % (sum([len(c.conns) for c in sim.net.cells]))) + logger.info(' Created %d stims' % (sum([len(c.stims) for c in sim.net.cells]))) except: - print('Unable to create Python structure...') + logger.warning('Unable to create Python structure...') try: # only create NEURON objs, if there is Python struc (fix so minimal Python struct is created) if sim.cfg.createNEURONObj: - if sim.cfg.verbose: print(" Adding NEURON objects...") + logger.debug(" Adding NEURON objects...") # create NEURON sections, mechs, syns, etc; and associate gid for cell in sim.net.cells: prop = {'secs': cell.secs} @@ -333,17 +333,17 @@ def loadNet(filename, data=None, instantiate=True, compactConnFormat=False): cell.addStimsNEURONObj() # add stims first so can then create conns between netstims cell.addConnsNEURONObj() except: - if sim.cfg.verbose: ' Unable to load instantiate cell conns or stims' + logger.debug(' Unable to load instantiate cell conns or stims') - print((' Added NEURON objects to %d cells' % (len(sim.net.cells)))) + logger.info(' Added NEURON objects to %d cells' % (len(sim.net.cells))) except: - print('Unable to create NEURON objects...') + logger.warning('Unable to create NEURON objects...') if loadNow and sim.cfg.timing: #if sim.rank == 0 and sim.cfg.timing: sim.timing('stop', 'loadNetTime') - print((' Done; re-instantiate net time = %0.2f s' % sim.timingData['loadNetTime'])) + logger.info(' Done; re-instantiate net time = %0.2f s' % sim.timingData['loadNetTime']) else: - print((' netCells and/or netPops not found in file %s'%(filename))) + logger.warning(' netCells and/or netPops not found in file %s'%(filename)) 
#------------------------------------------------------------------------------ @@ -370,12 +370,12 @@ def loadSimData(filename, data=None): if not data: data = _loadFile(filename) - print('Loading simData...') + logger.info('Loading simData...') if 'simData' in data: sim.allSimData = data['simData'] else: - print((' simData not found in file %s'%(filename))) + logger.warning(' simData not found in file %s'%(filename)) if 'net' in data: try: @@ -426,7 +426,7 @@ def loadAll(filename, data=None, instantiate=True, createNEURONObj=True): if hasattr(sim.cfg, 'compactConnFormat'): connFormat = sim.cfg.compactConnFormat else: - print('Error: no connFormat provided in simConfig') + logger.warning('Error: no connFormat provided in simConfig') sys.exit() loadNet(filename, data=data, instantiate=instantiate, compactConnFormat=connFormat) loadSimData(filename, data=data) @@ -461,7 +461,7 @@ def compactToLongConnFormat(cells, connFormat): cell['conns'][iconn] = {key: conn[index] for key,index in formatIndices.items()} return cells except: - print("Error converting conns from compact to long format") + logger.warning("Error converting conns from compact to long format") return cells @@ -569,13 +569,13 @@ def ijsonLoad(filename, tagsGidRange=None, connsGidRange=None, loadTags=True, lo with open(filename, 'rb') as fd: start = time() - print('Loading data ...') + logger.info('Loading data ...') objs = ijson.items(fd, 'net.cells.item') if loadTags and loadConns: - print('Storing tags and conns ...') + logger.info('Storing tags and conns ...') for cell in objs: if tagsGidRange==None or cell['gid'] in tagsGidRange: - print('Cell gid: %d'%(cell['gid'])) + logger.info('Cell gid: %d'%(cell['gid'])) if tagFormat: tags[int(cell['gid'])] = [cell['tags'][param] for param in tagFormat] else: @@ -586,30 +586,30 @@ def ijsonLoad(filename, tagsGidRange=None, connsGidRange=None, loadTags=True, lo else: conns[int(cell['gid'])] = cell['conns'] elif loadTags: - print('Storing tags ...') + 
logger.info('Storing tags ...') if tagFormat: tags.update({int(cell['gid']): [cell['tags'][param] for param in tagFormat] for cell in objs if tagsGidRange==None or cell['gid'] in tagsGidRange}) else: tags.update({int(cell['gid']): cell['tags'] for cell in objs if tagsGidRange==None or cell['gid'] in tagsGidRange}) elif loadConns: - print('Storing conns...') + logger.info('Storing conns...') if connFormat: conns.update({int(cell['gid']): [[conn[param] for param in connFormat] for conn in cell['conns']] for cell in objs if connsGidRange==None or cell['gid'] in connsGidRange}) else: conns.update({int(cell['gid']): cell['conns'] for cell in objs if connsGidRange==None or cell['gid'] in connsGidRange}) - print('time ellapsed (s): ', time() - start) + logger.info('time elapsed (s): %s', time() - start) tags = utils.decimalToFloat(tags) conns = utils.decimalToFloat(conns) if saveTags and tags: outFilename = saveTags if isinstance(saveTags, basestring) else 'filename'[:-4]+'_tags.json' - print('Saving tags to %s ...' % (outFilename)) + logger.info('Saving tags to %s ...' % (outFilename)) sim.saveJSON(outFilename, {'tags': tags}) if saveConns and conns: outFilename = saveConns if isinstance(saveConns, basestring) else 'filename'[:-4]+'_conns.json' - print('Saving conns to %s ...' % (outFilename)) + logger.info('Saving conns to %s ...' % (outFilename)) sim.saveJSON(outFilename, {'conns': conns}) return tags, conns diff --git a/netpyne/sim/run.py b/netpyne/sim/run.py index de4a87d04..a91cb02a0 100644 --- a/netpyne/sim/run.py +++ b/netpyne/sim/run.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import @@ -17,7 +16,7 @@ import numpy as np from neuron import h, init # Import NEURON from . 
import utils - +from netpyne.logger import logger #------------------------------------------------------------------------------ @@ -56,13 +55,13 @@ def preRun(): # parallelcontext vars sim.pc.set_maxstep(10) mindelay = sim.pc.allreduce(sim.pc.set_maxstep(10), 2) # flag 2 returns minimum value - if sim.rank==0 and sim.cfg.verbose: print(('Minimum delay (time-step for queue exchange) is %.2f'%(mindelay))) + if sim.rank==0: logger.debug('Minimum delay (time-step for queue exchange) is %.2f'%(mindelay)) sim.pc.setup_transfer() # setup transfer of source_var to target_var # handler for printing out time during simulation run if sim.rank == 0 and sim.cfg.printRunTime: def printRunTime(): - print('%.1fs' % (h.t/1000.0)) + logger.info('%.1fs' % (h.t/1000.0)) sim.cvode.event(h.t + int(sim.cfg.printRunTime*1000.0), sim.printRunTime) sim.printRunTime = printRunTime @@ -82,7 +81,7 @@ def printRunTime(): cell.hPointp.noiseFromRandom(cell.hRandom) pop = sim.net.pops[cell.tags['pop']] if 'originalFormat' in pop.tags and pop.tags['originalFormat'] == 'NeuroML2_SpikeSource': - if sim.cfg.verbose: print("== Setting random generator in NeuroML spike generator") + logger.debug("== Setting random generator in NeuroML spike generator") cell.initRandom() else: for stim in cell.stims: @@ -130,9 +129,9 @@ def runSim(skipPreRun=False): if hasattr(sim.cfg,'use_local_dt') and sim.cfg.use_local_dt: try: sim.cvode.use_local_dt(1) - if sim.cfg.verbose: print('Using local dt.') + logger.debug('Using local dt.') except: - if sim.cfg.verbose: 'Error Failed to use local dt.' 
+ logger.debug('Error Failed to use local dt.') sim.pc.barrier() sim.timing('start', 'runTime') @@ -141,13 +140,13 @@ def runSim(skipPreRun=False): h.finitialize(float(sim.cfg.hParams['v_init'])) - if sim.rank == 0: print('\nRunning simulation for %s ms...'%sim.cfg.duration) + if sim.rank == 0: logger.info('\nRunning simulation for %s ms...'%sim.cfg.duration) sim.pc.psolve(sim.cfg.duration) sim.pc.barrier() # Wait for all hosts to get to this point sim.timing('stop', 'runTime') if sim.rank==0: - print(' Done; run time = %0.2f s; real-time ratio: %0.2f.' % + logger.info(' Done; run time = %0.2f s; real-time ratio: %0.2f.' % (sim.timingData['runTime'], sim.cfg.duration/1000/sim.timingData['runTime'])) @@ -197,7 +196,7 @@ def runSimWithIntervalFunc(interval, func, timeRange=None, funcArgs=None): kwargs.update(funcArgs) if sim.rank == 0: - print('\nRunning with interval func ...') + logger.info('\nRunning with interval func...') if int(startTime) != 0: sim.pc.psolve(startTime) @@ -213,8 +212,8 @@ def runSimWithIntervalFunc(interval, func, timeRange=None, funcArgs=None): sim.pc.barrier() # Wait for all hosts to get to this point sim.timing('stop', 'runTime') if sim.rank==0: - print((' Done; run time = %0.2f s; real-time ratio: %0.2f.' % - (sim.timingData['runTime'], sim.cfg.duration/1000/sim.timingData['runTime']))) + logger.info(' Done; run time = %0.2f s; real-time ratio: %0.2f.' 
% + (sim.timingData['runTime'], sim.cfg.duration/1000/sim.timingData['runTime'])) #------------------------------------------------------------------------------ @@ -277,13 +276,13 @@ def loadBalance(printNodeTimes = False): load_balance = avg_comp_time/max_comp_time if printNodeTimes: - print('node:',sim.rank,' comp_time:',computation_time) + logger.info('node: ' + str(sim.rank) + ' comp_time: ' + str(computation_time)) if sim.rank==0: - print('max_comp_time:', max_comp_time) - print('min_comp_time:', min_comp_time) - print('avg_comp_time:', avg_comp_time) - print('load_balance:',load_balance) - print('\nspike exchange time (run_time-comp_time): ', sim.timingData['runTime'] - max_comp_time) + logger.info('max_comp_time:' + str(max_comp_time)) + logger.info('min_comp_time:' + str(min_comp_time)) + logger.info('avg_comp_time:' + str(avg_comp_time)) + logger.info('load_balance:' + str(load_balance)) + logger.info('spike exchange time (run_time-comp_time): ' + str(sim.timingData['runTime'] - max_comp_time)) return [max_comp_time, min_comp_time, avg_comp_time, load_balance] diff --git a/netpyne/sim/save.py b/netpyne/sim/save.py index 60fd9cb9e..e3b4b45b5 100644 --- a/netpyne/sim/save.py +++ b/netpyne/sim/save.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import @@ -26,6 +25,7 @@ from . import gather from . import utils from ..specs import Dict, ODict +from netpyne.logger import logger #------------------------------------------------------------------------------ @@ -95,7 +95,7 @@ def saveData(include=None, filename=None, saveLFP=True): # copy source files if isinstance(sim.cfg.backupCfgFile, list) and len(sim.cfg.backupCfgFile) == 2: simName = sim.cfg.simLabel if sim.cfg.simLabel else os.path.basename(sim.cfg.filename) - print(('Copying cfg file %s ... ' % simName)) + logger.info('Copying cfg file %s ... 
' % simName) source = sim.cfg.backupCfgFile[0] targetFolder = sim.cfg.backupCfgFile[1] # make dir @@ -103,11 +103,11 @@ def saveData(include=None, filename=None, saveLFP=True): os.mkdir(targetFolder) except OSError: if not os.path.exists(targetFolder): - print(' Could not create target folder: %s' % (targetFolder)) + logger.warning(' Could not create target folder: %s' % targetFolder) # copy file targetFile = targetFolder + '/' + simName + '_cfg.py' if os.path.exists(targetFile): - print(' Removing prior cfg file' , targetFile) + logger.info(' Removing prior cfg file ' + targetFile) os.system('rm ' + targetFile) os.system('cp ' + source + ' ' + targetFile) @@ -118,7 +118,7 @@ def saveData(include=None, filename=None, saveLFP=True): try: os.mkdir(targetFolder) except OSError: - print(' Could not create target folder: %s' % (targetFolder)) + logger.warning(' Could not create target folder: %s' % targetFolder) # saving data if not include: include = sim.cfg.saveDataInclude @@ -161,52 +161,52 @@ def saveData(include=None, filename=None, saveLFP=True): if sim.cfg.savePickle: import pickle dataSave = utils.replaceDictODict(dataSave) - print(('Saving output as %s ... ' % (filePath + '.pkl'))) + logger.info('Saving output as %s... ' % (filePath + '.pkl')) with open(filePath+'.pkl', 'wb') as fileObj: pickle.dump(dataSave, fileObj) - print('Finished saving!') + logger.info('Finished saving!') # Save to dpk file if sim.cfg.saveDpk: import gzip - print(('Saving output as %s ... ' % (filePath+'.dpk'))) + logger.info('Saving output as %s ... ' % (filePath+'.dpk')) #fn=filePath #.split('.') gzip.open(filePath, 'wb').write(pk.dumps(dataSave)) # write compressed string - print('Finished saving!') + logger.info('Finished saving!') # Save to json file if sim.cfg.saveJson: # Make it work for Python 2+3 and with Unicode - print(('Saving output as %s ... ' % (filePath+'.json '))) + logger.info('Saving output as %s ... 
' % (filePath+'.json ')) #dataSave = utils.replaceDictODict(dataSave) # not required since json saves as dict sim.saveJSON(filePath+'.json', dataSave) - print('Finished saving!') + logger.info('Finished saving!') # Save to mat file if sim.cfg.saveMat: from scipy.io import savemat - print(('Saving output as %s ... ' % (filePath+'.mat'))) + logger.info('Saving output as %s ... ' % (filePath+'.mat')) savemat(filePath+'.mat', utils.tupleToList(utils.replaceNoneObj(dataSave))) # replace None and {} with [] so can save in .mat format - print('Finished saving!') + logger.info('Finished saving!') # Save to HDF5 file (uses very inefficient hdf5storage module which supports dicts) if sim.cfg.saveHDF5: dataSaveUTF8 = utils._dict2utf8(utils.replaceNoneObj(dataSave)) # replace None and {} with [], and convert to utf import hdf5storage - print(('Saving output as %s... ' % (filePath+'.hdf5'))) + logger.info('Saving output as %s... ' % (filePath+'.hdf5')) hdf5storage.writes(dataSaveUTF8, filename=filePath+'.hdf5') - print('Finished saving!') + logger.info('Finished saving!') # Save to CSV file (currently only saves spikes) if sim.cfg.saveCSV: if 'simData' in dataSave: import csv - print(('Saving output as %s ... ' % (filePath+'.csv'))) + logger.info('Saving output as %s ... 
' % (filePath+'.csv')) writer = csv.writer(open(filePath+'.csv', 'wb')) for dic in dataSave['simData']: for values in dic: writer.writerow(values) - print('Finished saving!') + logger.info('Finished saving!') # Save to Dat file(s) if sim.cfg.saveDat: @@ -216,17 +216,16 @@ def saveData(include=None, filename=None, saveLFP=True): dat_file_name = '%s_%s.dat'%(ref,cellid) dat_file = open(dat_file_name, 'w') trace = sim.allSimData[ref][cellid] - print(("Saving %i points of data on: %s:%s to %s"%(len(trace),ref,cellid,dat_file_name))) + logger.info("Saving %i points of data on: %s:%s to %s"%(len(trace),ref,cellid,dat_file_name)) for i in range(len(trace)): dat_file.write('%s\t%s\n'%((i*sim.cfg.dt/1000),trace[i]/1000)) dat_file.close() - print('Finished saving!') + logger.info('Finished saving!') # Save timing - if sim.cfg.timing: - sim.timing('stop', 'saveTime') - print((' Done; saving time = %0.2f s.' % sim.timingData['saveTime'])) + sim.timing('stop', 'saveTime') + logger.timing(' Done; saving time = %0.2f s.' 
% sim.timingData['saveTime']) if sim.cfg.timing and sim.cfg.saveTiming: import pickle with open('timing.pkl', 'wb') as file: pickle.dump(sim.timing, file) @@ -242,7 +241,7 @@ def saveData(include=None, filename=None, saveLFP=True): return os.getcwd() + '/' + filePath else: - print('Nothing to save') + logger.info('Nothing to save') #------------------------------------------------------------------------------ @@ -333,7 +332,7 @@ def intervalSave(simTime, gatherLFP=True): try: os.makedirs(targetFolder) except OSError: - print(' Could not create target folder: %s' % (targetFolder)) + logger.warning(' Could not create target folder: %s' % targetFolder) include = sim.cfg.saveDataInclude @@ -349,7 +348,6 @@ def intervalSave(simTime, gatherLFP=True): for k,v in nodeData.items(): data[0][k] = v - #print data gather = sim.pc.py_alltoall(data) sim.pc.barrier() if sim.rank == 0: @@ -437,7 +435,7 @@ def intervalSave(simTime, gatherLFP=True): simDataVecs = simDataVecs + ['allWeights'] if sim.rank == 0: # simData - print(' Saving data at intervals... {:0.0f} ms'.format(simTime)) + logger.info(' Saving data at intervals... {:0.0f} ms'.format(simTime)) sim.allSimData = Dict() for k in list(gather[0]['simData'].keys()): # initialize all keys of allSimData dict if gatherLFP and k == 'LFP': @@ -584,7 +582,7 @@ def saveDataInNodes(filename=None, saveLFP=True, removeTraces=False, saveFolder= sim.pc.barrier() if sim.rank == 0: - print('\nSaving an output file for each node in: %s' % (saveFolder)) + logger.info('\nSaving an output file for each node in: %s' % (saveFolder)) # saving data dataSave = {} @@ -665,25 +663,24 @@ def saveDataInNodes(filename=None, saveLFP=True, removeTraces=False, saveFolder= import pickle dataSave = utils.replaceDictODict(dataSave) fileName = filePath + '_node_' + str(sim.rank) + '.pkl' - print((' Saving output as: %s ... ' % (fileName))) + logger.info(' Saving output as: %s ... 
' % (fileName)) with open(os.path.join(saveFolder, fileName), 'wb') as fileObj: pickle.dump(dataSave, fileObj) except: - print('Unable to save Pickle') + logger.warning('Unable to save Pickle') return dataSave # Save to json file if sim.cfg.saveJson: fileName = filePath + '_node_' + str(sim.rank) + '.json' - print((' Saving output as: %s ... ' % (fileName))) + logger.info(' Saving output as: %s ... ' % (fileName)) sim.saveJSON(os.path.join(saveFolder, fileName), dataSave) # Save timing sim.pc.barrier() if sim.rank == 0: - if sim.cfg.timing: - sim.timing('stop', 'saveInNodeTime') - print((' Done; saving time = %0.2f s.' % sim.timingData['saveInNodeTime'])) + sim.timing('stop', 'saveInNodeTime') + logger.timing(' Done; saving time = %0.2f s.' % sim.timingData['saveInNodeTime']) if sim.cfg.timing and sim.cfg.saveTiming: import pickle with open('timing.pkl', 'wb') as file: pickle.dump(sim.timing, file) diff --git a/netpyne/sim/setup.py b/netpyne/sim/setup.py index 75afcc707..9d2765b43 100644 --- a/netpyne/sim/setup.py +++ b/netpyne/sim/setup.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import @@ -19,6 +18,7 @@ from .. import specs from ..specs import Dict, ODict from . 
import utils +from netpyne.logger import logger try: from datetime import datetime except: @@ -57,7 +57,7 @@ def initialize(netParams = None, simConfig = None, net = None): if netParams is None: netParams = {} # If not specified, initialize as empty dict if simConfig is None: simConfig = {} # If not specified, initialize as empty dict if hasattr(simConfig, 'popParams') or hasattr(netParams, 'duration'): - print('Error: seems like the sim.initialize() arguments are in the wrong order, try initialize(netParams, simConfig)') + logger.warning('Error: seems like the sim.initialize() arguments are in the wrong order, try initialize(netParams, simConfig)') sys.exit() # for testing validation @@ -77,7 +77,7 @@ def initialize(netParams = None, simConfig = None, net = None): if sim.rank == 0: try: - print('\nStart time: ', datetime.now()) + logger.info('\nStart time: ' + str(datetime.now())) except: pass sim.timing('start', 'initialTime') @@ -99,7 +99,7 @@ def initialize(netParams = None, simConfig = None, net = None): simTestObj.netParams = sim.net.params simTestObj.runTests() except: - print("\nAn exception occurred during the error checking process...") + logger.warning("An exception occurred during the error checking process...") sim.timing('stop', 'initialTime') @@ -238,7 +238,7 @@ def readCmdLineArgs(simConfigDefault='cfg.py', netParamsDefault='netParams.py'): if len(sys.argv) > 1: - print('\nReading command line arguments using syntax: python file.py [simConfig=filepath] [netParams=filepath]') + logger.info('Reading command line arguments using syntax: python file.py [simConfig=filepath] [netParams=filepath]') cfgPath = None netParamsPath = None @@ -260,7 +260,7 @@ def readCmdLineArgs(simConfigDefault='cfg.py', netParamsDefault='netParams.py'): except: # py2 netParamsModule = imp.load_source(os.path.basename(netParamsPath).split('.')[0], netParamsPath) netParams = netParamsModule.netParams - print('Importing netParams from %s' %(netParamsPath)) + 
logger.info('Importing netParams from %s' % netParamsPath) if not cfgPath: try: @@ -274,9 +274,7 @@ def readCmdLineArgs(simConfigDefault='cfg.py', netParamsDefault='netParams.py'): cfg = cfgModule.cfg __main__.cfg = cfg except: - print('\nWarning: Could not load cfg from command line path or from default cfg.py') - print('This usually occurs when cfg.py crashes. Please ensure that your cfg.py file') - print('completes successfully on its own (i.e. execute "python cfg.py" and fix any bugs).') + logger.warning('Could not load cfg from command line path or from default cfg.py. \nThis usually occurs when cfg.py crashes. Please ensure that your cfg.py file completes successfully on its own (i.e. execute "python cfg.py" and fix any bugs).') cfg = None if not netParamsPath: @@ -290,9 +288,7 @@ def readCmdLineArgs(simConfigDefault='cfg.py', netParamsDefault='netParams.py'): netParams = netParamsModule.netParams except: - print('\nWarning: Could not load netParams from command line path or from default netParams.py') - print('This usually occurs when netParams.py crashes. Please ensure that your netParams.py file') - print('completes successfully on its own (i.e. execute "python netParams.py" and fix any bugs).') + logger.warning('Could not load netParams from command line path or from default netParams.py. \nThis usually occurs when netParams.py crashes. Please ensure that your netParams.py file completes successfully on its own (i.e. 
execute "python netParams.py" and fix any bugs).') netParams = None return cfg, netParams @@ -423,18 +419,18 @@ def setupRecording(): else: sim.simData['t'].record(h._ref_t, sim.cfg.recordStep) except: - if sim.cfg.verbose: 'Error recording h.t (could be due to no sections existing)' + logger.debug('Error recording h.t (could be due to no sections existing)') - # print recorded traces + # log recorded traces cat = 0 total = 0 for key in sim.simData: - if sim.cfg.verbose: print((" Recording: %s:"%key)) + logger.debug(" Recording: %s:"%key) if len(sim.simData[key])>0: cat+=1 for k2 in sim.simData[key]: - if sim.cfg.verbose: print((" %s"%k2)) + logger.debug(" %s"%k2) total+=1 - print(("Recording %s traces of %s types on node %i"%(total, cat, sim.rank))) + logger.info("Recording %s traces of %s types on node %i"%(total, cat, sim.rank)) # set LFP recording if sim.cfg.recordLFP: @@ -465,25 +461,25 @@ def setGlobals(): for k,v in cellRule.get('globals', {}).items(): if k not in cellGlobs: cellGlobs[k] = v - elif cellGlobs[k] != v and sim.cfg.verbose: + elif cellGlobs[k] != v: if k == 'v_init': wrongVinit = [s['vinit'] for s in list(cellRule['secs'].values()) if 'vinit' in s and s['vinit'] == v and s['vinit'] != cellGlobs[k]] # check if set inside secs (set by default during import) if len(wrongVinit) == len(cellRule['secs']): - print("\nWarning: global variable %s=%s differs from that set for each section in cellParams rule %s: %s" % (k, str(cellGlobs[k]), cellRuleName, str(v))) + logger.debug("Global variable %s=%s differs from that set for each section in cellParams rule %s: %s" % (k, str(cellGlobs[k]), cellRuleName, str(v))) else: # no need since v_inits set in each sec during import - print("\nWarning: global variable %s=%s differs from that defined (not used) in the 'globals' of cellParams rule %s: %s" % (k, str(cellGlobs[k]), cellRuleName, str(v))) + logger.debug("Global variable %s=%s differs from that defined (not used) in the 'globals' of cellParams rule %s: %s" 
% (k, str(cellGlobs[k]), cellRuleName, str(v))) else: - print("\nWarning: global variable %s=%s differs from that defined (not used) in the 'globals' of cellParams rule %s: %s" % (k, str(cellGlobs[k]), cellRuleName, str(v))) + logger.debug("Global variable %s=%s differs from that defined (not used) in the 'globals' of cellParams rule %s: %s" % (k, str(cellGlobs[k]), cellRuleName, str(v))) # add tstop as global (for ease of transition with standard NEURON) cellGlobs['tstop'] = float(sim.cfg.duration) # h global params - if sim.cfg.verbose and len(cellGlobs) > 0: - print('\nSetting h global variables ...') + if len(cellGlobs) > 0: + logger.debug('Setting h global variables ...') for key,val in cellGlobs.items(): try: h('%s=%s'%(key,val)) - if sim.cfg.verbose: print((' h.%s = %s' % (key, str(val)))) + logger.debug(' h.%s = %s' % (key, str(val))) except: - print('\nError: could not set global %s = %s' % (key, str(val))) + logger.warning('Error: could not set global %s = %s' % (key, str(val))) diff --git a/netpyne/sim/utils.py b/netpyne/sim/utils.py index 21ac10d13..1e46375b0 100644 --- a/netpyne/sim/utils.py +++ b/netpyne/sim/utils.py @@ -27,7 +27,7 @@ from collections import OrderedDict from neuron import h# Import NEURON from ..specs import Dict, ODict - +from netpyne.logger import logger #------------------------------------------------------------------------------ @@ -174,7 +174,7 @@ def version(show=True): from netpyne import __version__ if show: - print(__version__) + logger.info(__version__) return __version__ @@ -302,17 +302,17 @@ def checkMemory(): # print memory diagnostic info if sim.rank == 0: # and checkMemory: import resource - print('\nMEMORY -----------------------') - print('Sections: ') - print(h.topology()) - print('NetCons: ') - print(len(h.List("NetCon"))) - print('NetStims:') - print(len(h.List("NetStim"))) - print('\n Memory usage: %s \n' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + logger.info('\nMEMORY -----------------------') + 
logger.info('Sections: ') + logger.info(h.topology()) + logger.info('NetCons: ') + logger.info(len(h.List("NetCon"))) + logger.info('NetStims:') + logger.info(len(h.List("NetStim"))) + logger.info('\n Memory usage: %s \n' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) # import objgraph # objgraph.show_most_common_types() - print('--------------------------------\n') + logger.info('--------------------------------\n') #------------------------------------------------------------------------------ diff --git a/netpyne/sim/wrappers.py b/netpyne/sim/wrappers.py index dd64cc382..c45fda8c7 100644 --- a/netpyne/sim/wrappers.py +++ b/netpyne/sim/wrappers.py @@ -10,6 +10,7 @@ from __future__ import absolute_import from future import standard_library standard_library.install_aliases() +from netpyne.logger import logger #------------------------------------------------------------------------------ # Wrapper to create network @@ -251,7 +252,7 @@ def createSimulateAnalyzeInterval(netParams, simConfig, output=False, interval=N os.mkdir('temp') sim.intervalSimulate(interval) except Exception as e: - print(e) + logger.warning(e) return sim.pc.barrier() sim.analyze() diff --git a/netpyne/specs/netParams.py b/netpyne/specs/netParams.py index 0ab0af435..e5940892c 100644 --- a/netpyne/specs/netParams.py +++ b/netpyne/specs/netParams.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import @@ -23,6 +22,7 @@ from collections import OrderedDict from .dicts import Dict, ODict from .. 
import conversion +from netpyne.logger import logger # ---------------------------------------------------------------------------- # PopParams class @@ -326,13 +326,13 @@ def save(self, filename): os.mkdir(folder) except OSError: if not os.path.exists(folder): - print(' Could not create', folder) + logger.warning(' Could not create ' + folder) dataSave = {'net': {'params': self.__dict__}} # Save to json file if ext == 'json': - print(('Saving netParams to %s ... ' % (filename))) + logger.info('Saving netParams to %s ... ' % (filename)) sim.saveJSON(filename, dataSave) def addCellParams(self, label=None, params=None): @@ -381,11 +381,11 @@ def addStimTargetParams(self, label=None, params=None): # try: # obj = getattr(self, attr) # except: - # print 'Error renaming: netParams does not contain %s' % (attr) + # logger.warning 'Error renaming: netParams does not contain %s' % (attr) # return False # if old not in obj: - # print 'Error renaming: netParams.%s rule does not contain %s' % (attribute, old) + # logger.warning 'Error renaming: netParams.%s rule does not contain %s' % (attribute, old) # return False # obj[new] = obj.pop(old) # replace @@ -406,7 +406,7 @@ def importCellParams(self, label, fileName, cellName, conds={}, cellArgs=None, i somaSec = next((sec for sec in cellRule['secs'] if 'soma' in sec), None) if not somaSec or not 'pt3d' in cellRule['secs'][somaSec]['geom']: pass - #print('Warning: cannot place soma at origin because soma does not exist or does not contain pt3d') + #logger.warning('Cannot place soma at origin because soma does not exist or does not contain pt3d') else: soma3d = cellRule['secs'][somaSec]['geom']['pt3d'] midpoint = int(len(soma3d)/2) @@ -434,15 +434,15 @@ def addCellParamsSecList(self, label, secListName, somaDist=None, somaDistY=None if label in self.cellParams: cellRule = self.cellParams[label] else: - print('Error adding secList: netParams.cellParams does not contain %s' % (label)) + logger.warning('Error adding secList: 
netParams.cellParams does not contain %s' % (label)) return if somaDist is not None and (not isinstance(somaDist, list) or len(somaDist) != 2): - print('Error adding secList: somaDist should be a list with 2 elements') + logger.warning('Error adding secList: somaDist should be a list with 2 elements') return if somaDistY is not None and (not isinstance(somaDistY, list) or len(somaDistY) != 2): - print('Error adding secList: somaDistY should be a list with 2 elements') + logger.warning('Error adding secList: somaDistY should be a list with 2 elements') return @@ -461,7 +461,7 @@ def addCellParamsSecList(self, label, secListName, somaDist=None, somaDistY=None secList.append(secName) else: - print('Error adding secList: Sections do not contain 3d points') + logger.warning('Error adding secList: Sections do not contain 3d points') return cellRule.secLists[secListName] = list(secList) @@ -470,11 +470,11 @@ def swapCellParamsPt3d(self, label, origIndex, targetIndex): if label in self.cellParams: cellRule = self.cellParams[label] else: - print('Error swapping 3d pts: netParams.cellParams does not contain %s' % (label)) + logger.warning('Error swapping 3d pts: netParams.cellParams does not contain %s' % (label)) return if origIndex not in list(range(4)) and targetIndex not in list(range(4)): # check valid indices (x,y,z,d) - print('Error swapping 3d pts: indices should be 0, 1, 2 or 3 (x,y,z,d)') + logger.warning('Error swapping 3d pts: indices should be 0, 1, 2 or 3 (x,y,z,d)') return for sec in list(cellRule.secs.values()): @@ -496,7 +496,7 @@ def addCellParamsWeightNorm(self, label, fileName, threshold=1000): if label in self.cellParams: cellRule = self.cellParams[label] else: - print('Error adding weightNorm: netParams.cellParams does not contain %s' % (label)) + logger.warning('Error adding weightNorm: netParams.cellParams does not contain %s' % (label)) return with open(fileName, 'rb') as fileObj: @@ -509,7 +509,7 @@ def addCellParamsWeightNorm(self, label, fileName, 
threshold=1000): somaSec = next((k for k in list(weightNorm.keys()) if k.startswith('soma')),None) somaWeightNorm = weightNorm[somaSec][0] except: - print('Error setting weightNorm: no soma section available to set threshold') + logger.warning('Error setting weightNorm: no soma section available to set threshold') return for sec, wnorm in weightNorm.items(): if sec in cellRule['secs']: @@ -519,7 +519,7 @@ def addCellParamsWeightNorm(self, label, fileName, threshold=1000): def addCellParamsTemplate(self, label, conds={}, template=None): if label in self.cellParams: - print('CellParams key %s already exists...' % (label)) + logger.info('CellParams key %s already exists...' % (label)) secs = {} if template == 'Simple_HH': @@ -547,7 +547,7 @@ def saveCellParamsRule(self, label, fileName): if label in self.cellParams: cellRule = self.cellParams[label] else: - print('Error saving: netParams.cellParams does not contain %s' % (label)) + logger.warning('Error saving: netParams.cellParams does not contain %s' % (label)) return if ext == 'pkl': diff --git a/netpyne/specs/simConfig.py b/netpyne/specs/simConfig.py index 2e403c9ca..a14f16a64 100644 --- a/netpyne/specs/simConfig.py +++ b/netpyne/specs/simConfig.py @@ -20,7 +20,7 @@ from collections import OrderedDict from .dicts import Dict, ODict - +from netpyne.logger import logger # ---------------------------------------------------------------------------- # SIMULATION CONFIGURATION CLASS @@ -122,14 +122,14 @@ def save(self, filename): os.mkdir(folder) except OSError: if not os.path.exists(folder): - print(' Could not create', folder) + logger.warning('Could not create ' + folder) dataSave = {'simConfig': self.__dict__} # Save to json file if ext == 'json': from .. import sim - print(('Saving simConfig to %s ... ' % (filename))) + logger.info('Saving simConfig to %s ... 
' % (filename)) sim.saveJSON(filename, dataSave) def setParam(self, param, value): diff --git a/netpyne/support/bsmart.py b/netpyne/support/bsmart.py index b0b4d6a3b..f879f030f 100644 --- a/netpyne/support/bsmart.py +++ b/netpyne/support/bsmart.py @@ -49,6 +49,7 @@ from builtins import range from future import standard_library standard_library.install_aliases() +from netpyne.logger import logger # ARMORF -- AR parameter estimation via LWR method modified by Morf. # @@ -101,7 +102,7 @@ def ckchol(M): try: # First, try the Cholesky decomposition output=np.linalg.cholesky(M) except: # If not, just return garbage - print('WARNING: Cholesky failed, so returning (invalid) identity matrix!') + logger.warning('Cholesky failed, so returning (invalid) identity matrix!') output=np.matrix(np.eye(np.size(M,0))) return output diff --git a/netpyne/support/recxelectrode.py b/netpyne/support/recxelectrode.py index 54ab34ce6..7c5cc6497 100644 --- a/netpyne/support/recxelectrode.py +++ b/netpyne/support/recxelectrode.py @@ -3,7 +3,6 @@ """ -from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import @@ -43,7 +42,7 @@ standard_library.install_aliases() import numpy as np import math - +from netpyne.logger import logger class RecXElectrode(object): """Extracellular electrode @@ -61,8 +60,8 @@ def __init__(self, sim): self.nsites = self.pos.shape[0] self.transferResistances = {} except: - print('Error creating extracellular electrode: sim.cfg.recordLFP should contain a list of x,y,z locations') - return None + logger.warning('Error creating extracellular electrode: sim.cfg.recordLFP should contain a list of x,y,z locations') + return None self.nsites = self.pos.shape[1] self.transferResistances = {} # V_e = transfer_resistance*Im diff --git a/netpyne/support/scalebar.py b/netpyne/support/scalebar.py index a244a7d70..926f1ca9f 100644 --- a/netpyne/support/scalebar.py +++ b/netpyne/support/scalebar.py 
@@ -10,7 +10,6 @@ # LICENSE: Python Software Foundation (http://docs.python.org/license.html) from __future__ import unicode_literals -from __future__ import print_function from __future__ import division from __future__ import absolute_import from future import standard_library diff --git a/netpyne/support/stackedBarGraph.py b/netpyne/support/stackedBarGraph.py index 484aaf453..03cf421e1 100644 --- a/netpyne/support/stackedBarGraph.py +++ b/netpyne/support/stackedBarGraph.py @@ -26,7 +26,6 @@ ############################################################################### from __future__ import division -from __future__ import print_function from __future__ import unicode_literals from __future__ import absolute_import from builtins import range @@ -47,6 +46,7 @@ import numpy as np from matplotlib import pyplot as plt +from netpyne.logger import logger ############################################################################### @@ -196,7 +196,7 @@ def stackedBarPlot(self, data_copy /= data_stack[levels-1] data_stack /= data_stack[levels-1] if heights is not None: - print("WARNING: setting scale and heights does not make sense.") + logger.warning("Setting scale and heights does not make sense.") heights = None elif heights is not None: data_copy /= data_stack[levels-1] From c40a6efebd497d5a39650778ce765054419f3475 Mon Sep 17 00:00:00 2001 From: Evgenia Karunus Date: Mon, 2 Aug 2021 14:05:20 +0500 Subject: [PATCH 02/13] Get rid of the sim.cfg.verbose option --- netpyne/batch/asd_parallel.py | 10 ++-------- netpyne/batch/utils.py | 14 ++------------ netpyne/cell/compartCell.py | 2 +- netpyne/conversion/neuromlFormat.py | 14 ++++---------- netpyne/conversion/sonataImport.py | 2 -- netpyne/metadata/metadata.py | 6 ------ netpyne/specs/simConfig.py | 1 - 7 files changed, 9 insertions(+), 40 deletions(-) diff --git a/netpyne/batch/asd_parallel.py b/netpyne/batch/asd_parallel.py index 0a6ae47b4..549dbbcb2 100644 --- a/netpyne/batch/asd_parallel.py +++ 
b/netpyne/batch/asd_parallel.py @@ -51,7 +51,7 @@ def asd(function, xPop, saveFile=None, args=None, stepsize=0.1, sinc=2, sdec=2, pinc=2, pdec=2, pinitial=None, sinitial=None, xmin=None, xmax=None, maxiters=None, maxtime=None, abstol=1e-6, reltol=1e-3, stalliters=None, stoppingfunc=None, randseed=None, - label=None, maxFitness=None, verbose=2, **kwargs): + label=None, maxFitness=None, **kwargs): """ Function for/to @@ -165,11 +165,6 @@ def asd(function, xPop, saveFile=None, args=None, stepsize=0.1, sinc=2, sdec=2, **Default:** ``None`` **Options:** ``