From c0010cabf6bb6745c779fdad670cff861dfbfa4d Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Mon, 18 May 2020 11:28:13 +0100
Subject: [PATCH 01/44] Add nml 2.1 schema
---
neuroml/nml/NeuroML_v2.1.xsd | 3256 ++++++++++++++++++++++++++++++++++
1 file changed, 3256 insertions(+)
create mode 100644 neuroml/nml/NeuroML_v2.1.xsd
diff --git a/neuroml/nml/NeuroML_v2.1.xsd b/neuroml/nml/NeuroML_v2.1.xsd
new file mode 100644
index 00000000..32352f7c
--- /dev/null
+++ b/neuroml/nml/NeuroML_v2.1.xsd
@@ -0,0 +1,3256 @@
+
+
+
+
+
+
+
+
+
+ An id attribute for elements which need to be identified uniquely (normally just within their parent element).
+
+
+
+
+
+
+
+
+
+
+ A value for a physical quantity in NeuroML 2, e.g. 20, -60.0mV or 5nA
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ An id string for pointing to an entry in an annotation element related to a MIRIAM resource. Based on metaid of SBML
+
+
+
+
+
+
+
+
+
+
+
+            An id string for pointing to an entry in the NeuroLex ontology. Use of this attribute is a shorthand for a full
+            RDF based reference to the MIRIAM Resource urn:miriam:neurolex, with a bqbiol:is qualifier
+
+
+
+
+
+
+
+
+
+
+
+
+ An attribute useful as id of segments, connections, etc: integer >=0 only!
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Integer >=1 only!
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Double >0 only
+
+
+
+
+
+
+
+
+ Value which is either 0 or 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Textual human readable notes related to the element in question. It's useful to put these into
+ the NeuroML files instead of XML comments, as the notes can be extracted and repeated in the files to which the NeuroML is mapped.
+
+
+
+
+
+
+
+
+ Generic property with a tag and value
+
+
+
+
+
+
+
+
+ Placeholder for MIRIAM related metadata, among others.
+
+
+
+
+
+
+
+
+
+ Contains an extension to NeuroML by creating custom LEMS ComponentType.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ LEMS ComponentType for Constant.
+
+
+
+
+
+
+
+
+
+
+
+
+ LEMS Exposure (ComponentType property)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ LEMS ComponentType for Dynamics
+
+
+
+
+
+
+
+
+
+
+
+
+ LEMS ComponentType for DerivedVariable
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ LEMS ComponentType for ConditionalDerivedVariable
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Float value restricted to between 1 and 0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ The root NeuroML element.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Various types of cells which are defined in NeuroML 2. This list will be expanded...
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Various types of cells which are defined in NeuroML 2 based on PyNN standard cell models.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Various types of synapse which are defined in NeuroML 2. This list will be expanded...
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Various types of synapse which are defined in NeuroML 2 based on PyNN standard cell/synapse models.
+
+
+
+
+
+
+
+
+
+
+
+                Various types of inputs which are defined in NeuroML 2. This list will be expanded...
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Various types of input which are defined in NeuroML 2 based on PyNN standard cell/synapse models.
+
+
+
+
+
+
+
+
+
+
+ Various types of concentration model which are defined in NeuroML 2. This list will be expanded...
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Kinetic scheme based ion channel.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Note ionChannel and ionChannelHH are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH.
+ NeuroML v2beta4 should remove one of these, probably ionChannelHH.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Note ionChannel and ionChannelHH are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH.
+ NeuroML v2beta4 should remove one of these, probably ionChannelHH.
+
+
+
+
+
+
+
+
+ Same as ionChannel, but with a vShift parameter to change voltage activation of gates. The exact usage of vShift in expressions for rates is determined by the individual gates.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Note all sub elements for gateHHrates, gateHHratesTau, gateFractional etc. allowed here. Which are valid should be constrained by what type is set
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Should not be required, as it's present on the species element!
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Should not be required, as it's present on the species element!
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Gap junction/single electrical connection
+
+
+
+
+
+
+
+
+
+
+
+
+ Dummy synapse which emits no current. Used as presynaptic endpoint for analog synaptic connection (continuousConnection).
+
+
+
+
+
+
+
+
+
+
+
+
+ Behaves just like a one way gap junction.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Based on synapse in Methods of http://www.nature.com/neuro/journal/v7/n12/abs/nn1352.html.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ This is to prevent it conflicting with attribute c (lowercase) e.g. in izhikevichCell2007
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Should only be used if morphology element is outside the cell.
+ This points to the id of the morphology
+
+
+
+
+
+
+ Should only be used if biophysicalProperties element is outside the cell.
+ This points to the id of the biophysicalProperties
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Standalone element which is usually inside a single cell, but could be outside and
+ referenced by id.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ A 3D point with diameter.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Allowed metrics for InhomogeneousParam
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Standalone element which is usually inside a single cell, but could be outside and
+ referenced by id.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Standalone element which is usually inside a single cell, but could be outside and
+ referenced by id.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Using a thin extension of ValueAcrossSegOrSegGroup to facilitate library generation (e.g. libNeuroML)
+
+
+
+
+
+
+
+
+
+ Using a thin extension of ValueAcrossSegOrSegGroup to facilitate library generation (e.g. libNeuroML)
+
+
+
+
+
+
+
+
+
+ Using a thin extension of ValueAcrossSegOrSegGroup to facilitate library generation (e.g. libNeuroML)
+
+
+
+
+
+
+
+
+
+ Using a thin extension of ValueAcrossSegOrSegGroup to facilitate library generation (e.g. libNeuroML)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here
+ TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise.
+ Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON).
+ Currently a required attribute.
+ It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel
+ element. TODO: remove.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here
+ TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise.
+ Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON).
+ Currently a required attribute.
+ It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel
+ element. TODO: remove.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here
+ TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise.
+ Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON).
+ Currently a required attribute.
+ It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel
+ element. TODO: remove.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here
+ TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise.
+ Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON).
+ Currently a required attribute.
+ It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel
+ element. TODO: remove.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here
+ TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise.
+ Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON).
+ Currently a required attribute.
+ It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel
+ element. TODO: remove.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here
+ TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise.
+ Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON).
+ Currently a required attribute.
+ It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel
+ element. TODO: remove.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here
+ TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise.
+ Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON).
+ Currently a required attribute.
+ It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel
+ element. TODO: remove.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here
+ TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise.
+ Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON).
+ Currently a required attribute.
+ It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel
+ element. TODO: remove.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Specifying the ion here again is redundant, the ion name should be the same as id. Kept for now
+ until LEMS implementation can select by id. TODO: remove.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Generates a constant current pulse of a certain amplitude (with dimensions for current) for a specified duration after a delay.
+
+
+
+
+
+
+
+
+
+
+
+
+ Generates a constant current pulse of a certain amplitude (non dimensional) for a specified duration after a delay.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Single explicit connection. Introduced to test connections in LEMS. Will probably be removed in favour of
+ connections wrapped in projection element
+
+
+
+
+
+
+
+
+
+
+ Base for projection (set of synaptic connections) between two populations
+
+
+
+
+
+
+
+
+
+
+
+ Projection (set of synaptic connections) between two populations. Chemical/event based synaptic transmission
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Base of all synaptic connections (chemical/electrical/analog, etc.) inside projections
+
+
+
+
+
+
+
+
+
+
+
+ Base of all synaptic connections with preCellId, postSegmentId, etc.
+ Note: this is not the best name for these attributes, since Id is superfluous, hence BaseConnectionNewFormat
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Base of all synaptic connections with preCell, postSegment, etc.
+ See BaseConnectionOldFormat
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Individual chemical (event based) synaptic connection, weight==1 and no delay
+
+
+
+
+
+
+
+
+
+ Individual synaptic connection with weight and delay
+
+
+
+
+
+
+
+
+
+
+
+
+ Projection between two populations consisting of electrical connections (gap junctions)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Individual electrical synaptic connection
+
+
+
+
+
+
+
+
+
+
+
+ Projection between two populations consisting of analog connections (e.g. graded synapses)
+
+
+
+
+
+
+
+
+ Projection between two populations consisting of analog connections (e.g. graded synapses). Includes setting of weight for the connection
+
+
+
+
+
+
+
+
+
+
+
+ Projection between two populations consisting of analog connections (e.g. graded synapses)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Individual continuous/analog synaptic connection
+
+
+
+
+
+
+
+
+
+
+
+
+ Individual continuous/analog synaptic connection - instance based
+
+
+
+
+
+
+
+
+
+ Individual continuous/analog synaptic connection - instance based. Includes setting of _weight for the connection
+
+
+
+
+
+
+
+
+
+
+
+ Single explicit input. Introduced to test inputs in LEMS. Will probably be removed in favour of
+ inputs wrapped in inputList element
+
+
+
+
+
+
+
+
+
+ List of inputs to a population. Currents will be provided by the specified component.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Individual input to the cell specified by target
+
+
+
+
+
+
+
+
+
+
+
+ Individual input to the cell specified by target. Includes setting of _weight for the connection
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Base element without ID specified *yet*, e.g. for an element with a particular requirement on its id which does not comply with NmlId (e.g. Segment needs nonNegativeInteger).
+
+
+
+
+
+
+
+
+
+
+ Anything which can have a unique (within its parent) id, which must be an integer zero or greater.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Anything which can have a unique (within its parent) id of the form NmlId (spaceless combination of letters, numbers and underscore).
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Elements which can stand alone and be referenced by id, e.g. cell, morphology.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
From 2807173626d3d00e6ffadd728a43465603a54431 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Mon, 18 May 2020 11:48:52 +0100
Subject: [PATCH 02/44] Update to v0.2.51; use NML schema v2.1
---
.travis.yml | 2 --
neuroml/__init__.py | 4 ++--
neuroml/examples/test_files/complete.nml | 2 +-
neuroml/examples/test_files/complete.nml.h5 | Bin 70042 -> 70042 bytes
neuroml/examples/test_files/testh5.nml | 2 +-
neuroml/nml/README.md | 2 +-
neuroml/nml/nml.py | 8 ++++----
setup.py | 1 +
8 files changed, 10 insertions(+), 11 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index da245f08..21afd7fe 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -9,8 +9,6 @@ addons:
matrix:
include:
- python: 3.7
- dist: xenial
- sudo: true
- python: 3.6
- python: 3.5
- python: 2.7
diff --git a/neuroml/__init__.py b/neuroml/__init__.py
index b96d5544..1c9fb09c 100644
--- a/neuroml/__init__.py
+++ b/neuroml/__init__.py
@@ -1,7 +1,7 @@
from .nml.nml import * # allows importation of all neuroml classes
-__version__ = '0.2.50'
+__version__ = '0.2.51'
__version_info__ = tuple(int(i) for i in __version__.split('.'))
-current_neuroml_version = "v2.0"
+current_neuroml_version = "v2.1"
diff --git a/neuroml/examples/test_files/complete.nml b/neuroml/examples/test_files/complete.nml
index 67f1cd60..b7638c0f 100644
--- a/neuroml/examples/test_files/complete.nml
+++ b/neuroml/examples/test_files/complete.nml
@@ -1,4 +1,4 @@
-
+
Lots of notes....
diff --git a/neuroml/examples/test_files/complete.nml.h5 b/neuroml/examples/test_files/complete.nml.h5
index d598d6be3ebcb581ba50cb83682a912db39d372f..c5cb4929d569e8d555315381c71eebcc2e981b1f 100644
GIT binary patch
delta 161
zcmbQWm}S;tmJKgC84WkT;#?)kaxVQ)+++tchs`&XQ-r|0%?@TqBoN{r{w5+2@rGm-
zQH1!5iYvkp@dG^~5EWo^Bo>~5h=X|=TSXvxHanbZK@#V
+
Root notes
diff --git a/neuroml/nml/README.md b/neuroml/nml/README.md
index 13dc064e..eba907c7 100644
--- a/neuroml/nml/README.md
+++ b/neuroml/nml/README.md
@@ -10,7 +10,7 @@ Unit tests should be run to confirm this.
generateDS.py should be invoked in this folder (so that generateds_config.py can be located) with the following command (namespace def here will be mended when it's become stable)
- generateDS.py -o nml.py --use-getter-setter=none --silence --user-methods=helper_methods NeuroML_v2beta5.xsd
+ generateDS.py -o nml.py --use-getter-setter=none --silence --user-methods=helper_methods NeuroML_v2.1.xsd
You may have to add the current folder to your PYTHONPATH, i.e.
diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py
index 6ce043aa..24fe1107 100644
--- a/neuroml/nml/nml.py
+++ b/neuroml/nml/nml.py
@@ -2,8 +2,8 @@
# -*- coding: utf-8 -*-
#
-# Generated Tue Jul 2 13:54:28 2019 by generateDS.py version 2.30.12.
-# Python 2.7.15 |Anaconda, Inc.| (default, Dec 14 2018, 19:04:19) [GCC 7.3.0]
+# Generated Mon May 18 11:32:26 2020 by generateDS.py version 2.30.12.
+# Python 2.7.16 |Anaconda, Inc.| (default, Mar 14 2019, 21:00:58) [GCC 7.3.0]
#
# Command line options:
# ('-o', 'nml.py')
@@ -12,10 +12,10 @@
# ('--user-methods', 'helper_methods')
#
# Command line arguments:
-# NeuroML_v2beta5.xsd
+# NeuroML_v2.1.xsd
#
# Command line:
-# /home/padraig/anaconda2/bin/generateDS.py -o "nml.py" --use-getter-setter="none" --silence --user-methods="helper_methods" NeuroML_v2beta5.xsd
+# /home/padraig/anaconda2/bin/generateDS.py -o "nml.py" --use-getter-setter="none" --silence --user-methods="helper_methods" NeuroML_v2.1.xsd
#
# Current working directory (os.getcwd()):
# nml
diff --git a/setup.py b/setup.py
index fb8a2be9..d41a8895 100644
--- a/setup.py
+++ b/setup.py
@@ -17,6 +17,7 @@
author_email = "vellamike@gmail.com, p.gleeson@gmail.com",
description = "A Python library for working with NeuroML descriptions of neuronal models",
long_description = long_description,
+ long_description_content_type="text/markdown",
install_requires=['lxml', 'six'],
tests_require=["nose"],
extras_require={"full": [
From b91d0b957fd822792c1a3569cf779f993a0ce183 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Tue, 16 Jun 2020 18:54:42 +0100
Subject: [PATCH 03/44] Improved helper methods & more tests
---
neuroml/__init__.py | 2 +-
.../examples/test_files/pyr_4_sym.cell.nml | 10 +-
neuroml/nml/helper_methods.py | 148 ++++++++++++++++--
neuroml/nml/nml.py | 144 ++++++++++++++---
neuroml/test/test_cell.py | 109 ++++++++++++-
neuroml/test/test_segment.py | 92 ++++++++++-
6 files changed, 468 insertions(+), 37 deletions(-)
diff --git a/neuroml/__init__.py b/neuroml/__init__.py
index 1c9fb09c..6b1c6841 100644
--- a/neuroml/__init__.py
+++ b/neuroml/__init__.py
@@ -1,6 +1,6 @@
from .nml.nml import * # allows importation of all neuroml classes
-__version__ = '0.2.51'
+__version__ = '0.2.52'
__version_info__ = tuple(int(i) for i in __version__.split('.'))
diff --git a/neuroml/examples/test_files/pyr_4_sym.cell.nml b/neuroml/examples/test_files/pyr_4_sym.cell.nml
index a78525eb..dda62784 100644
--- a/neuroml/examples/test_files/pyr_4_sym.cell.nml
+++ b/neuroml/examples/test_files/pyr_4_sym.cell.nml
@@ -163,8 +163,14 @@
-
-
+
+
+
+
+
+
+
+
diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py
index 17ad59ae..fbc15d41 100644
--- a/neuroml/nml/helper_methods.py
+++ b/neuroml/nml/helper_methods.py
@@ -95,10 +95,14 @@ def num_segments(self):
source='''\
@property
def length(self):
+
+ if self.proximal==None:
+ raise Exception('Cannot get length of segment '+str(self.id)+' using the length property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_length(segment_id) on the cell instead.')
+
prox_x = self.proximal.x
prox_y = self.proximal.y
prox_z = self.proximal.z
-
+
dist_x = self.distal.x
dist_y = self.distal.y
dist_z = self.distal.z
@@ -109,7 +113,7 @@ def length(self):
def __str__(self):
- return ""
+ return ""
def __repr__(self):
@@ -123,10 +127,23 @@ def __repr__(self):
source='''\
@property
def volume(self):
+
from math import pi
-
+ if self.proximal==None:
+ raise Exception('Cannot get volume of segment '+str(self.id)+' using the volume property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_volume(segment_id) on the cell instead.')
+
prox_rad = self.proximal.diameter/2.0
dist_rad = self.distal.diameter/2.0
+
+ if self.proximal.x == self.distal.x and \
+ self.proximal.y == self.distal.y and \
+ self.proximal.z == self.distal.z:
+
+ if prox_rad!=dist_rad:
+ raise Exception('Cannot get volume of segment '+str(self.id)+'. The (x,y,z) coordinates of the proximal and distal points match (i.e. it is a sphere), but the diameters of these points are different, making the volume calculation ambiguous.')
+
+ return 4.0/3 * pi * prox_rad**3
+
length = self.length
volume = (pi/3)*length*(prox_rad**2+dist_rad**2+prox_rad*dist_rad)
@@ -144,9 +161,22 @@ def volume(self):
def surface_area(self):
from math import pi
from math import sqrt
-
+
+ if self.proximal==None:
+ raise Exception('Cannot get surface area of segment '+str(self.id)+' using the surface_area property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_surface_area(segment_id) on the cell instead.')
+
prox_rad = self.proximal.diameter/2.0
dist_rad = self.distal.diameter/2.0
+
+ if self.proximal.x == self.distal.x and \
+ self.proximal.y == self.distal.y and \
+ self.proximal.z == self.distal.z:
+
+ if prox_rad!=dist_rad:
+ raise Exception('Cannot get surface area of segment '+str(self.id)+'. The (x,y,z) coordinates of the proximal and distal points match (i.e. it is a sphere), but the diameters of these points are different, making the surface area calculation ambiguous.')
+
+ return 4.0 * pi * prox_rad**2
+
length = self.length
surface_area = pi*(prox_rad+dist_rad)*sqrt((prox_rad-dist_rad)**2+length**2)
@@ -183,9 +213,39 @@ def __repr__(self):
''',
class_names=("SegmentGroup")
)
+
+METHOD_SPECS+=(seg_grp,)
+
+seg_grp = MethodSpec(name='Point3DWithDiam',
+ source='''\
+
+ def __str__(self):
+
+ return "("+str(self.x)+", "+str(self.y)+", "+str(self.z)+"), diam "+str(self.diameter)+"um"
+
+ def __repr__(self):
+
+ return str(self)
+
+ def distance_to(self, other_3d_point):
+ a_x = self.x
+ a_y = self.y
+ a_z = self.z
+
+ b_x = other_3d_point.x
+ b_y = other_3d_point.y
+ b_z = other_3d_point.z
+
+ distance = ((a_x-b_x)**2 + (a_y-b_y)**2 + (a_z-b_z)**2)**(0.5)
+ return distance
+
+''',
+ class_names=("Point3DWithDiam")
+ )
METHOD_SPECS+=(seg_grp,)
+
connection_cell_ids = MethodSpec(name='connection_cell_ids',
source='''\
@@ -728,6 +788,72 @@ def __str__(self):
source='''\
+ # Get segment object by its id
+ def get_segment(self, segment_id):
+
+ for segment in self.morphology.segments:
+ if segment.id == segment_id:
+ return segment
+
+ raise Exception("Segment with id "+str(segment_id)+" not found in cell "+str(self.id))
+
+ # Get the proximal point of a segment, even the proximal field is None and
+ # so the proximal point is on the parent (at a point set by fraction_along)
+ def get_actual_proximal(self, segment_id):
+
+ segment = self.get_segment(segment_id)
+ if segment.proximal:
+ return segment.proximal
+
+ parent = self.get_segment(segment.parent.segments)
+ fract = float(segment.parent.fraction_along)
+ if fract==1:
+ return parent.distal
+ elif fract==0:
+ return self.get_actual_proximal(segment.parent.segments)
+ else:
+ pd = parent.distal
+ pp = self.get_actual_proximal(segment.parent.segments)
+ p = Point3DWithDiam((1-fract)*pp.x+fract*pd.x, (1-fract)*pp.y+fract*pd.y, (1-fract)*pp.z+fract*pd.z)
+ p.diameter = (1-fract)*pp.diameter+fract*pd.diameter
+
+ return p
+
+ def get_segment_length(self, segment_id):
+
+ segment = self.get_segment(segment_id)
+ if segment.proximal:
+ return segment.length
+ else:
+ prox = self.get_actual_proximal(segment_id)
+
+ length = segment.distal.distance_to(prox)
+
+ return length
+
+ def get_segment_surface_area(self, segment_id):
+
+ segment = self.get_segment(segment_id)
+ if segment.proximal:
+ return segment.surface_area
+ else:
+ prox = self.get_actual_proximal(segment_id)
+
+ temp_seg = Segment(distal=segment.distal, proximal=prox)
+
+ return temp_seg.surface_area
+
+ def get_segment_volume(self, segment_id):
+
+ segment = self.get_segment(segment_id)
+ if segment.proximal:
+ return segment.volume
+ else:
+ prox = self.get_actual_proximal(segment_id)
+
+ temp_seg = Segment(distal=segment.distal, proximal=prox)
+
+ return temp_seg.volume
def get_segment_ids_vs_segments(self):
@@ -744,7 +870,9 @@ def get_all_segments_in_group(self,
for sg in self.morphology.segment_groups:
if sg.id == segment_group:
segment_group = sg
-
+ if isinstance(segment_group, str): #
+ raise Exception('No segment group '+segment_group+ ' found in cell '+self.id)
+
all_segs = []
for member in segment_group.members:
@@ -820,12 +948,8 @@ def get_ordered_segments_in_groups(self,
tot_len = 0
for seg in ord_segs[key]:
- d = seg.distal
- p = seg.proximal
- if not p:
- parent_seg = segments[seg.parent.segments]
- p = parent_seg.distal
- length = math.sqrt( (d.x-p.x)**2 + (d.y-p.y)**2 + (d.z-p.z)**2 )
+
+ length = self.get_segment_length(seg.id)
if not seg.parent or not seg.parent.segments in path_lengths_to_distal[key]:
@@ -886,8 +1010,6 @@ def summary(self):
print("* Notes: "+str(self.notes))
print("* Segments: "+str(len(self.morphology.segments)))
print("* SegmentGroups: "+str(len(self.morphology.segment_groups)))
-
-
print("*******************************************************")
''',
diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py
index 24fe1107..e7e25923 100644
--- a/neuroml/nml/nml.py
+++ b/neuroml/nml/nml.py
@@ -2,8 +2,8 @@
# -*- coding: utf-8 -*-
#
-# Generated Mon May 18 11:32:26 2020 by generateDS.py version 2.30.12.
-# Python 2.7.16 |Anaconda, Inc.| (default, Mar 14 2019, 21:00:58) [GCC 7.3.0]
+# Generated Tue Jun 16 15:29:34 2020 by generateDS.py version 2.30.11.
+# Python 2.7.15 |Anaconda, Inc.| (default, Oct 23 2018, 13:35:16) [GCC 4.2.1 Compatible Clang 4.0.1 (tags/RELEASE_401/final)]
#
# Command line options:
# ('-o', 'nml.py')
@@ -15,13 +15,12 @@
# NeuroML_v2.1.xsd
#
# Command line:
-# /home/padraig/anaconda2/bin/generateDS.py -o "nml.py" --use-getter-setter="none" --silence --user-methods="helper_methods" NeuroML_v2.1.xsd
+# /Users/padraig/anaconda/envs/py27/bin/generateDS.py -o "nml.py" --use-getter-setter="none" --silence --user-methods="helper_methods" NeuroML_v2.1.xsd
#
# Current working directory (os.getcwd()):
# nml
#
-import os
import sys
import re as re_
import base64
@@ -49,7 +48,7 @@ def parsexml_(infile, parser=None, **kwargs):
except AttributeError:
# fallback to xml.etree
parser = etree_.XMLParser()
- doc = etree_.parse(os.path.join(infile), parser=parser, **kwargs)
+ doc = etree_.parse(infile, parser=parser, **kwargs)
return doc
def parsexmlstring_(instring, parser=None, **kwargs):
@@ -3487,6 +3486,27 @@ def buildAttributes(self, node, attrs, already_processed):
self.validate_DoubleGreaterThanZero(self.diameter) # validate type DoubleGreaterThanZero
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
+
+ def __str__(self):
+
+ return "("+str(self.x)+", "+str(self.y)+", "+str(self.z)+"), diam "+str(self.diameter)+"um"
+
+ def __repr__(self):
+
+ return str(self)
+
+ def distance_to(self, other_3d_point):
+ a_x = self.x
+ a_y = self.y
+ a_z = self.z
+
+ b_x = other_3d_point.x
+ b_y = other_3d_point.y
+ b_z = other_3d_point.z
+
+ distance = ((a_x-b_x)**2 + (a_y-b_y)**2 + (a_z-b_z)**2)**(0.5)
+ return distance
+
# end class Point3DWithDiam
@@ -12323,10 +12343,14 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
super(Segment, self).buildChildren(child_, node, nodeName_, True)
@property
def length(self):
+
+ if self.proximal==None:
+ raise Exception('Cannot get length of segment '+str(self.id)+' using the length property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_length(segment_id) on the cell instead.')
+
prox_x = self.proximal.x
prox_y = self.proximal.y
prox_z = self.proximal.z
-
+
dist_x = self.distal.x
dist_y = self.distal.y
dist_z = self.distal.z
@@ -12337,7 +12361,7 @@ def length(self):
def __str__(self):
- return ""
+ return ""
def __repr__(self):
@@ -12345,10 +12369,21 @@ def __repr__(self):
@property
def volume(self):
+
from math import pi
-
+ if self.proximal==None:
+ raise Exception('Cannot get volume of segment '+str(self.id)+' using the volume property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_volume(segment_id) on the cell instead.')
+
prox_rad = self.proximal.diameter/2.0
dist_rad = self.distal.diameter/2.0
+
+ if self.proximal.x == self.distal.x and self.proximal.y == self.distal.y and self.proximal.z == self.distal.z:
+
+ if prox_rad!=dist_rad:
+ raise Exception('Cannot get volume of segment '+str(self.id)+'. The (x,y,z) coordinates of the proximal and distal points match (i.e. it is a sphere), but the diameters of these points are different, making the volume calculation ambiguous.')
+
+ return 4.0/3 * pi * prox_rad**3
+
length = self.length
volume = (pi/3)*length*(prox_rad**2+dist_rad**2+prox_rad*dist_rad)
@@ -12359,9 +12394,20 @@ def volume(self):
def surface_area(self):
from math import pi
from math import sqrt
-
+
+ if self.proximal==None:
+ raise Exception('Cannot get surface area of segment '+str(self.id)+' using the surface_area property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_surface_area(segment_id) on the cell instead.')
+
prox_rad = self.proximal.diameter/2.0
dist_rad = self.distal.diameter/2.0
+
+ if self.proximal.x == self.distal.x and self.proximal.y == self.distal.y and self.proximal.z == self.distal.z:
+
+ if prox_rad!=dist_rad:
+ raise Exception('Cannot get surface area of segment '+str(self.id)+'. The (x,y,z) coordinates of the proximal and distal points match (i.e. it is a sphere), but the diameters of these points are different, making the surface area calculation ambiguous.')
+
+ return 4.0 * pi * prox_rad**2
+
length = self.length
surface_area = pi*(prox_rad+dist_rad)*sqrt((prox_rad-dist_rad)**2+length**2)
@@ -17721,6 +17767,72 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
super(Cell, self).buildChildren(child_, node, nodeName_, True)
+ # Get segment object by its id
+ def get_segment(self, segment_id):
+
+ for segment in self.morphology.segments:
+ if segment.id == segment_id:
+ return segment
+
+ raise Exception("Segment with id "+str(segment_id)+" not found in cell "+str(self.id))
+
+    # Get the proximal point of a segment, even if the proximal field is None and
+ # so the proximal point is on the parent (at a point set by fraction_along)
+ def get_actual_proximal(self, segment_id):
+
+ segment = self.get_segment(segment_id)
+ if segment.proximal:
+ return segment.proximal
+
+ parent = self.get_segment(segment.parent.segments)
+ fract = float(segment.parent.fraction_along)
+ if fract==1:
+ return parent.distal
+ elif fract==0:
+ return self.get_actual_proximal(segment.parent.segments)
+ else:
+ pd = parent.distal
+ pp = self.get_actual_proximal(segment.parent.segments)
+ p = Point3DWithDiam((1-fract)*pp.x+fract*pd.x, (1-fract)*pp.y+fract*pd.y, (1-fract)*pp.z+fract*pd.z)
+ p.diameter = (1-fract)*pp.diameter+fract*pd.diameter
+
+ return p
+
+ def get_segment_length(self, segment_id):
+
+ segment = self.get_segment(segment_id)
+ if segment.proximal:
+ return segment.length
+ else:
+ prox = self.get_actual_proximal(segment_id)
+
+ length = segment.distal.distance_to(prox)
+
+ return length
+
+ def get_segment_surface_area(self, segment_id):
+
+ segment = self.get_segment(segment_id)
+ if segment.proximal:
+ return segment.surface_area
+ else:
+ prox = self.get_actual_proximal(segment_id)
+
+ temp_seg = Segment(distal=segment.distal, proximal=prox)
+
+ return temp_seg.surface_area
+
+ def get_segment_volume(self, segment_id):
+
+ segment = self.get_segment(segment_id)
+ if segment.proximal:
+ return segment.volume
+ else:
+ prox = self.get_actual_proximal(segment_id)
+
+ temp_seg = Segment(distal=segment.distal, proximal=prox)
+
+ return temp_seg.volume
def get_segment_ids_vs_segments(self):
@@ -17737,7 +17849,9 @@ def get_all_segments_in_group(self,
for sg in self.morphology.segment_groups:
if sg.id == segment_group:
segment_group = sg
-
+ if isinstance(segment_group, str): #
+ raise Exception('No segment group '+segment_group+ ' found in cell '+self.id)
+
all_segs = []
for member in segment_group.members:
@@ -17813,12 +17927,8 @@ def get_ordered_segments_in_groups(self,
tot_len = 0
for seg in ord_segs[key]:
- d = seg.distal
- p = seg.proximal
- if not p:
- parent_seg = segments[seg.parent.segments]
- p = parent_seg.distal
- length = math.sqrt( (d.x-p.x)**2 + (d.y-p.y)**2 + (d.z-p.z)**2 )
+
+ length = self.get_segment_length(seg.id)
if not seg.parent or not seg.parent.segments in path_lengths_to_distal[key]:
@@ -17879,8 +17989,6 @@ def summary(self):
print("* Notes: "+str(self.notes))
print("* Segments: "+str(len(self.morphology.segments)))
print("* SegmentGroups: "+str(len(self.morphology.segment_groups)))
-
-
print("*******************************************************")
# end class Cell
diff --git a/neuroml/test/test_cell.py b/neuroml/test/test_cell.py
index 25f92356..b80ea45c 100644
--- a/neuroml/test/test_cell.py
+++ b/neuroml/test/test_cell.py
@@ -5,6 +5,16 @@
import neuroml
from neuroml import loaders
+
+from neuroml import Segment
+from neuroml import SegmentParent
+from neuroml import SegmentGroup
+from neuroml import Member
+from neuroml import Include
+from neuroml import Cell
+from neuroml import Morphology
+from neuroml import Point3DWithDiam
+
import os
try:
@@ -22,8 +32,12 @@ def test_cell_methods(self):
for cell_name in cells:
- root_dir = os.path.dirname(neuroml.__file__)
- test_file_path = os.path.join(root_dir,'examples/test_files/%s.cell.nml'%cell_name)
+ local_path = '../examples/test_files/%s.cell.nml'%cell_name
+ if os.path.isfile(local_path):
+ test_file_path = local_path
+ else:
+ root_dir = os.path.dirname(neuroml.__file__)
+ test_file_path = os.path.join(root_dir,'examples/test_files/%s.cell.nml'%cell_name)
print('test file path is: '+test_file_path)
f = open(test_file_path,'r')
@@ -32,6 +46,14 @@ def test_cell_methods(self):
cell = doc.cells[0]
self.assertEqual(cell.id,cell_name.split('.')[0])
+ exp_num_segs = 9
+ self.assertEqual(cell.morphology.num_segments,exp_num_segs)
+ self.assertEqual(len(cell.get_segment_ids_vs_segments()),exp_num_segs)
+
+ seg0 = cell.get_segment(0)
+
+ self.assertRaises(Exception, lambda: cell.get_segment(-1)) # Seg -1 doesn't exist...
+
cell.summary()
@@ -73,3 +95,86 @@ def test_cell_methods(self):
assert all_len == soma_len+dend_len
+
+ def test_cell_methods2(self):
+ cell = Cell(id='cell0')
+
+ diam = 1.
+ d0=Point3DWithDiam(x=0, y=0, z=0, diameter=diam)
+ p=Point3DWithDiam(x=0, y=0, z=0, diameter=diam)
+
+ seg0 = Segment(id=0, name='soma',proximal=p, distal=d0)
+
+ d1=Point3DWithDiam(x=10, y=0, z=0, diameter=diam)
+
+ cell.morphology = Morphology()
+ cell.morphology.segments.append(seg0)
+
+ seg1 = Segment(id=1, distal=d1, parent=SegmentParent(0))
+ cell.morphology.segments.append(seg1)
+
+ d2=Point3DWithDiam(x=20, y=0, z=0, diameter=diam)
+
+ seg2 = Segment(id=2, proximal =d1, distal=d2, parent=SegmentParent(seg1.id))
+ cell.morphology.segments.append(seg2)
+
+ d3=Point3DWithDiam(x=20, y=10, z=0, diameter=diam)
+
+ seg3 = Segment(id=3, distal=d3, parent=SegmentParent(seg2.id, fraction_along=1))
+ cell.morphology.segments.append(seg3)
+
+ sg1 = SegmentGroup(id='all')
+ for seg in [seg0,seg1,seg2,seg3]:
+ sg1.members.append(Member(seg.id))
+ cell.morphology.segment_groups.append(sg1)
+
+ sg2 = SegmentGroup(id='soma_group')
+ for seg in [seg0]:
+ sg2.members.append(Member(seg.id))
+ cell.morphology.segment_groups.append(sg2)
+
+ sg3 = SegmentGroup(id='dend_group')
+ for seg in [seg1,seg2,seg3]:
+ sg3.members.append(Member(seg.id))
+ cell.morphology.segment_groups.append(sg3)
+
+ sg4 = SegmentGroup(id='soma_dends')
+ for sg in [sg2,sg3]:
+ sg4.includes.append(Include(sg.id))
+ cell.morphology.segment_groups.append(sg4)
+
+ expected = {sg1:4,sg2:1,sg3:3,sg4:4}
+
+ for sg in [sg1,sg2,sg3,sg4]:
+ segs = cell.get_all_segments_in_group(sg.id)
+ print('\nSeg group %s has segments: %s'%(sg,segs))
+ self.assertEqual(expected[sg],len(segs))
+
+ osegs = cell.get_ordered_segments_in_groups(sg.id)
+ print('Seg group %s has ordered segments: %s'%(sg.id,osegs))
+ self.assertEqual(expected[sg],len(osegs[sg.id]))
+
+ ord_segs, cumulative_lengths, path_lengths_to_proximal, path_lengths_to_distal = cell.get_ordered_segments_in_groups(sg.id,
+ include_cumulative_lengths=True,
+ include_path_lengths=True)
+
+ print('Seg group %s has cumulative_lengths: %s'%(sg.id,cumulative_lengths))
+ self.assertEqual(expected[sg],len(cumulative_lengths[sg.id]))
+
+ print('Seg group %s has path_lengths_to_proximal: %s'%(sg.id,path_lengths_to_proximal))
+ self.assertEqual(expected[sg],len(path_lengths_to_proximal[sg.id]))
+
+ print('Seg group %s has path_lengths_to_distal: %s'%(sg.id,path_lengths_to_distal))
+ self.assertEqual(expected[sg],len(path_lengths_to_distal[sg.id]))
+
+
+
+ def runTest(self):
+ print("Running tests in TestCell")
+
+if __name__ == '__main__':
+
+ ta = TestCell()
+
+ ta.test_cell_methods()
+ ta.test_cell_methods2()
\ No newline at end of file
diff --git a/neuroml/test/test_segment.py b/neuroml/test/test_segment.py
index 1f35320d..77342157 100644
--- a/neuroml/test/test_segment.py
+++ b/neuroml/test/test_segment.py
@@ -6,6 +6,9 @@
import math
from neuroml import Segment
+from neuroml import SegmentParent
+from neuroml import Cell
+from neuroml import Morphology
from neuroml import Point3DWithDiam
try:
@@ -175,6 +178,90 @@ def test_volume(self):
self.assertAlmostEqual(seg.volume, 0.7932855820702964, places=7)
+ def test_spherical(self):
+
+ diam = 1.
+ d=Point3DWithDiam(x=0,
+ y=0,
+ z=0,
+ diameter=diam)
+ p=Point3DWithDiam(x=0,
+ y=0,
+ z=0,
+ diameter=diam)
+
+ seg = Segment(id=0, proximal=p, distal=d)
+
+ self.assertEqual(seg.length, 0)
+ self.assertEqual(seg.surface_area, math.pi)
+
+
+ def test_cell_with_segs(self):
+
+ cell = Cell(id='cell0')
+
+ diam = 1.
+ d0=Point3DWithDiam(x=0, y=0, z=0, diameter=diam)
+ p=Point3DWithDiam(x=0, y=0, z=0, diameter=diam)
+
+ seg0 = Segment(id=0, proximal=p, distal=d0)
+
+ d1=Point3DWithDiam(x=10, y=0, z=0, diameter=diam)
+
+ cell.morphology = Morphology()
+ cell.morphology.segments.append(seg0)
+
+ seg1 = Segment(id=1, distal=d1, parent=SegmentParent(0))
+ cell.morphology.segments.append(seg1)
+
+ d2=Point3DWithDiam(x=20, y=0, z=0, diameter=diam)
+
+ seg2 = Segment(id=2, proximal =d1, distal=d2, parent=SegmentParent(seg1.id))
+ cell.morphology.segments.append(seg2)
+
+ d3=Point3DWithDiam(x=15, y=10, z=0, diameter=diam)
+
+ seg3 = Segment(id=3, distal=d3, parent=SegmentParent(seg2.id, fraction_along=0.5))
+ cell.morphology.segments.append(seg3)
+ for f in [0,0.25,0.5,0.75,1]:
+ seg3.parent.fraction_along = f
+ print('Fract: %s, length: %s, proximal: %s'%(f, cell.get_segment_length(seg3.id), cell.get_actual_proximal(seg3.id)))
+
+ self.assertEqual(seg0.length, 0)
+ self.assertEqual(cell.get_segment_length(seg0.id), 0)
+
+ self.assertRaises(Exception, lambda: seg1.length) # No proximal
+ self.assertEqual(cell.get_segment_length(seg1.id), 10)
+
+ self.assertEqual(seg2.length, 10)
+ self.assertEqual(cell.get_segment_length(seg2.id), 10)
+
+
+ self.assertEqual(seg0.surface_area, math.pi)
+ self.assertEqual(cell.get_segment_surface_area(seg0.id), math.pi)
+ self.assertRaises(Exception, lambda: seg1.surface_area) # No proximal
+ self.assertEqual(cell.get_segment_surface_area(seg1.id), math.pi*10)
+ self.assertEqual(seg2.surface_area, math.pi*10)
+ self.assertEqual(cell.get_segment_surface_area(seg2.id), math.pi*10)
+
+ v = 4.0/3*math.pi*(diam/2)**3
+ self.assertEqual(seg0.volume, v)
+ self.assertEqual(cell.get_segment_volume(seg0.id), v)
+ v = math.pi*seg2.proximal.diameter/2*seg2.proximal.diameter/2*seg2.length
+ self.assertRaises(Exception, lambda: seg1.volume) # No proximal
+ self.assertAlmostEqual(cell.get_segment_volume(seg1.id), v, places=7)
+ self.assertAlmostEqual(seg2.volume, v, places=7)
+ self.assertAlmostEqual(cell.get_segment_volume(seg2.id), v, places=7)
+
+
+ # Break the sphere...
+ seg0.distal.diameter = diam*2
+ self.assertRaises(Exception, lambda: seg0.surface_area)
+ self.assertRaises(Exception, lambda: seg0.volume)
+
+ print('Passed...')
+ ''' '''
+
def runTest(self):
print("Running tests in TestHelperProperties")
@@ -195,4 +282,7 @@ class TestAttachedSegments(unittest.TestCase):
ta.test_volume0()
ta.test_volume1()
- ta.test_volume()
\ No newline at end of file
+ ta.test_volume()
+
+ ta.test_spherical()
+ ta.test_cell_with_segs()
\ No newline at end of file
From 76399ab4715d758a965bcf9ee71aebf2a8c92b70 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Tue, 16 Jun 2020 19:08:02 +0100
Subject: [PATCH 04/44] Tweaking
---
neuroml/test/test_cell.py | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/neuroml/test/test_cell.py b/neuroml/test/test_cell.py
index b80ea45c..c17a6323 100644
--- a/neuroml/test/test_cell.py
+++ b/neuroml/test/test_cell.py
@@ -143,29 +143,29 @@ def test_cell_methods2(self):
sg4.includes.append(Include(sg.id))
cell.morphology.segment_groups.append(sg4)
- expected = {sg1:4,sg2:1,sg3:3,sg4:4}
+ expected = {sg1.id:4,sg2.id:1,sg3.id:3,sg4.id:4}
for sg in [sg1,sg2,sg3,sg4]:
segs = cell.get_all_segments_in_group(sg.id)
print('\nSeg group %s has segments: %s'%(sg,segs))
- self.assertEqual(expected[sg],len(segs))
+ self.assertEqual(expected[sg.id],len(segs))
osegs = cell.get_ordered_segments_in_groups(sg.id)
print('Seg group %s has ordered segments: %s'%(sg.id,osegs))
- self.assertEqual(expected[sg],len(osegs[sg.id]))
+ self.assertEqual(expected[sg.id],len(osegs[sg.id]))
ord_segs, cumulative_lengths, path_lengths_to_proximal, path_lengths_to_distal = cell.get_ordered_segments_in_groups(sg.id,
include_cumulative_lengths=True,
include_path_lengths=True)
print('Seg group %s has cumulative_lengths: %s'%(sg.id,cumulative_lengths))
- self.assertEqual(expected[sg],len(cumulative_lengths[sg.id]))
+ self.assertEqual(expected[sg.id],len(cumulative_lengths[sg.id]))
print('Seg group %s has path_lengths_to_proximal: %s'%(sg.id,path_lengths_to_proximal))
- self.assertEqual(expected[sg],len(path_lengths_to_proximal[sg.id]))
+ self.assertEqual(expected[sg.id],len(path_lengths_to_proximal[sg.id]))
print('Seg group %s has path_lengths_to_distal: %s'%(sg.id,path_lengths_to_distal))
- self.assertEqual(expected[sg],len(path_lengths_to_distal[sg.id]))
+ self.assertEqual(expected[sg.id],len(path_lengths_to_distal[sg.id]))
From 544c202d5519ea2751ef3ad6ba64253b3b677dfa Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Wed, 17 Jun 2020 11:07:07 +0100
Subject: [PATCH 05/44] Minor addition to cell.get_all_segments_in_group() to
assume 'all'=all
---
neuroml/nml/helper_methods.py | 9 +++++++--
neuroml/nml/nml.py | 11 ++++++++---
2 files changed, 15 insertions(+), 5 deletions(-)
diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py
index fbc15d41..b6819f3b 100644
--- a/neuroml/nml/helper_methods.py
+++ b/neuroml/nml/helper_methods.py
@@ -864,13 +864,18 @@ def get_segment_ids_vs_segments(self):
return segments
def get_all_segments_in_group(self,
- segment_group):
+ segment_group,
+ assume_all_means_all=True):
if isinstance(segment_group, str):
for sg in self.morphology.segment_groups:
if sg.id == segment_group:
segment_group = sg
- if isinstance(segment_group, str): #
+ if isinstance(segment_group, str):
+
+ if assume_all_means_all and segment_group=='all': # i.e. wasn't explicitly defined, but assume it means all segments
+ return [seg.id for seg in self.morphology.segments]
+
raise Exception('No segment group '+segment_group+ ' found in cell '+self.id)
all_segs = []
diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py
index e7e25923..7da8fe24 100644
--- a/neuroml/nml/nml.py
+++ b/neuroml/nml/nml.py
@@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
#
-# Generated Tue Jun 16 15:29:34 2020 by generateDS.py version 2.30.11.
+# Generated Wed Jun 17 09:57:07 2020 by generateDS.py version 2.30.11.
# Python 2.7.15 |Anaconda, Inc.| (default, Oct 23 2018, 13:35:16) [GCC 4.2.1 Compatible Clang 4.0.1 (tags/RELEASE_401/final)]
#
# Command line options:
@@ -17843,13 +17843,18 @@ def get_segment_ids_vs_segments(self):
return segments
def get_all_segments_in_group(self,
- segment_group):
+ segment_group,
+ assume_all_means_all=True):
if isinstance(segment_group, str):
for sg in self.morphology.segment_groups:
if sg.id == segment_group:
segment_group = sg
- if isinstance(segment_group, str): #
+ if isinstance(segment_group, str):
+
+ if assume_all_means_all and segment_group=='all': # i.e. wasn't explicitly defined, but assume it means all segments
+ return [seg.id for seg in self.morphology.segments]
+
raise Exception('No segment group '+segment_group+ ' found in cell '+self.id)
all_segs = []
From 7b44a395e4c666a3ec0f210e0aadb0766dbb0086 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Wed, 29 Jul 2020 10:32:00 +0100
Subject: [PATCH 06/44] Test on py3.8
---
.travis.yml | 6 +++---
neuroml/nml/helper_methods.py | 2 +-
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 21afd7fe..149b7136 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -8,10 +8,10 @@ addons:
matrix:
include:
- - python: 3.7
- - python: 3.6
- - python: 3.5
- python: 2.7
+ - python: 3.6
+ - python: 3.7
+ - python: 3.8
# command to install dependencies
install:
diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py
index b6819f3b..f74363a0 100644
--- a/neuroml/nml/helper_methods.py
+++ b/neuroml/nml/helper_methods.py
@@ -614,7 +614,7 @@ def summary(self, show_includes=True, show_non_network=True):
if isinstance(memb[1], list) and len(memb[1])>0 and not memb[0].endswith('_') and not memb[0] == 'networks':
if (memb[0] == 'includes' and show_includes) or (not memb[0] == 'includes' and show_non_network):
post = "*\\n"
- info+="* "+str(memb[1][0].__class__.__name__)+": "
+ info+="* "+str(memb[1][0].__class__.__name__)+": "
listed = []
for entry in memb[1]:
if hasattr(entry,'id'):
From d5a952872fec7e5e915d012015201d00a1eb0b0c Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Fri, 4 Sep 2020 14:21:11 +0100
Subject: [PATCH 07/44] Add a notebook on morphologies
---
.gitignore | 1 +
notebooks/CellMorphology.ipynb | 296 +++++++++++++++++++++++++++++++++
2 files changed, 297 insertions(+)
create mode 100644 notebooks/CellMorphology.ipynb
diff --git a/.gitignore b/.gitignore
index 76f1626f..6b89c6d7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -46,3 +46,4 @@ neuroml/test/*.h5
/neuroml/examples/report.txt
/neuroml/examples/test_files/report.txt
/tests.log
+/notebooks/.ipynb_checkpoints
diff --git a/notebooks/CellMorphology.ipynb b/notebooks/CellMorphology.ipynb
new file mode 100644
index 00000000..8cc9082e
--- /dev/null
+++ b/notebooks/CellMorphology.ipynb
@@ -0,0 +1,296 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Examples of using libNeuroML to handle cell morphologies\n",
+ "\n",
+ "First load a [NeuroML file containing a single cell](https://github.com/NeuralEnsemble/libNeuroML/blob/master/neuroml/examples/test_files/pyr_4_sym.cell.nml)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Loaded cell pyr_4_sym from: ../neuroml/examples/test_files/pyr_4_sym.cell.nml\n"
+ ]
+ }
+ ],
+ "source": [
+ "import neuroml\n",
+ "import neuroml.loaders as loaders\n",
+ "\n",
+ "fn = '../neuroml/examples/test_files/pyr_4_sym.cell.nml'\n",
+ "doc = loaders.NeuroMLLoader.load(fn)\n",
+ "cell = doc.cells[0]\n",
+ "print(\"Loaded cell %s from: %s\"%(cell.id,fn))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Summary\n",
+ "\n",
+ "Print a brief summary of the cell properties"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "*******************************************************\n",
+ "* Cell: pyr_4_sym\n",
+ "* Notes: A simplified pyramidal cell model with 9 compartments\n",
+ "* Segments: 9\n",
+ "* SegmentGroups: 18\n",
+ "*******************************************************\n"
+ ]
+ }
+ ],
+ "source": [
+ "cell.summary()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Segments\n",
+ "\n",
+ "Print information on the segments present"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[, , , , , , , , ]\n",
+ "\n",
+ "Segment soma, id: 0 has proximal point: (0.0, 0.0, 0.0), diam 23.0um, distal: (0.0, 17.0, 0.0), diam 23.0um\n",
+ " Parent segment: None (root segment)\n",
+ "\n",
+ "Segment apical0, id: 1 has proximal point: (0.0, 17.0, 0.0), diam 6.0um, distal: (0.0, 77.0, 0.0), diam 6.0um\n",
+ " Parent segment: 0; fraction along: 1.0\n",
+ "\n",
+ "Segment apical2, id: 2 has proximal point: (0.0, 77.0, 0.0), diam 4.4um, distal: (0.0, 477.0, 0.0), diam 4.4um\n",
+ " Parent segment: 1; fraction along: 1.0\n",
+ "\n",
+ "Segment apical3, id: 3 has proximal point: (0.0, 477.0, 0.0), diam 2.9um, distal: (0.0, 877.0, 0.0), diam 2.9um\n",
+ " Parent segment: 2; fraction along: 1.0\n",
+ "\n",
+ "Segment apical4, id: 4 has proximal point: (0.0, 877.0, 0.0), diam 2.0um, distal: (0.0, 1127.0, 0.0), diam 2.0um\n",
+ " Parent segment: 3; fraction along: 1.0\n",
+ "\n",
+ "Segment apical1, id: 5 has proximal point: (0.0, 77.0, 0.0), diam 3.0um, distal: (-150.0, 77.0, 0.0), diam 3.0um\n",
+ " Parent segment: 1; fraction along: 1.0\n",
+ "\n",
+ "Segment basal0, id: 6 has proximal point: (0.0, 0.0, 0.0), diam 4.0um, distal: (0.0, -50.0, 0.0), diam 4.0um\n",
+ " Parent segment: 0; fraction along: 0.0\n",
+ "\n",
+ "Segment basal1, id: 7 has proximal point: (0.0, -50.0, 0.0), diam 5.0um, distal: (106.07, -156.07, 0.0), diam 5.0um\n",
+ " Parent segment: 6; fraction along: 1.0\n",
+ "\n",
+ "Segment basal2, id: 8 has proximal point: (0.0, -50.0, 0.0), diam 5.0um, distal: (-106.07, -156.07, 0.0), diam 5.0um\n",
+ " Parent segment: 6; fraction along: 1.0\n"
+ ]
+ }
+ ],
+ "source": [
+ "segments = cell.morphology.segments\n",
+ "print(segments)\n",
+ "\n",
+ "for seg in segments:\n",
+ " print('\\nSegment %s, id: %s has proximal point: %s, distal: %s'%(seg.name, seg.id, seg.proximal, seg.distal))\n",
+ " print(' Parent segment: %s%s'%(seg.parent.segments if seg.parent else 'None (root segment)', \n",
+ " '; fraction along: %s'%seg.parent.fraction_along if seg.parent else ''))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Helper methods\n",
+ "\n",
+ "Note that not all segments have an explicit **proximal**. This may be omitted if the **proximal** point is the same as the **distal** of the parent. For convenience, the helper method on **cell**, **get_actual_proximal()** can be used.\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {
+ "scrolled": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ " has proximal: (0.0, 0.0, 0.0), diam 23.0um, distal: (0.0, 17.0, 0.0), diam 23.0um\n",
+ " has proximal: (0.0, 17.0, 0.0), diam 6.0um, distal: (0.0, 77.0, 0.0), diam 6.0um\n",
+ " has proximal: (0.0, 77.0, 0.0), diam 4.4um, distal: (0.0, 477.0, 0.0), diam 4.4um\n",
+ " has proximal: (0.0, 477.0, 0.0), diam 2.9um, distal: (0.0, 877.0, 0.0), diam 2.9um\n",
+ " has proximal: (0.0, 877.0, 0.0), diam 2.0um, distal: (0.0, 1127.0, 0.0), diam 2.0um\n",
+ " has proximal: (0.0, 77.0, 0.0), diam 3.0um, distal: (-150.0, 77.0, 0.0), diam 3.0um\n",
+ " has proximal: (0.0, 0.0, 0.0), diam 4.0um, distal: (0.0, -50.0, 0.0), diam 4.0um\n",
+ " has proximal: (0.0, -50.0, 0.0), diam 5.0um, distal: (106.07, -156.07, 0.0), diam 5.0um\n",
+ " has proximal: (0.0, -50.0, 0.0), diam 5.0um, distal: (-106.07, -156.07, 0.0), diam 5.0um\n"
+ ]
+ }
+ ],
+ "source": [
+ "\n",
+ "for seg in segments:\n",
+ " print('%s has proximal: %s, distal: %s'%(seg, \n",
+ " cell.get_actual_proximal(seg.id), \n",
+ " seg.distal))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Other convenient helper methods include **get_segment_length()**, **get_segment_surface_area()** and **get_segment_volume()**, which also use the correct **proximal** point."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Segment has length: 17.0 um, area: 1228.36272755 um^2, volume: 7063.08568343 um^3\n"
+ ]
+ }
+ ],
+ "source": [
+ "\n",
+ "soma = cell.get_segment(0)\n",
+ "print('Segment %s has length: %s um, area: %s um^2, volume: %s um^3'%(soma, \n",
+ " cell.get_segment_length(soma.id), \n",
+ " cell.get_segment_surface_area(soma.id), \n",
+ " cell.get_segment_volume(soma.id)))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Segment Groups\n",
+ "\n",
+    "Segment groups can refer to multiple segments (e.g. so that channel densities can be placed across the cell).\n",
+ "\n",
+ "They can include individual segments (e.g. ``) and refer to other segment groups (e.g. ``)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "SegmentGroup: soma, 1 member(s), 0 included group(s)\n",
+ "SegmentGroup: apical0, 1 member(s), 0 included group(s)\n",
+ "SegmentGroup: apical2, 1 member(s), 0 included group(s)\n",
+ "SegmentGroup: apical3, 1 member(s), 0 included group(s)\n",
+ "SegmentGroup: apical4, 1 member(s), 0 included group(s)\n",
+ "SegmentGroup: apical1, 1 member(s), 0 included group(s)\n",
+ "SegmentGroup: basal0, 1 member(s), 0 included group(s)\n",
+ "SegmentGroup: basal1, 1 member(s), 0 included group(s)\n",
+ "SegmentGroup: basal2, 1 member(s), 0 included group(s)\n",
+ "SegmentGroup: all, 0 member(s), 9 included group(s)\n",
+ "SegmentGroup: soma_group, 0 member(s), 1 included group(s)\n",
+ "SegmentGroup: dendrite_group, 0 member(s), 8 included group(s)\n",
+ "SegmentGroup: apical_dends, 0 member(s), 5 included group(s)\n",
+ "SegmentGroup: middle_apical_dendrite, 0 member(s), 1 included group(s)\n",
+ "SegmentGroup: thalamic_input, 0 member(s), 1 included group(s)\n",
+ "SegmentGroup: basal_dends, 0 member(s), 3 included group(s)\n",
+ "SegmentGroup: basal_gaba_input, 0 member(s), 1 included group(s)\n",
+ "SegmentGroup: background_input, 0 member(s), 1 included group(s)\n"
+ ]
+ }
+ ],
+ "source": [
+ "for sg in cell.morphology.segment_groups:\n",
+ " print(sg)\n",
+ " if sg.id == 'dendrite_group':\n",
+ " dendrite_group = sg"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "A helper method **get_all_segments_in_group()** will return a list of the segments in that group:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[1, 2, 3, 4, 5, 6, 7, 8]\n"
+ ]
+ }
+ ],
+ "source": [
+ "dend_segs = cell.get_all_segments_in_group(dendrite_group.id)\n",
+ "print(dend_segs)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.15"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
From 264d38165f0a452250b1818aa8a730318c1053f9 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Fri, 4 Sep 2020 14:33:10 +0100
Subject: [PATCH 08/44] Update
---
notebooks/CellMorphology.ipynb | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/notebooks/CellMorphology.ipynb b/notebooks/CellMorphology.ipynb
index 8cc9082e..83940c0b 100644
--- a/notebooks/CellMorphology.ipynb
+++ b/notebooks/CellMorphology.ipynb
@@ -128,7 +128,9 @@
"source": [
"### Helper methods\n",
"\n",
- "Note that not all segments have an explicit **proximal**. This may be omitted if the **proximal** point is the same as the **distal** of the parent. For convenience, the helper method on **cell**, **get_actual_proximal()** can be used.\n"
+ "Note that not all segments have an explicit **proximal**. This may be omitted if the **proximal** point is the same as the **distal** of the parent. For convenience, the helper method on **cell**, **get_actual_proximal()** can be used.\n",
+ "\n",
+    "Note that these helper methods (and all the extra helper code that fed into the libNeuroML API) are specified [here](https://github.com/NeuralEnsemble/libNeuroML/blob/27b964c93c86796e5dcdc7d32c72e97e06ffccc2/neuroml/nml/helper_methods.py#L727), and are eventually generated into the main [nml.py](https://raw.githubusercontent.com/NeuralEnsemble/libNeuroML/master/neuroml/nml/nml.py).\n"
]
},
{
From db9a9909b4c4ae3d4b4ab76f16ac3fd3acc66a3c Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Mon, 19 Oct 2020 18:28:13 +0100
Subject: [PATCH 09/44] Fixes issue reading nml in h5 on python3
---
neuroml/__init__.py | 2 +-
neuroml/hdf5/NeuroMLHdf5Parser.py | 7 ++++++-
2 files changed, 7 insertions(+), 2 deletions(-)
diff --git a/neuroml/__init__.py b/neuroml/__init__.py
index 6b1c6841..15ef42ad 100644
--- a/neuroml/__init__.py
+++ b/neuroml/__init__.py
@@ -1,6 +1,6 @@
from .nml.nml import * # allows importation of all neuroml classes
-__version__ = '0.2.52'
+__version__ = '0.2.53'
__version_info__ = tuple(int(i) for i in __version__.split('.'))
diff --git a/neuroml/hdf5/NeuroMLHdf5Parser.py b/neuroml/hdf5/NeuroMLHdf5Parser.py
index eea20e90..69c7b738 100644
--- a/neuroml/hdf5/NeuroMLHdf5Parser.py
+++ b/neuroml/hdf5/NeuroMLHdf5Parser.py
@@ -270,6 +270,8 @@ def parse_dataset(self, d):
for attrName in d.attrs._v_attrnames:
val = d.attrs.__getattr__(attrName)
+ if not isinstance(val, str):
+ val = val.decode()
#self.log.debug("Val of attribute: "+ attrName + " is "+ str(val))
@@ -321,7 +323,7 @@ def parse_dataset(self, d):
row = d[i,:]
id = int(row[indexId]) if indexId>0 else i
-
+
preCellId = int(row[indexPreCellId])
if indexPreSegId >= 0:
@@ -387,6 +389,8 @@ def parse_dataset(self, d):
for attrName in d.attrs._v_attrnames:
val = d.attrs.__getattr__(attrName)
+ if not isinstance(val, str):
+ val = val.decode()
self.log.debug("Val of attribute: "+ attrName + " is "+ str(val))
@@ -609,6 +613,7 @@ def end_group(self, g):
file_name = '../examples/test_files/complete.nml.h5'
+ #file_name = '../../../neuroConstruct/osb/showcase/NetPyNEShowcase/NeuroML2/scaling/Balanced.net.nml.h5'
logging.basicConfig(level=logging.DEBUG, format="%(name)-19s %(levelname)-5s - %(message)s")
From 2be6c4e032f3d62f190002f8d2a305715a4f3692 Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Tue, 1 Dec 2020 15:55:08 +0000
Subject: [PATCH 10/44] Update information on generateds.py
Updated the README to refer to PyPi and the primary Source Forge
project.
Since BitBucket no longer supports mercurial, the BitBucket repository
that was referred to is no longer available.
---
neuroml/nml/README.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/neuroml/nml/README.md b/neuroml/nml/README.md
index eba907c7..7116099c 100644
--- a/neuroml/nml/README.md
+++ b/neuroml/nml/README.md
@@ -1,8 +1,8 @@
## Autogeneration of API, using generateds_config.py to ensure correct naming conventions.
-NOTE: this requires the latest version of generateDS.py from https://bitbucket.org/dkuhlman/generateds
+This requires [generateDS.py](http://www.davekuhlman.org/generateDS.html), version >= 2.20a.
-**For Python 3 compatibility, make sure this is >= version 2.20a**
+You can get it from [PyPI](https://pypi.org/project/generateDS/) or [SourceForge](https://sourceforge.net/projects/generateds/).
All that is needed is the Schema - as long as generateds_config.py and helper_methods are present, nml.py should be generated correctly.
From bbd0a2a545a08dfa784425e471bb079b436143d4 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Thu, 4 Feb 2021 14:41:24 +0000
Subject: [PATCH 11/44] Test on py3.9
---
.gitignore | 1 +
.travis.yml | 2 +-
neuroml/examples/test_files/complete.nml | 292 +++++++++++------------
neuroml/test/test_loaders.py | 13 +-
notebooks/CellMorphology.ipynb | 10 +-
5 files changed, 162 insertions(+), 156 deletions(-)
diff --git a/.gitignore b/.gitignore
index 6b89c6d7..157897d1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -47,3 +47,4 @@ neuroml/test/*.h5
/neuroml/examples/test_files/report.txt
/tests.log
/notebooks/.ipynb_checkpoints
+/mongoo
diff --git a/.travis.yml b/.travis.yml
index 149b7136..f1005400 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -9,9 +9,9 @@ addons:
matrix:
include:
- python: 2.7
- - python: 3.6
- python: 3.7
- python: 3.8
+ - python: 3.9
# command to install dependencies
install:
diff --git a/neuroml/examples/test_files/complete.nml b/neuroml/examples/test_files/complete.nml
index b7638c0f..fa250ae2 100644
--- a/neuroml/examples/test_files/complete.nml
+++ b/neuroml/examples/test_files/complete.nml
@@ -17,185 +17,185 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
-
+
diff --git a/neuroml/test/test_loaders.py b/neuroml/test/test_loaders.py
index 70713f59..08498062 100644
--- a/neuroml/test/test_loaders.py
+++ b/neuroml/test/test_loaders.py
@@ -15,12 +15,17 @@
class TestNeuroMLLoader(unittest.TestCase):
def test_load_neuroml(self):
root_dir = os.path.dirname(neuroml.__file__)
- print('root dir is:')
- print(root_dir)
+ print('root dir is: '+root_dir)
test_file_path = os.path.join(root_dir,'examples/test_files/Purk2M9s.nml')
- print('test file path is:')
- print(test_file_path)
+ print('test file path is: ' + test_file_path)
+
f = open(test_file_path,'r')
#print(f.read())
doc = loaders.NeuroMLLoader.load(test_file_path)
self.assertEqual(doc.id,'Purk2M9s')
+ f.close()
+ print('Finished test')
+
+if __name__ == "__main__":
+ t = TestNeuroMLLoader()
+ t.test_load_neuroml()
diff --git a/notebooks/CellMorphology.ipynb b/notebooks/CellMorphology.ipynb
index 83940c0b..a4ef745b 100644
--- a/notebooks/CellMorphology.ipynb
+++ b/notebooks/CellMorphology.ipynb
@@ -276,21 +276,21 @@
],
"metadata": {
"kernelspec": {
- "display_name": "Python 2",
+ "display_name": "Python 3",
"language": "python",
- "name": "python2"
+ "name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
- "version": 2
+ "version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
- "pygments_lexer": "ipython2",
- "version": "2.7.15"
+ "pygments_lexer": "ipython3",
+ "version": "3.7.7"
}
},
"nbformat": 4,
From 7b5c55d6d2cad16ee04b9de6fc986d22731a1c9b Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Mon, 15 Feb 2021 12:44:20 +0000
Subject: [PATCH 12/44] Regenerated
---
neuroml/nml/config.py | 2 +-
neuroml/nml/nml.py | 8 ++++----
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/neuroml/nml/config.py b/neuroml/nml/config.py
index a2c895e9..447f42f1 100644
--- a/neuroml/nml/config.py
+++ b/neuroml/nml/config.py
@@ -1 +1 @@
-variables={'schema_name':'NeuroML_v2beta5.xsd'}
+variables={'schema_name':'NeuroML_v2.1.xsd'}
diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py
index 7da8fe24..e1d0fc06 100644
--- a/neuroml/nml/nml.py
+++ b/neuroml/nml/nml.py
@@ -2,8 +2,8 @@
# -*- coding: utf-8 -*-
#
-# Generated Wed Jun 17 09:57:07 2020 by generateDS.py version 2.30.11.
-# Python 2.7.15 |Anaconda, Inc.| (default, Oct 23 2018, 13:35:16) [GCC 4.2.1 Compatible Clang 4.0.1 (tags/RELEASE_401/final)]
+# Generated Mon Feb 15 12:34:31 2021 by generateDS.py version 2.30.11.
+# Python 2.7.16 |Anaconda, Inc.| (default, Mar 14 2019, 21:00:58) [GCC 7.3.0]
#
# Command line options:
# ('-o', 'nml.py')
@@ -15,7 +15,7 @@
# NeuroML_v2.1.xsd
#
# Command line:
-# /Users/padraig/anaconda/envs/py27/bin/generateDS.py -o "nml.py" --use-getter-setter="none" --silence --user-methods="helper_methods" NeuroML_v2.1.xsd
+# /home/padraig/anaconda2/bin/generateDS.py -o "nml.py" --use-getter-setter="none" --silence --user-methods="helper_methods" NeuroML_v2.1.xsd
#
# Current working directory (os.getcwd()):
# nml
@@ -16063,7 +16063,7 @@ def summary(self, show_includes=True, show_non_network=True):
if isinstance(memb[1], list) and len(memb[1])>0 and not memb[0].endswith('_') and not memb[0] == 'networks':
if (memb[0] == 'includes' and show_includes) or (not memb[0] == 'includes' and show_non_network):
post = "*\n"
- info+="* "+str(memb[1][0].__class__.__name__)+": "
+ info+="* "+str(memb[1][0].__class__.__name__)+": "
listed = []
for entry in memb[1]:
if hasattr(entry,'id'):
From 16c61ade72133cb910b154c740514642567d1099 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Mon, 15 Feb 2021 14:05:28 +0000
Subject: [PATCH 13/44] Initial changes for
https://github.com/NeuroML/pyNeuroML/issues/73
---
neuroml/__init__.py | 2 +-
neuroml/nml/helper_methods.py | 789 ++++++++++++++++-----------------
neuroml/nml/nml.py | 796 +++++++++++++++++-----------------
3 files changed, 797 insertions(+), 790 deletions(-)
diff --git a/neuroml/__init__.py b/neuroml/__init__.py
index 15ef42ad..4fc1d351 100644
--- a/neuroml/__init__.py
+++ b/neuroml/__init__.py
@@ -1,6 +1,6 @@
from .nml.nml import * # allows importation of all neuroml classes
-__version__ = '0.2.53'
+__version__ = '0.2.54'
__version_info__ = tuple(int(i) for i in __version__.split('.'))
diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py
index f74363a0..bdd9d526 100644
--- a/neuroml/nml/helper_methods.py
+++ b/neuroml/nml/helper_methods.py
@@ -51,7 +51,7 @@ def match_name(self, class_name):
If this method returns True, the method will be inserted in
the generated class.
"""
-
+
if self.class_names == class_name or (isinstance(self.class_names,list) and class_name in self.class_names):
return True
else:
@@ -95,14 +95,14 @@ def num_segments(self):
source='''\
@property
def length(self):
-
+
if self.proximal==None:
raise Exception('Cannot get length of segment '+str(self.id)+' using the length property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_length(segment_id) on the cell instead.')
-
+
prox_x = self.proximal.x
prox_y = self.proximal.y
prox_z = self.proximal.z
-
+
dist_x = self.distal.x
dist_y = self.distal.y
dist_z = self.distal.z
@@ -110,40 +110,40 @@ def length(self):
length = ((prox_x-dist_x)**2 + (prox_y-dist_y)**2 + (prox_z-dist_z)**2)**(0.5)
return length
-
+
def __str__(self):
-
+
return ""
-
+
def __repr__(self):
-
+
return str(self)
-
+
''',
class_names=("Segment")
)
-
+
volume = MethodSpec(name='volume',
source='''\
@property
def volume(self):
-
+
from math import pi
if self.proximal==None:
raise Exception('Cannot get volume of segment '+str(self.id)+' using the volume property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_volume(segment_id) on the cell instead.')
-
+
prox_rad = self.proximal.diameter/2.0
dist_rad = self.distal.diameter/2.0
-
+
if self.proximal.x == self.distal.x and \
self.proximal.y == self.distal.y and \
self.proximal.z == self.distal.z:
-
+
if prox_rad!=dist_rad:
raise Exception('Cannot get volume of segment '+str(self.id)+'. The (x,y,z) coordinates of the proximal and distal points match (i.e. it is a sphere), but the diameters of these points are different, making the volume calculation ambiguous.')
-
+
return 4.0/3 * pi * prox_rad**3
-
+
length = self.length
volume = (pi/3)*length*(prox_rad**2+dist_rad**2+prox_rad*dist_rad)
@@ -161,26 +161,26 @@ def volume(self):
def surface_area(self):
from math import pi
from math import sqrt
-
+
if self.proximal==None:
raise Exception('Cannot get surface area of segment '+str(self.id)+' using the surface_area property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_surface_area(segment_id) on the cell instead.')
-
+
prox_rad = self.proximal.diameter/2.0
dist_rad = self.distal.diameter/2.0
-
+
if self.proximal.x == self.distal.x and \
self.proximal.y == self.distal.y and \
self.proximal.z == self.distal.z:
-
+
if prox_rad!=dist_rad:
raise Exception('Cannot get surface area of segment '+str(self.id)+'. The (x,y,z) coordinates of the proximal and distal points match (i.e. it is a sphere), but the diameters of these points are different, making the surface area calculation ambiguous.')
-
+
return 4.0 * pi * prox_rad**2
-
+
length = self.length
surface_area = pi*(prox_rad+dist_rad)*sqrt((prox_rad-dist_rad)**2+length**2)
-
+
return surface_area
''',
class_names=("Segment")
@@ -195,38 +195,38 @@ def surface_area(self):
surface_area,
num_segments,
)
-
-
-
+
+
+
seg_grp = MethodSpec(name='SegmentGroup',
source='''\
-
+
def __str__(self):
-
+
return "SegmentGroup: "+str(self.id)+", "+str(len(self.members))+" member(s), "+str(len(self.includes))+" included group(s)"
-
+
def __repr__(self):
-
+
return str(self)
-
+
''',
class_names=("SegmentGroup")
)
-
+
METHOD_SPECS+=(seg_grp,)
-
+
seg_grp = MethodSpec(name='Point3DWithDiam',
source='''\
def __str__(self):
-
+
return "("+str(self.x)+", "+str(self.y)+", "+str(self.z)+"), diam "+str(self.diameter)+"um"
-
+
def __repr__(self):
-
+
return str(self)
-
+
def distance_to(self, other_3d_point):
a_x = self.x
a_y = self.y
@@ -238,7 +238,7 @@ def distance_to(self, other_3d_point):
distance = ((a_x-b_x)**2 + (a_y-b_y)**2 + (a_z-b_z)**2)**(0.5)
return distance
-
+
''',
class_names=("Point3DWithDiam")
)
@@ -256,266 +256,266 @@ def _get_cell_id(self, id_string):
return int(id_string.split('/')[2])
def get_pre_cell_id(self):
-
+
return self._get_cell_id(self.pre_cell_id)
-
+
def get_post_cell_id(self):
-
+
return self._get_cell_id(self.post_cell_id)
def get_pre_segment_id(self):
-
+
return int(self.pre_segment_id)
-
+
def get_post_segment_id(self):
-
+
return int(self.post_segment_id)
def get_pre_fraction_along(self):
-
+
return float(self.pre_fraction_along)
-
+
def get_post_fraction_along(self):
-
+
return float(self.post_fraction_along)
-
-
+
+
def get_pre_info(self):
-
+
return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '')
-
+
def get_post_info(self):
-
+
return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '')
-
+
def __str__(self):
-
+
return "Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())
-
+
''',
class_names=(["Connection","ConnectionWD"])
)
-
+
METHOD_SPECS+=(connection_cell_ids,)
-
+
connection_wd_cell_ids = MethodSpec(name='connection_wd_cell_ids',
source='''\
-
+
def __str__(self):
-
+
return "Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ \
", weight: "+'PERCENTAGEf' PERCENTAGE (float(self.weight))+", delay: "+'PERCENTAGE.5f' PERCENTAGE (self.get_delay_in_ms())+" ms"
-
+
def get_delay_in_ms(self):
if 'ms' in self.delay:
return float(self.delay[:-2].strip())
elif 's' in self.delay:
return float(self.delay[:-1].strip())*1000.0
-
+
''',
class_names=("ConnectionWD")
)
-
+
METHOD_SPECS+=(connection_wd_cell_ids,)
elec_connection_instance_cell_ids = MethodSpec(name='elec_connection_instance_cell_ids',
source='''\
-
+
def _get_cell_id(self, id_string):
if '[' in id_string:
return int(id_string.split('[')[1].split(']')[0])
else:
return int(id_string.split('/')[2])
-
+
def __str__(self):
-
+
return "Electrical Connection (Instance based) "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ \
", synapse: "+str(self.synapse)
-
-
+
+
''',
class_names=("ElectricalConnectionInstance")
)
-
+
METHOD_SPECS+=(elec_connection_instance_cell_ids,)
elec_connection_instance_w = MethodSpec(name='elec_connection_instance_w',
source='''\
-
+
def get_weight(self):
-
+
return float(self.weight) if self.weight!=None else 1.0
-
+
def __str__(self):
-
+
return "Electrical Connection (Instance based & weight) "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ \
", synapse: "+str(self.synapse) + ", weight: "+'PERCENTAGE.6f'PERCENTAGEself.get_weight()
-
+
''',
class_names=("ElectricalConnectionInstanceW")
)
-
+
METHOD_SPECS+=(elec_connection_instance_w,)
elec_connection_cell_ids = MethodSpec(name='elec_connection_cell_ids',
source='''\
-
+
def _get_cell_id(self, id_string):
return int(float(id_string))
-
+
def get_pre_cell_id(self):
-
+
return self._get_cell_id(self.pre_cell)
-
+
def get_post_cell_id(self):
-
+
return self._get_cell_id(self.post_cell)
-
+
def get_pre_segment_id(self):
-
+
return int(self.pre_segment)
-
+
def get_post_segment_id(self):
-
+
return int(self.post_segment)
def get_pre_fraction_along(self):
-
+
return float(self.pre_fraction_along)
-
+
def get_post_fraction_along(self):
-
+
return float(self.post_fraction_along)
-
-
+
+
def get_pre_info(self):
-
+
return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '')
-
+
def get_post_info(self):
-
+
return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '')
-
-
+
+
def __str__(self):
-
+
return "Electrical Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ \
", synapse: "+str(self.synapse)
-
-
+
+
''',
class_names=("ElectricalConnection")
)
-
+
METHOD_SPECS+=(elec_connection_cell_ids,)
cont_connection_instance_cell_ids = MethodSpec(name='cont_connection_instance_cell_ids',
source='''\
-
+
def _get_cell_id(self, id_string):
if '[' in id_string:
return int(id_string.split('[')[1].split(']')[0])
else:
return int(id_string.split('/')[2])
-
-
+
+
def __str__(self):
-
+
return "Continuous Connection (Instance based) "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ \
", pre comp: "+str(self.pre_component)+", post comp: "+str(self.post_component)
-
-
+
+
''',
class_names=("ContinuousConnectionInstance")
)
-
+
METHOD_SPECS+=(cont_connection_instance_cell_ids,)
cont_connection_instance_w = MethodSpec(name='cont_connection_instance_w',
source='''\
-
+
def get_weight(self):
-
+
return float(self.weight) if self.weight!=None else 1.0
-
+
def __str__(self):
-
+
return "Continuous Connection (Instance based & weight) "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ \
", pre comp: "+str(self.pre_component)+", post comp: "+str(self.post_component)+", weight: "+'PERCENTAGE.6f'PERCENTAGEself.get_weight()
-
-
+
+
''',
class_names=("ContinuousConnectionInstanceW")
)
-
+
METHOD_SPECS+=(cont_connection_instance_w,)
cont_connection_cell_ids = MethodSpec(name='cont_connection_cell_ids',
source='''\
-
+
def _get_cell_id(self, id_string):
return int(float(id_string))
-
-
+
+
def get_pre_cell_id(self):
-
+
return self._get_cell_id(self.pre_cell)
-
+
def get_post_cell_id(self):
-
+
return self._get_cell_id(self.post_cell)
-
+
def get_pre_segment_id(self):
-
+
return int(self.pre_segment)
-
+
def get_post_segment_id(self):
-
+
return int(self.post_segment)
def get_pre_fraction_along(self):
-
+
return float(self.pre_fraction_along)
-
+
def get_post_fraction_along(self):
-
+
return float(self.post_fraction_along)
-
-
+
+
def get_pre_info(self):
-
+
return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '')
-
+
def get_post_info(self):
-
+
return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '')
-
-
+
+
def __str__(self):
-
+
return "Continuous Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ \
", pre comp: "+str(self.pre_component)+", post comp: "+str(self.post_component)
-
-
+
+
''',
class_names=("ContinuousConnection")
)
-
+
METHOD_SPECS+=(cont_connection_cell_ids,)
instance = MethodSpec(name='instance',
source='''\
-
+
def __str__(self):
-
+
return "Instance "+str(self.id)+ (" at location: "+str(self.location) if self.location else "")
-
+
def __repr__(self):
-
+
return str(self)
-
+
''',
class_names=("Instance")
)
@@ -526,20 +526,20 @@ def __repr__(self):
source='''\
def _format(self,value):
-
+
if int(value)==value:
return str(int(value))
else:
return 'PERCENTAGE.4f' PERCENTAGE value
-
+
def __str__(self):
-
+
return "("+ self._format(self.x) +", "+ self._format(self.y) +", "+ self._format(self.z) +")"
-
+
def __repr__(self):
-
+
return str(self)
-
+
''',
class_names=("Location")
)
@@ -558,54 +558,54 @@ def _get_cell_id(self, id_string):
return int(id_string.split('/')[2])
def get_target_cell_id(self):
-
+
return self._get_cell_id(self.target)
def get_segment_id(self):
-
+
return int(self.segment_id) if self.segment_id else 0
def get_fraction_along(self):
-
+
return float(self.fraction_along) if self.fraction_along else 0.5
-
+
def __str__(self):
-
+
return "Input "+str(self.id)+": "+str(self.get_target_cell_id())+":"+str(self.get_segment_id())+"("+'PERCENTAGE.6f'PERCENTAGEself.get_fraction_along()+")"
-
+
''',
class_names=(["Input","ExplicitInput"])
)
-
+
METHOD_SPECS+=(input_cell_ids,)
input_w = MethodSpec(name='input_w',
source='''\
-
+
def get_weight(self):
-
+
return float(self.weight) if self.weight!=None else 1.0
def __str__(self):
-
+
return "Input (weight) "+str(self.id)+": "+str(self.get_target_cell_id())+":"+str(self.get_segment_id())+"("+'PERCENTAGE.6f'PERCENTAGEself.get_fraction_along()+"), weight: "+'PERCENTAGE.6f'PERCENTAGEself.get_weight()
-
+
''',
class_names=(["InputW"])
)
-
+
METHOD_SPECS+=(input_w,)
nml_doc_summary = MethodSpec(name='summary',
source='''\
-
+
def summary(self, show_includes=True, show_non_network=True):
-
+
import inspect
-
+
info = "*******************************************************\\n"
info+="* NeuroMLDocument: "+self.id+"\\n*\\n"
post = ""
@@ -627,14 +627,14 @@ def summary(self, show_includes=True, show_non_network=True):
listed.append(str(entry.tag)+" = "+str(entry.value))
info+= str(sorted(listed))+"\\n"
info+= post
-
+
for network in self.networks:
info+="* Network: "+network.id
if network.temperature:
info+=" (temperature: "+network.temperature+")"
info+="\\n*\\n"
tot_pop =0
- tot_cells = 0
+ tot_cells = 0
pop_info = ""
for pop in sorted(network.populations, key=lambda x: x.id):
pop_info+="* "+str(pop)+"\\n"
@@ -645,16 +645,16 @@ def summary(self, show_includes=True, show_non_network=True):
pop_info+="* Locations: ["+str(loc)+", ...]\\n"
if len(pop.properties)>0:
pop_info+="* Properties: "
- for p in pop.properties:
+ for p in pop.properties:
pop_info+=(str(p.tag)+'='+str(p.value)+'; ')
pop_info+="\\n"
-
+
info+="* "+str(tot_cells)+" cells in "+str(tot_pop)+" populations \\n"+pop_info+"*\\n"
-
-
+
+
tot_proj =0
- tot_conns = 0
-
+ tot_conns = 0
+
proj_info = ""
for proj in sorted(network.projections, key=lambda x: x.id):
proj_info+="* "+str(proj)+"\\n"
@@ -665,7 +665,7 @@ def summary(self, show_includes=True, show_non_network=True):
proj_info+="* "+str(len(proj.connections))+" connections: [("+str(proj.connections[0])+"), ...]\\n"
if len(proj.connection_wds)>0:
proj_info+="* "+str(len(proj.connection_wds))+" connections (wd): [("+str(proj.connection_wds[0])+"), ...]\\n"
-
+
for proj in sorted(network.electrical_projections, key=lambda x: x.id):
proj_info+="* Electrical projection: "+proj.id+" from "+proj.presynaptic_population+" to "+proj.postsynaptic_population+"\\n"
tot_proj+=1
@@ -678,7 +678,7 @@ def summary(self, show_includes=True, show_non_network=True):
proj_info+="* "+str(len(proj.electrical_connection_instances))+" connections: [("+str(proj.electrical_connection_instances[0])+"), ...]\\n"
if len(proj.electrical_connection_instance_ws)>0:
proj_info+="* "+str(len(proj.electrical_connection_instance_ws))+" connections: [("+str(proj.electrical_connection_instance_ws[0])+"), ...]\\n"
-
+
for proj in sorted(network.continuous_projections, key=lambda x: x.id):
proj_info+="* Continuous projection: "+proj.id+" from "+proj.presynaptic_population+" to "+proj.postsynaptic_population+"\\n"
tot_proj+=1
@@ -691,9 +691,9 @@ def summary(self, show_includes=True, show_non_network=True):
proj_info+="* "+str(len(proj.continuous_connection_instances))+" connections: [("+str(proj.continuous_connection_instances[0])+"), ...]\\n"
if len(proj.continuous_connection_instance_ws)>0:
proj_info+="* "+str(len(proj.continuous_connection_instance_ws))+" connections (w): [("+str(proj.continuous_connection_instance_ws[0])+"), ...]\\n"
-
+
info+="* "+str(tot_conns)+" connections in "+str(tot_proj)+" projections \\n"+proj_info+"*\\n"
-
+
tot_input_lists = 0
tot_inputs = 0
input_info = ""
@@ -706,22 +706,25 @@ def summary(self, show_includes=True, show_non_network=True):
if len(il.input_ws)>0:
input_info+="* "+str(len(il.input_ws))+" inputs: [("+str(il.input_ws[0])+"), ...]\\n"
tot_inputs+=len(il.input_ws)
-
+
info+="* "+str(tot_inputs)+" inputs in "+str(tot_input_lists)+" input lists \\n"+input_info+"*\\n"
-
-
+
+ for el in network.explicit_inputs:
+ info+="* Explicit input to "+el.target+" of type "+el.input+"\\n*\\n"
+
+
info+="*******************************************************"
-
+
return info
-
+
warn_count = 0
-
+
def get_by_id(self,id):
if len(id)==0:
import inspect
callframe = inspect.getouterframes(inspect.currentframe(), 2)
print('Method: '+ callframe[1][3] + ' is asking for an element with no id...')
-
+
return None
all_ids = []
for ms in self.member_data_items_:
@@ -739,15 +742,15 @@ def get_by_id(self,id):
elif self.warn_count==10:
print_(" - Suppressing further warnings about id not found...")
return None
-
+
def append(self,element):
from neuroml.utils import append_to_element
append_to_element(self,element)
-
+
''',
class_names=("NeuroMLDocument")
)
-
+
METHOD_SPECS+=(nml_doc_summary,)
network_get_by_id = MethodSpec(name='get_by_id',
@@ -771,16 +774,16 @@ def get_by_id(self,id):
elif self.warn_count==10:
print_(" - Suppressing further warnings about id not found...")
return None
-
-
+
+
def __str__(self):
-
+
return "Network "+str(self.id)+" with "+str(len(self.populations))+" population(s)"
-
+
''',
class_names=("Network")
)
-
+
METHOD_SPECS+=(network_get_by_id,)
@@ -790,21 +793,21 @@ def __str__(self):
# Get segment object by its id
def get_segment(self, segment_id):
-
+
for segment in self.morphology.segments:
if segment.id == segment_id:
return segment
-
+
raise Exception("Segment with id "+str(segment_id)+" not found in cell "+str(self.id))
-
- # Get the proximal point of a segment, even the proximal field is None and
- # so the proximal point is on the parent (at a point set by fraction_along)
+
+ # Get the proximal point of a segment, even the proximal field is None and
+ # so the proximal point is on the parent (at a point set by fraction_along)
def get_actual_proximal(self, segment_id):
-
+
segment = self.get_segment(segment_id)
if segment.proximal:
return segment.proximal
-
+
parent = self.get_segment(segment.parent.segments)
fract = float(segment.parent.fraction_along)
if fract==1:
@@ -816,43 +819,43 @@ def get_actual_proximal(self, segment_id):
pp = self.get_actual_proximal(segment.parent.segments)
p = Point3DWithDiam((1-fract)*pp.x+fract*pd.x, (1-fract)*pp.y+fract*pd.y, (1-fract)*pp.z+fract*pd.z)
p.diameter = (1-fract)*pp.diameter+fract*pd.diameter
-
+
return p
-
+
def get_segment_length(self, segment_id):
-
+
segment = self.get_segment(segment_id)
if segment.proximal:
return segment.length
else:
prox = self.get_actual_proximal(segment_id)
-
+
length = segment.distal.distance_to(prox)
-
+
return length
-
+
def get_segment_surface_area(self, segment_id):
-
+
segment = self.get_segment(segment_id)
if segment.proximal:
return segment.surface_area
else:
prox = self.get_actual_proximal(segment_id)
-
+
temp_seg = Segment(distal=segment.distal, proximal=prox)
-
+
return temp_seg.surface_area
-
+
def get_segment_volume(self, segment_id):
-
+
segment = self.get_segment(segment_id)
if segment.proximal:
return segment.volume
else:
prox = self.get_actual_proximal(segment_id)
-
+
temp_seg = Segment(distal=segment.distal, proximal=prox)
-
+
return temp_seg.volume
def get_segment_ids_vs_segments(self):
@@ -862,48 +865,48 @@ def get_segment_ids_vs_segments(self):
segments[segment.id] = segment
return segments
-
+
def get_all_segments_in_group(self,
segment_group,
assume_all_means_all=True):
-
+
if isinstance(segment_group, str):
for sg in self.morphology.segment_groups:
if sg.id == segment_group:
segment_group = sg
- if isinstance(segment_group, str):
-
+ if isinstance(segment_group, str):
+
if assume_all_means_all and segment_group=='all': # i.e. wasn't explicitly defined, but assume it means all segments
return [seg.id for seg in self.morphology.segments]
-
+
raise Exception('No segment group '+segment_group+ ' found in cell '+self.id)
-
+
all_segs = []
-
+
for member in segment_group.members:
if not member.segments in all_segs:
all_segs.append(member.segments)
-
-
+
+
for include in segment_group.includes:
segs_here = self.get_all_segments_in_group(include.segment_groups)
for s in segs_here:
if not s in all_segs:
all_segs.append(s)
-
+
return all_segs
-
- def get_ordered_segments_in_groups(self,
- group_list,
- check_parentage=False,
- include_cumulative_lengths=False,
- include_path_lengths=False,
+
+ def get_ordered_segments_in_groups(self,
+ group_list,
+ check_parentage=False,
+ include_cumulative_lengths=False,
+ include_path_lengths=False,
path_length_metric="Path Length from root"): # Only option supported
unord_segs = {}
other_segs = {}
-
+
if isinstance(group_list, str):
group_list = [group_list]
@@ -911,26 +914,26 @@ def get_ordered_segments_in_groups(self,
for sg in self.morphology.segment_groups:
all_segs_here = self.get_all_segments_in_group(sg)
-
+
if sg.id in group_list:
unord_segs[sg.id] = [segments[s] for s in all_segs_here]
else:
other_segs[sg.id] = [segments[s] for s in all_segs_here]
- ord_segs = {}
+ ord_segs = {}
from operator import attrgetter
- for key in unord_segs.keys():
+ for key in unord_segs.keys():
segs = unord_segs[key]
if len(segs)==1 or len(segs)==0:
ord_segs[key]=segs
else:
- ord_segs[key]=sorted(segs,key=attrgetter('id'),reverse=False)
+ ord_segs[key]=sorted(segs,key=attrgetter('id'),reverse=False)
if check_parentage:
# check parent ordering
-
- for key in ord_segs.keys():
+
+ for key in ord_segs.keys():
existing_ids = []
for s in ord_segs[key]:
if s.id != ord_segs[key][0].id:
@@ -941,74 +944,74 @@ def get_ordered_segments_in_groups(self,
if include_cumulative_lengths or include_path_lengths:
import math
-
+
cumulative_lengths = {}
path_lengths_to_proximal = {}
path_lengths_to_distal = {}
-
- for key in ord_segs.keys():
+
+ for key in ord_segs.keys():
cumulative_lengths[key] = []
path_lengths_to_proximal[key] = {}
path_lengths_to_distal[key] = {}
-
+
tot_len = 0
- for seg in ord_segs[key]:
-
+ for seg in ord_segs[key]:
+
length = self.get_segment_length(seg.id)
-
+
if not seg.parent or not seg.parent.segments in path_lengths_to_distal[key]:
-
+
path_lengths_to_proximal[key][seg.id] = 0
last_seg = seg
par_seg_element = seg.parent
while par_seg_element!=None:
-
+
par_seg = segments[par_seg_element.segments]
d = par_seg.distal
p = par_seg.proximal
-
+
if not p:
par_seg_parent_seg = segments[par_seg.parent.segments]
p = par_seg_parent_seg.distal
-
+
par_length = math.sqrt( (d.x-p.x)**2 + (d.y-p.y)**2 + (d.z-p.z)**2 )
-
+
fract = float(last_seg.parent.fraction_along)
path_lengths_to_proximal[key][seg.id] += par_length*fract
-
+
last_seg = par_seg
par_seg_element = par_seg.parent
-
-
+
+
else:
pd = path_lengths_to_distal[key][seg.parent.segments]
pp = path_lengths_to_proximal[key][seg.parent.segments]
fract = float(seg.parent.fraction_along)
-
+
path_lengths_to_proximal[key][seg.id] = pp + (pd - pp)*fract
-
+
path_lengths_to_distal[key][seg.id] = path_lengths_to_proximal[key][seg.id] + length
-
+
tot_len += length
cumulative_lengths[key].append(tot_len)
-
-
+
+
if include_path_lengths and not include_cumulative_lengths:
-
+
return ord_segs, path_lengths_to_proximal, path_lengths_to_distal
if include_cumulative_lengths and not include_path_lengths:
-
+
return ord_segs, cumulative_lengths
if include_cumulative_lengths and include_path_lengths:
-
+
return ord_segs, cumulative_lengths, path_lengths_to_proximal, path_lengths_to_distal
return ord_segs
-
-
-
+
+
+
def summary(self):
print("*******************************************************")
print("* Cell: "+str(self.id))
@@ -1016,30 +1019,30 @@ def summary(self):
print("* Segments: "+str(len(self.morphology.segments)))
print("* SegmentGroups: "+str(len(self.morphology.segment_groups)))
print("*******************************************************")
-
+
''',
class_names=("Cell")
)
-
+
METHOD_SPECS+=(cell_methods,)
-
+
inserts = {}
inserts['Network'] = '''
-
+
import numpy
-
+
netGroup = h5file.create_group(h5Group, 'network')
netGroup._f_setattr("id", self.id)
netGroup._f_setattr("notes", self.notes)
if self.temperature:
netGroup._f_setattr("temperature", self.temperature)
-
-
+
+
for pop in self.populations:
pop.exportHdf5(h5file, netGroup)
-
+
if len(self.synaptic_connections) > 0:
raise Exception(" not yet supported in HDF5 export")
if len(self.explicit_inputs) > 0:
@@ -1047,29 +1050,29 @@ def summary(self):
for proj in self.projections:
proj.exportHdf5(h5file, netGroup)
-
+
for eproj in self.electrical_projections:
eproj.exportHdf5(h5file, netGroup)
-
+
for cproj in self.continuous_projections:
cproj.exportHdf5(h5file, netGroup)
-
+
for il in self.input_lists:
il.exportHdf5(h5file, netGroup)
-
+
'''
inserts['Population'] = '''
-
+
import numpy
-
+
popGroup = h5file.create_group(h5Group, 'population_'+self.id)
popGroup._f_setattr("id", self.id)
popGroup._f_setattr("component", self.component)
for p in self.properties:
popGroup._f_setattr("property:"+p.tag, p.value)
-
-
+
+
if len(self.instances)>0:
colCount = 3
@@ -1082,7 +1085,7 @@ def summary(self):
a[count,2] = instance.location.z
count=count+1
-
+
popGroup._f_setattr("size", count)
popGroup._f_setattr("type", "populationList")
@@ -1090,92 +1093,92 @@ def summary(self):
array._f_setattr("column_0", "x")
array._f_setattr("column_1", "y")
array._f_setattr("column_2", "z")
-
+
else:
popGroup._f_setattr("size", self.size)
-
+
def get_size(self):
return len(self.instances) if len(self.instances)>0 else (self.size if self.size else 0)
-
+
def __str__(self):
-
+
return "Population: "+str(self.id)+" with "+str( self.get_size() )+" components of type "+(self.component if self.component else "???")
-
+
'''
inserts['Projection'] = '''
-
+
import numpy
-
+
projGroup = h5file.create_group(h5Group, 'projection_'+self.id)
projGroup._f_setattr("id", self.id)
projGroup._f_setattr("type", "projection")
projGroup._f_setattr("presynapticPopulation", self.presynaptic_population)
projGroup._f_setattr("postsynapticPopulation", self.postsynaptic_population)
projGroup._f_setattr("synapse", self.synapse)
-
+
#print("Exporting "+str(len(self.connections))+" connections, "+str(len(self.connection_wds))+" connections with weight")
-
+
connection_wds = len(self.connection_wds) > 0
-
+
cols = 2
-
+
extra_cols = {}
-
+
from neuroml.utils import has_segment_fraction_info
-
+
include_segment_fraction = has_segment_fraction_info(self.connections) or has_segment_fraction_info(self.connection_wds)
-
+
if include_segment_fraction:
extra_cols["column_"+str(cols)] = "pre_segment_id"
extra_cols["column_"+str(cols+1)] = "post_segment_id"
extra_cols["column_"+str(cols+2)] = "pre_fraction_along"
extra_cols["column_"+str(cols+3)] = "post_fraction_along"
cols +=4
-
-
+
+
if connection_wds:
extra_cols["column_"+str(cols)] = "weight"
extra_cols["column_"+str(cols+1)] = "delay"
cols+=2
-
+
a = numpy.zeros([len(self.connections)+len(self.connection_wds), cols], numpy.float32)
-
+
count=0
-
+
for connection in self.connections:
####a[count,0] = connection.id
a[count,0] = connection.get_pre_cell_id()
- a[count,1] = connection.get_post_cell_id()
+ a[count,1] = connection.get_post_cell_id()
if include_segment_fraction:
- a[count,2] = connection.pre_segment_id
- a[count,3] = connection.post_segment_id
- a[count,4] = connection.pre_fraction_along
- a[count,5] = connection.post_fraction_along
+ a[count,2] = connection.pre_segment_id
+ a[count,3] = connection.post_segment_id
+ a[count,4] = connection.pre_fraction_along
+ a[count,5] = connection.post_fraction_along
count=count+1
-
+
for connection in self.connection_wds:
###a[count,0] = connection.id
a[count,0] = connection.get_pre_cell_id()
- a[count,1] = connection.get_post_cell_id()
-
+ a[count,1] = connection.get_post_cell_id()
+
if include_segment_fraction:
- a[count,2] = connection.pre_segment_id
- a[count,3] = connection.post_segment_id
- a[count,4] = connection.pre_fraction_along
- a[count,5] = connection.post_fraction_along
-
- a[count,cols-2] = connection.weight
+ a[count,2] = connection.pre_segment_id
+ a[count,3] = connection.post_segment_id
+ a[count,4] = connection.pre_fraction_along
+ a[count,5] = connection.post_fraction_along
+
+ a[count,cols-2] = connection.weight
if 'ms' in connection.delay:
delay = float(connection.delay[:-2].strip())
elif 's' in connection.delay:
delay = float(connection.delay[:-1].strip())*1000.
elif 'us' in connection.delay:
delay = float(connection.delay[:-2].strip())/1e3
-
- a[count,cols-1] = delay
+
+ a[count,cols-1] = delay
count=count+1
-
+
if len(a)>0:
array = h5file.create_carray(projGroup, self.id, obj=a, title="Connections of cells in "+ self.id)
@@ -1185,76 +1188,76 @@ def __str__(self):
for col in extra_cols.keys():
array._f_setattr(col,extra_cols[col])
-
-
+
+
def __str__(self):
return "Projection: "+self.id+" from "+self.presynaptic_population+" to "+self.postsynaptic_population+", synapse: "+self.synapse
-
-
-
+
+
+
'''
inserts['ElectricalProjection'] = '''
-
+
import numpy
-
+
projGroup = h5file.create_group(h5Group, 'projection_'+self.id)
projGroup._f_setattr("id", self.id)
projGroup._f_setattr("type", "electricalProjection")
projGroup._f_setattr("presynapticPopulation", self.presynaptic_population)
projGroup._f_setattr("postsynapticPopulation", self.postsynaptic_population)
-
+
syn = self.electrical_connections[0].synapse if len(self.electrical_connections)>0 else \
self.electrical_connection_instances[0].synapse if len(self.electrical_connection_instances)>0 else self.electrical_connection_instance_ws[0].synapse
projGroup._f_setattr("synapse", syn )
-
+
cols = 7
extra_cols = {}
-
+
num_tot = len(self.electrical_connections)+len(self.electrical_connection_instances)+len(self.electrical_connection_instance_ws)
if len(self.electrical_connection_instance_ws)>0:
extra_cols["column_"+str(cols)] = "weight"
cols+=1
-
+
#print("Exporting "+str(num_tot)+" electrical connections")
a = numpy.zeros([num_tot, cols], numpy.float32)
-
+
count=0
-
+
# TODO: optimise for single compartment cells, i.e. where no pre_segment/post_fraction_along etc.
for connection in self.electrical_connections:
a[count,0] = connection.id
a[count,1] = connection.get_pre_cell_id()
- a[count,2] = connection.get_post_cell_id()
- a[count,3] = connection.pre_segment
- a[count,4] = connection.post_segment
- a[count,5] = connection.pre_fraction_along
- a[count,6] = connection.post_fraction_along
+ a[count,2] = connection.get_post_cell_id()
+ a[count,3] = connection.pre_segment
+ a[count,4] = connection.post_segment
+ a[count,5] = connection.pre_fraction_along
+ a[count,6] = connection.post_fraction_along
count=count+1
-
+
for connection in self.electrical_connection_instances:
a[count,0] = connection.id
a[count,1] = connection.get_pre_cell_id()
- a[count,2] = connection.get_post_cell_id()
- a[count,3] = connection.pre_segment
- a[count,4] = connection.post_segment
- a[count,5] = connection.pre_fraction_along
- a[count,6] = connection.post_fraction_along
+ a[count,2] = connection.get_post_cell_id()
+ a[count,3] = connection.pre_segment
+ a[count,4] = connection.post_segment
+ a[count,5] = connection.pre_fraction_along
+ a[count,6] = connection.post_fraction_along
count=count+1
-
+
for connection in self.electrical_connection_instance_ws:
a[count,0] = connection.id
a[count,1] = connection.get_pre_cell_id()
- a[count,2] = connection.get_post_cell_id()
- a[count,3] = connection.pre_segment
- a[count,4] = connection.post_segment
- a[count,5] = connection.pre_fraction_along
- a[count,6] = connection.post_fraction_along
- a[count,7] = connection.get_weight()
+ a[count,2] = connection.get_post_cell_id()
+ a[count,3] = connection.pre_segment
+ a[count,4] = connection.post_segment
+ a[count,5] = connection.pre_fraction_along
+ a[count,6] = connection.post_fraction_along
+ a[count,7] = connection.get_weight()
count=count+1
-
+
array = h5file.create_carray(projGroup, self.id, obj=a, title="Connections of cells in "+ self.id)
-
+
array._f_setattr("column_0", "id")
array._f_setattr("column_1", "pre_cell_id")
array._f_setattr("column_2", "post_cell_id")
@@ -1265,78 +1268,78 @@ def __str__(self):
for col in extra_cols.keys():
array._f_setattr(col,extra_cols[col])
-
+
'''
inserts['ContinuousProjection'] = '''
-
+
import numpy
-
+
projGroup = h5file.create_group(h5Group, 'projection_'+self.id)
projGroup._f_setattr("id", self.id)
projGroup._f_setattr("type", "continuousProjection")
projGroup._f_setattr("presynapticPopulation", self.presynaptic_population)
projGroup._f_setattr("postsynapticPopulation", self.postsynaptic_population)
-
+
pre_comp = self.continuous_connections[0].pre_component if len(self.continuous_connections)>0 else \
self.continuous_connection_instances[0].pre_component if len(self.continuous_connection_instances)>0 else self.continuous_connection_instance_ws[0].pre_component
projGroup._f_setattr("preComponent", pre_comp )
post_comp = self.continuous_connections[0].post_component if len(self.continuous_connections)>0 else \
self.continuous_connection_instances[0].post_component if len(self.continuous_connection_instances)>0 else self.continuous_connection_instance_ws[0].post_component
projGroup._f_setattr("postComponent", post_comp )
-
+
cols = 7
extra_cols = {}
-
+
num_tot = len(self.continuous_connections)+len(self.continuous_connection_instances)+len(self.continuous_connection_instance_ws)
-
+
if len(self.continuous_connection_instance_ws)>0:
extra_cols["column_"+str(cols)] = 'weight'
cols+=1
-
+
#print("Exporting "+str(num_tot)+" continuous connections")
a = numpy.zeros([num_tot, cols], numpy.float32)
-
+
count=0
-
+
# TODO: optimise for single compartment cells, i.e. where no pre_segment/post_fraction_along etc.
for connection in self.continuous_connections:
a[count,0] = connection.id
a[count,1] = connection.get_pre_cell_id()
- a[count,2] = connection.get_post_cell_id()
- a[count,3] = connection.pre_segment
- a[count,4] = connection.post_segment
- a[count,5] = connection.pre_fraction_along
- a[count,6] = connection.post_fraction_along
+ a[count,2] = connection.get_post_cell_id()
+ a[count,3] = connection.pre_segment
+ a[count,4] = connection.post_segment
+ a[count,5] = connection.pre_fraction_along
+ a[count,6] = connection.post_fraction_along
count=count+1
-
+
for connection in self.continuous_connection_instances:
a[count,0] = connection.id
a[count,1] = connection.get_pre_cell_id()
- a[count,2] = connection.get_post_cell_id()
- a[count,3] = connection.pre_segment
- a[count,4] = connection.post_segment
- a[count,5] = connection.pre_fraction_along
- a[count,6] = connection.post_fraction_along
+ a[count,2] = connection.get_post_cell_id()
+ a[count,3] = connection.pre_segment
+ a[count,4] = connection.post_segment
+ a[count,5] = connection.pre_fraction_along
+ a[count,6] = connection.post_fraction_along
count=count+1
-
-
+
+
for connection in self.continuous_connection_instance_ws:
a[count,0] = connection.id
a[count,1] = connection.get_pre_cell_id()
- a[count,2] = connection.get_post_cell_id()
- a[count,3] = connection.pre_segment
- a[count,4] = connection.post_segment
- a[count,5] = connection.pre_fraction_along
- a[count,6] = connection.post_fraction_along
- a[count,7] = connection.weight
+ a[count,2] = connection.get_post_cell_id()
+ a[count,3] = connection.pre_segment
+ a[count,4] = connection.post_segment
+ a[count,5] = connection.pre_fraction_along
+ a[count,6] = connection.post_fraction_along
+ a[count,7] = connection.weight
count=count+1
-
-
+
+
array = h5file.create_carray(projGroup, self.id, obj=a, title="Connections of cells in "+ self.id)
-
+
array._f_setattr("column_0", "id")
array._f_setattr("column_1", "pre_cell_id")
array._f_setattr("column_2", "post_cell_id")
@@ -1346,42 +1349,42 @@ def __str__(self):
array._f_setattr("column_6", "post_fraction_along")
for k in extra_cols:
array._f_setattr(k, extra_cols[k])
-
-
+
+
'''
inserts['InputList'] = '''
-
+
import numpy
-
+
ilGroup = h5file.create_group(h5Group, 'inputList_'+self.id)
ilGroup._f_setattr("id", self.id)
ilGroup._f_setattr("component", self.component)
ilGroup._f_setattr("population", self.populations)
-
+
cols = 4
-
+
extra_cols = {}
-
+
num_tot = len(self.input)+len(self.input_ws)
-
+
if len(self.input_ws)>0:
extra_cols["column_"+str(cols)] = 'weight'
cols+=1
-
+
#print("Exporting "+str(num_tot)+" inputs")
a = numpy.zeros([num_tot, cols], numpy.float32)
-
+
count=0
-
+
for input in self.input:
a[count,0] = input.id
a[count,1] = input.get_target_cell_id()
a[count,2] = input.get_segment_id()
a[count,3] = input.get_fraction_along()
count+=1
-
+
for input in self.input_ws:
a[count,0] = input.id
a[count,1] = input.get_target_cell_id()
@@ -1389,24 +1392,24 @@ def __str__(self):
a[count,3] = input.get_fraction_along()
a[count,4] = input.get_weight()
count+=1
-
+
array = h5file.create_carray(ilGroup, self.id, obj=a, title="Locations of inputs in "+ self.id)
-
+
array._f_setattr("column_0", "id")
array._f_setattr("column_1", "target_cell_id")
array._f_setattr("column_2", "segment_id")
array._f_setattr("column_3", "fraction_along")
for k in extra_cols:
array._f_setattr(k, extra_cols[k])
-
+
def __str__(self):
-
+
return "Input list: "+self.id+" to "+self.populations+", component "+self.component
-
+
'''
-
+
for insert in inserts.keys():
ms = MethodSpec(name='exportHdf5',
source='''\
diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py
index e1d0fc06..90580e43 100644
--- a/neuroml/nml/nml.py
+++ b/neuroml/nml/nml.py
@@ -2,11 +2,12 @@
# -*- coding: utf-8 -*-
#
-# Generated Mon Feb 15 12:34:31 2021 by generateDS.py version 2.30.11.
-# Python 2.7.16 |Anaconda, Inc.| (default, Mar 14 2019, 21:00:58) [GCC 7.3.0]
+# Generated Mon Feb 15 14:02:24 2021 by generateDS.py version 2.30.11.
+# Python 2.7.15 |Anaconda, Inc.| (default, Oct 23 2018, 13:35:16) [GCC 4.2.1 Compatible Clang 4.0.1 (tags/RELEASE_401/final)]
#
# Command line options:
# ('-o', 'nml.py')
+# ('-f', '')
# ('--use-getter-setter', 'none')
# ('--silence', '')
# ('--user-methods', 'helper_methods')
@@ -15,7 +16,7 @@
# NeuroML_v2.1.xsd
#
# Command line:
-# /home/padraig/anaconda2/bin/generateDS.py -o "nml.py" --use-getter-setter="none" --silence --user-methods="helper_methods" NeuroML_v2.1.xsd
+# /Users/padraig/anaconda/envs/py27/bin/generateDS.py -o "nml.py" -f --use-getter-setter="none" --silence --user-methods="helper_methods" NeuroML_v2.1.xsd
#
# Current working directory (os.getcwd()):
# nml
@@ -3488,13 +3489,13 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
def __str__(self):
-
+
return "("+str(self.x)+", "+str(self.y)+", "+str(self.z)+"), diam "+str(self.diameter)+"um"
-
+
def __repr__(self):
-
+
return str(self)
-
+
def distance_to(self, other_3d_point):
a_x = self.x
a_y = self.y
@@ -3506,7 +3507,7 @@ def distance_to(self, other_3d_point):
distance = ((a_x-b_x)**2 + (a_y-b_y)**2 + (a_z-b_z)**2)**(0.5)
return distance
-
+
# end class Point3DWithDiam
@@ -5699,15 +5700,15 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
self.location = obj_
obj_.original_tagname_ = 'location'
-
+
def __str__(self):
-
+
return "Instance "+str(self.id)+ (" at location: "+str(self.location) if self.location else "")
-
+
def __repr__(self):
-
+
return str(self)
-
+
# end class Instance
@@ -5808,20 +5809,20 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
def _format(self,value):
-
+
if int(value)==value:
return str(int(value))
else:
return '%.4f' % value
-
+
def __str__(self):
-
+
return "("+ self._format(self.x) +", "+ self._format(self.y) +", "+ self._format(self.z) +")"
-
+
def __repr__(self):
-
+
return str(self)
-
+
# end class Location
@@ -6031,21 +6032,21 @@ def _get_cell_id(self, id_string):
return int(id_string.split('/')[2])
def get_target_cell_id(self):
-
+
return self._get_cell_id(self.target)
def get_segment_id(self):
-
+
return int(self.segment_id) if self.segment_id else 0
def get_fraction_along(self):
-
+
return float(self.fraction_along) if self.fraction_along else 0.5
-
+
def __str__(self):
-
+
return "Input "+str(self.id)+": "+str(self.get_target_cell_id())+":"+str(self.get_segment_id())+"("+'%.6f'%self.get_fraction_along()+")"
-
+
# end class ExplicitInput
@@ -6206,21 +6207,21 @@ def _get_cell_id(self, id_string):
return int(id_string.split('/')[2])
def get_target_cell_id(self):
-
+
return self._get_cell_id(self.target)
def get_segment_id(self):
-
+
return int(self.segment_id) if self.segment_id else 0
def get_fraction_along(self):
-
+
return float(self.fraction_along) if self.fraction_along else 0.5
-
+
def __str__(self):
-
+
return "Input "+str(self.id)+": "+str(self.get_target_cell_id())+":"+str(self.get_segment_id())+"("+'%.6f'%self.get_fraction_along()+")"
-
+
# end class Input
@@ -6302,15 +6303,15 @@ def buildAttributes(self, node, attrs, already_processed):
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
super(InputW, self).buildChildren(child_, node, nodeName_, True)
pass
-
+
def get_weight(self):
-
+
return float(self.weight) if self.weight!=None else 1.0
def __str__(self):
-
+
return "Input (weight) "+str(self.id)+": "+str(self.get_target_cell_id())+":"+str(self.get_segment_id())+"("+'%.6f'%self.get_fraction_along()+"), weight: "+'%.6f'%self.get_weight()
-
+
# end class InputW
@@ -6974,36 +6975,36 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
def exportHdf5(self, h5file, h5Group):
#print("Exporting InputList: "+str(self.id)+" as HDF5")
-
+
import numpy
-
+
ilGroup = h5file.create_group(h5Group, 'inputList_'+self.id)
ilGroup._f_setattr("id", self.id)
ilGroup._f_setattr("component", self.component)
ilGroup._f_setattr("population", self.populations)
-
+
cols = 4
-
+
extra_cols = {}
-
+
num_tot = len(self.input)+len(self.input_ws)
-
+
if len(self.input_ws)>0:
extra_cols["column_"+str(cols)] = 'weight'
cols+=1
-
+
#print("Exporting "+str(num_tot)+" inputs")
a = numpy.zeros([num_tot, cols], numpy.float32)
-
+
count=0
-
+
for input in self.input:
a[count,0] = input.id
a[count,1] = input.get_target_cell_id()
a[count,2] = input.get_segment_id()
a[count,3] = input.get_fraction_along()
count+=1
-
+
for input in self.input_ws:
a[count,0] = input.id
a[count,1] = input.get_target_cell_id()
@@ -7011,20 +7012,20 @@ def exportHdf5(self, h5file, h5Group):
a[count,3] = input.get_fraction_along()
a[count,4] = input.get_weight()
count+=1
-
+
array = h5file.create_carray(ilGroup, self.id, obj=a, title="Locations of inputs in "+ self.id)
-
+
array._f_setattr("column_0", "id")
array._f_setattr("column_1", "target_cell_id")
array._f_setattr("column_2", "segment_id")
array._f_setattr("column_3", "fraction_along")
for k in extra_cols:
array._f_setattr(k, extra_cols[k])
-
+
def __str__(self):
-
+
return "Input list: "+self.id+" to "+self.populations+", component "+self.component
-
+
# end class InputList
@@ -7464,16 +7465,16 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
def exportHdf5(self, h5file, h5Group):
#print("Exporting Population: "+str(self.id)+" as HDF5")
-
+
import numpy
-
+
popGroup = h5file.create_group(h5Group, 'population_'+self.id)
popGroup._f_setattr("id", self.id)
popGroup._f_setattr("component", self.component)
for p in self.properties:
popGroup._f_setattr("property:"+p.tag, p.value)
-
-
+
+
if len(self.instances)>0:
colCount = 3
@@ -7486,7 +7487,7 @@ def exportHdf5(self, h5file, h5Group):
a[count,2] = instance.location.z
count=count+1
-
+
popGroup._f_setattr("size", count)
popGroup._f_setattr("type", "populationList")
@@ -7494,17 +7495,17 @@ def exportHdf5(self, h5file, h5Group):
array._f_setattr("column_0", "x")
array._f_setattr("column_1", "y")
array._f_setattr("column_2", "z")
-
+
else:
popGroup._f_setattr("size", self.size)
-
+
def get_size(self):
return len(self.instances) if len(self.instances)>0 else (self.size if self.size else 0)
-
+
def __str__(self):
-
+
return "Population: "+str(self.id)+" with "+str( self.get_size() )+" components of type "+(self.component if self.component else "???")
-
+
# end class Population
@@ -7977,29 +7978,29 @@ def get_by_id(self,id):
elif self.warn_count==10:
print_(" - Suppressing further warnings about id not found...")
return None
-
-
+
+
def __str__(self):
-
+
return "Network "+str(self.id)+" with "+str(len(self.populations))+" population(s)"
-
+
def exportHdf5(self, h5file, h5Group):
#print("Exporting Network: "+str(self.id)+" as HDF5")
-
+
import numpy
-
+
netGroup = h5file.create_group(h5Group, 'network')
netGroup._f_setattr("id", self.id)
netGroup._f_setattr("notes", self.notes)
if self.temperature:
netGroup._f_setattr("temperature", self.temperature)
-
-
+
+
for pop in self.populations:
pop.exportHdf5(h5file, netGroup)
-
+
if len(self.synaptic_connections) > 0:
raise Exception(" not yet supported in HDF5 export")
if len(self.explicit_inputs) > 0:
@@ -8007,16 +8008,16 @@ def exportHdf5(self, h5file, h5Group):
for proj in self.projections:
proj.exportHdf5(h5file, netGroup)
-
+
for eproj in self.electrical_projections:
eproj.exportHdf5(h5file, netGroup)
-
+
for cproj in self.continuous_projections:
cproj.exportHdf5(h5file, netGroup)
-
+
for il in self.input_lists:
il.exportHdf5(h5file, netGroup)
-
+
# end class Network
@@ -12223,15 +12224,15 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
obj_.original_tagname_ = 'inhomogeneousParameter'
super(SegmentGroup, self).buildChildren(child_, node, nodeName_, True)
-
+
def __str__(self):
-
+
return "SegmentGroup: "+str(self.id)+", "+str(len(self.members))+" member(s), "+str(len(self.includes))+" included group(s)"
-
+
def __repr__(self):
-
+
return str(self)
-
+
# end class SegmentGroup
@@ -12343,14 +12344,14 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
super(Segment, self).buildChildren(child_, node, nodeName_, True)
@property
def length(self):
-
+
if self.proximal==None:
raise Exception('Cannot get length of segment '+str(self.id)+' using the length property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_length(segment_id) on the cell instead.')
-
+
prox_x = self.proximal.x
prox_y = self.proximal.y
prox_z = self.proximal.z
-
+
dist_x = self.distal.x
dist_y = self.distal.y
dist_z = self.distal.z
@@ -12358,32 +12359,32 @@ def length(self):
length = ((prox_x-dist_x)**2 + (prox_y-dist_y)**2 + (prox_z-dist_z)**2)**(0.5)
return length
-
+
def __str__(self):
-
+
return ""
-
+
def __repr__(self):
-
+
return str(self)
-
+
@property
def volume(self):
-
+
from math import pi
if self.proximal==None:
raise Exception('Cannot get volume of segment '+str(self.id)+' using the volume property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_volume(segment_id) on the cell instead.')
-
+
prox_rad = self.proximal.diameter/2.0
dist_rad = self.distal.diameter/2.0
-
+
if self.proximal.x == self.distal.x and self.proximal.y == self.distal.y and self.proximal.z == self.distal.z:
-
+
if prox_rad!=dist_rad:
raise Exception('Cannot get volume of segment '+str(self.id)+'. The (x,y,z) coordinates of the proximal and distal points match (i.e. it is a sphere), but the diameters of these points are different, making the volume calculation ambiguous.')
-
+
return 4.0/3 * pi * prox_rad**3
-
+
length = self.length
volume = (pi/3)*length*(prox_rad**2+dist_rad**2+prox_rad*dist_rad)
@@ -12394,24 +12395,24 @@ def volume(self):
def surface_area(self):
from math import pi
from math import sqrt
-
+
if self.proximal==None:
raise Exception('Cannot get surface area of segment '+str(self.id)+' using the surface_area property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_surface_area(segment_id) on the cell instead.')
-
+
prox_rad = self.proximal.diameter/2.0
dist_rad = self.distal.diameter/2.0
-
+
if self.proximal.x == self.distal.x and self.proximal.y == self.distal.y and self.proximal.z == self.distal.z:
-
+
if prox_rad!=dist_rad:
raise Exception('Cannot get surface area of segment '+str(self.id)+'. The (x,y,z) coordinates of the proximal and distal points match (i.e. it is a sphere), but the diameters of these points are different, making the surface area calculation ambiguous.')
-
+
return 4.0 * pi * prox_rad**2
-
+
length = self.length
surface_area = pi*(prox_rad+dist_rad)*sqrt((prox_rad-dist_rad)**2+length**2)
-
+
return surface_area
# end class Segment
@@ -16050,11 +16051,11 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
obj_.original_tagname_ = 'ComponentType'
super(NeuroMLDocument, self).buildChildren(child_, node, nodeName_, True)
-
+
def summary(self, show_includes=True, show_non_network=True):
-
+
import inspect
-
+
info = "*******************************************************\n"
info+="* NeuroMLDocument: "+self.id+"\n*\n"
post = ""
@@ -16076,14 +16077,14 @@ def summary(self, show_includes=True, show_non_network=True):
listed.append(str(entry.tag)+" = "+str(entry.value))
info+= str(sorted(listed))+"\n"
info+= post
-
+
for network in self.networks:
info+="* Network: "+network.id
if network.temperature:
info+=" (temperature: "+network.temperature+")"
info+="\n*\n"
tot_pop =0
- tot_cells = 0
+ tot_cells = 0
pop_info = ""
for pop in sorted(network.populations, key=lambda x: x.id):
pop_info+="* "+str(pop)+"\n"
@@ -16094,16 +16095,16 @@ def summary(self, show_includes=True, show_non_network=True):
pop_info+="* Locations: ["+str(loc)+", ...]\n"
if len(pop.properties)>0:
pop_info+="* Properties: "
- for p in pop.properties:
+ for p in pop.properties:
pop_info+=(str(p.tag)+'='+str(p.value)+'; ')
pop_info+="\n"
-
+
info+="* "+str(tot_cells)+" cells in "+str(tot_pop)+" populations \n"+pop_info+"*\n"
-
-
+
+
tot_proj =0
- tot_conns = 0
-
+ tot_conns = 0
+
proj_info = ""
for proj in sorted(network.projections, key=lambda x: x.id):
proj_info+="* "+str(proj)+"\n"
@@ -16114,7 +16115,7 @@ def summary(self, show_includes=True, show_non_network=True):
proj_info+="* "+str(len(proj.connections))+" connections: [("+str(proj.connections[0])+"), ...]\n"
if len(proj.connection_wds)>0:
proj_info+="* "+str(len(proj.connection_wds))+" connections (wd): [("+str(proj.connection_wds[0])+"), ...]\n"
-
+
for proj in sorted(network.electrical_projections, key=lambda x: x.id):
proj_info+="* Electrical projection: "+proj.id+" from "+proj.presynaptic_population+" to "+proj.postsynaptic_population+"\n"
tot_proj+=1
@@ -16127,7 +16128,7 @@ def summary(self, show_includes=True, show_non_network=True):
proj_info+="* "+str(len(proj.electrical_connection_instances))+" connections: [("+str(proj.electrical_connection_instances[0])+"), ...]\n"
if len(proj.electrical_connection_instance_ws)>0:
proj_info+="* "+str(len(proj.electrical_connection_instance_ws))+" connections: [("+str(proj.electrical_connection_instance_ws[0])+"), ...]\n"
-
+
for proj in sorted(network.continuous_projections, key=lambda x: x.id):
proj_info+="* Continuous projection: "+proj.id+" from "+proj.presynaptic_population+" to "+proj.postsynaptic_population+"\n"
tot_proj+=1
@@ -16140,9 +16141,9 @@ def summary(self, show_includes=True, show_non_network=True):
proj_info+="* "+str(len(proj.continuous_connection_instances))+" connections: [("+str(proj.continuous_connection_instances[0])+"), ...]\n"
if len(proj.continuous_connection_instance_ws)>0:
proj_info+="* "+str(len(proj.continuous_connection_instance_ws))+" connections (w): [("+str(proj.continuous_connection_instance_ws[0])+"), ...]\n"
-
+
info+="* "+str(tot_conns)+" connections in "+str(tot_proj)+" projections \n"+proj_info+"*\n"
-
+
tot_input_lists = 0
tot_inputs = 0
input_info = ""
@@ -16155,22 +16156,25 @@ def summary(self, show_includes=True, show_non_network=True):
if len(il.input_ws)>0:
input_info+="* "+str(len(il.input_ws))+" inputs: [("+str(il.input_ws[0])+"), ...]\n"
tot_inputs+=len(il.input_ws)
-
+
info+="* "+str(tot_inputs)+" inputs in "+str(tot_input_lists)+" input lists \n"+input_info+"*\n"
-
-
+
+ for el in network.explicit_inputs:
+ info+="* Explicit input to "+el.target+" of type "+el.input+"\n*\n"
+
+
info+="*******************************************************"
-
+
return info
-
+
warn_count = 0
-
+
def get_by_id(self,id):
if len(id)==0:
import inspect
callframe = inspect.getouterframes(inspect.currentframe(), 2)
print('Method: '+ callframe[1][3] + ' is asking for an element with no id...')
-
+
return None
all_ids = []
for ms in self.member_data_items_:
@@ -16188,11 +16192,11 @@ def get_by_id(self,id):
elif self.warn_count==10:
print_(" - Suppressing further warnings about id not found...")
return None
-
+
def append(self,element):
from neuroml.utils import append_to_element
append_to_element(self,element)
-
+
# end class NeuroMLDocument
@@ -16534,70 +16538,70 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
def exportHdf5(self, h5file, h5Group):
#print("Exporting ContinuousProjection: "+str(self.id)+" as HDF5")
-
+
import numpy
-
+
projGroup = h5file.create_group(h5Group, 'projection_'+self.id)
projGroup._f_setattr("id", self.id)
projGroup._f_setattr("type", "continuousProjection")
projGroup._f_setattr("presynapticPopulation", self.presynaptic_population)
projGroup._f_setattr("postsynapticPopulation", self.postsynaptic_population)
-
+
pre_comp = self.continuous_connections[0].pre_component if len(self.continuous_connections)>0 else self.continuous_connection_instances[0].pre_component if len(self.continuous_connection_instances)>0 else self.continuous_connection_instance_ws[0].pre_component
projGroup._f_setattr("preComponent", pre_comp )
post_comp = self.continuous_connections[0].post_component if len(self.continuous_connections)>0 else self.continuous_connection_instances[0].post_component if len(self.continuous_connection_instances)>0 else self.continuous_connection_instance_ws[0].post_component
projGroup._f_setattr("postComponent", post_comp )
-
+
cols = 7
extra_cols = {}
-
+
num_tot = len(self.continuous_connections)+len(self.continuous_connection_instances)+len(self.continuous_connection_instance_ws)
-
+
if len(self.continuous_connection_instance_ws)>0:
extra_cols["column_"+str(cols)] = 'weight'
cols+=1
-
+
#print("Exporting "+str(num_tot)+" continuous connections")
a = numpy.zeros([num_tot, cols], numpy.float32)
-
+
count=0
-
+
# TODO: optimise for single compartment cells, i.e. where no pre_segment/post_fraction_along etc.
for connection in self.continuous_connections:
a[count,0] = connection.id
a[count,1] = connection.get_pre_cell_id()
- a[count,2] = connection.get_post_cell_id()
- a[count,3] = connection.pre_segment
- a[count,4] = connection.post_segment
- a[count,5] = connection.pre_fraction_along
- a[count,6] = connection.post_fraction_along
+ a[count,2] = connection.get_post_cell_id()
+ a[count,3] = connection.pre_segment
+ a[count,4] = connection.post_segment
+ a[count,5] = connection.pre_fraction_along
+ a[count,6] = connection.post_fraction_along
count=count+1
-
+
for connection in self.continuous_connection_instances:
a[count,0] = connection.id
a[count,1] = connection.get_pre_cell_id()
- a[count,2] = connection.get_post_cell_id()
- a[count,3] = connection.pre_segment
- a[count,4] = connection.post_segment
- a[count,5] = connection.pre_fraction_along
- a[count,6] = connection.post_fraction_along
+ a[count,2] = connection.get_post_cell_id()
+ a[count,3] = connection.pre_segment
+ a[count,4] = connection.post_segment
+ a[count,5] = connection.pre_fraction_along
+ a[count,6] = connection.post_fraction_along
count=count+1
-
-
+
+
for connection in self.continuous_connection_instance_ws:
a[count,0] = connection.id
a[count,1] = connection.get_pre_cell_id()
- a[count,2] = connection.get_post_cell_id()
- a[count,3] = connection.pre_segment
- a[count,4] = connection.post_segment
- a[count,5] = connection.pre_fraction_along
- a[count,6] = connection.post_fraction_along
- a[count,7] = connection.weight
+ a[count,2] = connection.get_post_cell_id()
+ a[count,3] = connection.pre_segment
+ a[count,4] = connection.post_segment
+ a[count,5] = connection.pre_fraction_along
+ a[count,6] = connection.post_fraction_along
+ a[count,7] = connection.weight
count=count+1
-
-
+
+
array = h5file.create_carray(projGroup, self.id, obj=a, title="Connections of cells in "+ self.id)
-
+
array._f_setattr("column_0", "id")
array._f_setattr("column_1", "pre_cell_id")
array._f_setattr("column_2", "post_cell_id")
@@ -16607,8 +16611,8 @@ def exportHdf5(self, h5file, h5Group):
array._f_setattr("column_6", "post_fraction_along")
for k in extra_cols:
array._f_setattr(k, extra_cols[k])
-
-
+
+
# end class ContinuousProjection
@@ -16727,65 +16731,65 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
def exportHdf5(self, h5file, h5Group):
#print("Exporting ElectricalProjection: "+str(self.id)+" as HDF5")
-
+
import numpy
-
+
projGroup = h5file.create_group(h5Group, 'projection_'+self.id)
projGroup._f_setattr("id", self.id)
projGroup._f_setattr("type", "electricalProjection")
projGroup._f_setattr("presynapticPopulation", self.presynaptic_population)
projGroup._f_setattr("postsynapticPopulation", self.postsynaptic_population)
-
+
syn = self.electrical_connections[0].synapse if len(self.electrical_connections)>0 else self.electrical_connection_instances[0].synapse if len(self.electrical_connection_instances)>0 else self.electrical_connection_instance_ws[0].synapse
projGroup._f_setattr("synapse", syn )
-
+
cols = 7
extra_cols = {}
-
+
num_tot = len(self.electrical_connections)+len(self.electrical_connection_instances)+len(self.electrical_connection_instance_ws)
if len(self.electrical_connection_instance_ws)>0:
extra_cols["column_"+str(cols)] = "weight"
cols+=1
-
+
#print("Exporting "+str(num_tot)+" electrical connections")
a = numpy.zeros([num_tot, cols], numpy.float32)
-
+
count=0
-
+
# TODO: optimise for single compartment cells, i.e. where no pre_segment/post_fraction_along etc.
for connection in self.electrical_connections:
a[count,0] = connection.id
a[count,1] = connection.get_pre_cell_id()
- a[count,2] = connection.get_post_cell_id()
- a[count,3] = connection.pre_segment
- a[count,4] = connection.post_segment
- a[count,5] = connection.pre_fraction_along
- a[count,6] = connection.post_fraction_along
+ a[count,2] = connection.get_post_cell_id()
+ a[count,3] = connection.pre_segment
+ a[count,4] = connection.post_segment
+ a[count,5] = connection.pre_fraction_along
+ a[count,6] = connection.post_fraction_along
count=count+1
-
+
for connection in self.electrical_connection_instances:
a[count,0] = connection.id
a[count,1] = connection.get_pre_cell_id()
- a[count,2] = connection.get_post_cell_id()
- a[count,3] = connection.pre_segment
- a[count,4] = connection.post_segment
- a[count,5] = connection.pre_fraction_along
- a[count,6] = connection.post_fraction_along
+ a[count,2] = connection.get_post_cell_id()
+ a[count,3] = connection.pre_segment
+ a[count,4] = connection.post_segment
+ a[count,5] = connection.pre_fraction_along
+ a[count,6] = connection.post_fraction_along
count=count+1
-
+
for connection in self.electrical_connection_instance_ws:
a[count,0] = connection.id
a[count,1] = connection.get_pre_cell_id()
- a[count,2] = connection.get_post_cell_id()
- a[count,3] = connection.pre_segment
- a[count,4] = connection.post_segment
- a[count,5] = connection.pre_fraction_along
- a[count,6] = connection.post_fraction_along
- a[count,7] = connection.get_weight()
+ a[count,2] = connection.get_post_cell_id()
+ a[count,3] = connection.pre_segment
+ a[count,4] = connection.post_segment
+ a[count,5] = connection.pre_fraction_along
+ a[count,6] = connection.post_fraction_along
+ a[count,7] = connection.get_weight()
count=count+1
-
+
array = h5file.create_carray(projGroup, self.id, obj=a, title="Connections of cells in "+ self.id)
-
+
array._f_setattr("column_0", "id")
array._f_setattr("column_1", "pre_cell_id")
array._f_setattr("column_2", "post_cell_id")
@@ -16796,7 +16800,7 @@ def exportHdf5(self, h5file, h5Group):
for col in extra_cols.keys():
array._f_setattr(col,extra_cols[col])
-
+
# end class ElectricalProjection
@@ -17244,78 +17248,78 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
def exportHdf5(self, h5file, h5Group):
#print("Exporting Projection: "+str(self.id)+" as HDF5")
-
+
import numpy
-
+
projGroup = h5file.create_group(h5Group, 'projection_'+self.id)
projGroup._f_setattr("id", self.id)
projGroup._f_setattr("type", "projection")
projGroup._f_setattr("presynapticPopulation", self.presynaptic_population)
projGroup._f_setattr("postsynapticPopulation", self.postsynaptic_population)
projGroup._f_setattr("synapse", self.synapse)
-
+
#print("Exporting "+str(len(self.connections))+" connections, "+str(len(self.connection_wds))+" connections with weight")
-
+
connection_wds = len(self.connection_wds) > 0
-
+
cols = 2
-
+
extra_cols = {}
-
+
from neuroml.utils import has_segment_fraction_info
-
+
include_segment_fraction = has_segment_fraction_info(self.connections) or has_segment_fraction_info(self.connection_wds)
-
+
if include_segment_fraction:
extra_cols["column_"+str(cols)] = "pre_segment_id"
extra_cols["column_"+str(cols+1)] = "post_segment_id"
extra_cols["column_"+str(cols+2)] = "pre_fraction_along"
extra_cols["column_"+str(cols+3)] = "post_fraction_along"
cols +=4
-
-
+
+
if connection_wds:
extra_cols["column_"+str(cols)] = "weight"
extra_cols["column_"+str(cols+1)] = "delay"
cols+=2
-
+
a = numpy.zeros([len(self.connections)+len(self.connection_wds), cols], numpy.float32)
-
+
count=0
-
+
for connection in self.connections:
####a[count,0] = connection.id
a[count,0] = connection.get_pre_cell_id()
- a[count,1] = connection.get_post_cell_id()
+ a[count,1] = connection.get_post_cell_id()
if include_segment_fraction:
- a[count,2] = connection.pre_segment_id
- a[count,3] = connection.post_segment_id
- a[count,4] = connection.pre_fraction_along
- a[count,5] = connection.post_fraction_along
+ a[count,2] = connection.pre_segment_id
+ a[count,3] = connection.post_segment_id
+ a[count,4] = connection.pre_fraction_along
+ a[count,5] = connection.post_fraction_along
count=count+1
-
+
for connection in self.connection_wds:
###a[count,0] = connection.id
a[count,0] = connection.get_pre_cell_id()
- a[count,1] = connection.get_post_cell_id()
-
+ a[count,1] = connection.get_post_cell_id()
+
if include_segment_fraction:
- a[count,2] = connection.pre_segment_id
- a[count,3] = connection.post_segment_id
- a[count,4] = connection.pre_fraction_along
- a[count,5] = connection.post_fraction_along
-
- a[count,cols-2] = connection.weight
+ a[count,2] = connection.pre_segment_id
+ a[count,3] = connection.post_segment_id
+ a[count,4] = connection.pre_fraction_along
+ a[count,5] = connection.post_fraction_along
+
+ a[count,cols-2] = connection.weight
if 'ms' in connection.delay:
delay = float(connection.delay[:-2].strip())
elif 's' in connection.delay:
delay = float(connection.delay[:-1].strip())*1000.
elif 'us' in connection.delay:
delay = float(connection.delay[:-2].strip())/1e3
-
- a[count,cols-1] = delay
+
+ a[count,cols-1] = delay
count=count+1
-
+
if len(a)>0:
array = h5file.create_carray(projGroup, self.id, obj=a, title="Connections of cells in "+ self.id)
@@ -17325,13 +17329,13 @@ def exportHdf5(self, h5file, h5Group):
for col in extra_cols.keys():
array._f_setattr(col,extra_cols[col])
-
-
+
+
def __str__(self):
return "Projection: "+self.id+" from "+self.presynaptic_population+" to "+self.postsynaptic_population+", synapse: "+self.synapse
-
-
-
+
+
+
# end class Projection
@@ -17769,21 +17773,21 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
# Get segment object by its id
def get_segment(self, segment_id):
-
+
for segment in self.morphology.segments:
if segment.id == segment_id:
return segment
-
+
raise Exception("Segment with id "+str(segment_id)+" not found in cell "+str(self.id))
-
- # Get the proximal point of a segment, even the proximal field is None and
- # so the proximal point is on the parent (at a point set by fraction_along)
+
+ # Get the proximal point of a segment, even the proximal field is None and
+ # so the proximal point is on the parent (at a point set by fraction_along)
def get_actual_proximal(self, segment_id):
-
+
segment = self.get_segment(segment_id)
if segment.proximal:
return segment.proximal
-
+
parent = self.get_segment(segment.parent.segments)
fract = float(segment.parent.fraction_along)
if fract==1:
@@ -17795,43 +17799,43 @@ def get_actual_proximal(self, segment_id):
pp = self.get_actual_proximal(segment.parent.segments)
p = Point3DWithDiam((1-fract)*pp.x+fract*pd.x, (1-fract)*pp.y+fract*pd.y, (1-fract)*pp.z+fract*pd.z)
p.diameter = (1-fract)*pp.diameter+fract*pd.diameter
-
+
return p
-
+
def get_segment_length(self, segment_id):
-
+
segment = self.get_segment(segment_id)
if segment.proximal:
return segment.length
else:
prox = self.get_actual_proximal(segment_id)
-
+
length = segment.distal.distance_to(prox)
-
+
return length
-
+
def get_segment_surface_area(self, segment_id):
-
+
segment = self.get_segment(segment_id)
if segment.proximal:
return segment.surface_area
else:
prox = self.get_actual_proximal(segment_id)
-
+
temp_seg = Segment(distal=segment.distal, proximal=prox)
-
+
return temp_seg.surface_area
-
+
def get_segment_volume(self, segment_id):
-
+
segment = self.get_segment(segment_id)
if segment.proximal:
return segment.volume
else:
prox = self.get_actual_proximal(segment_id)
-
+
temp_seg = Segment(distal=segment.distal, proximal=prox)
-
+
return temp_seg.volume
def get_segment_ids_vs_segments(self):
@@ -17841,48 +17845,48 @@ def get_segment_ids_vs_segments(self):
segments[segment.id] = segment
return segments
-
+
def get_all_segments_in_group(self,
segment_group,
assume_all_means_all=True):
-
+
if isinstance(segment_group, str):
for sg in self.morphology.segment_groups:
if sg.id == segment_group:
segment_group = sg
- if isinstance(segment_group, str):
-
+ if isinstance(segment_group, str):
+
if assume_all_means_all and segment_group=='all': # i.e. wasn't explicitly defined, but assume it means all segments
return [seg.id for seg in self.morphology.segments]
-
+
raise Exception('No segment group '+segment_group+ ' found in cell '+self.id)
-
+
all_segs = []
-
+
for member in segment_group.members:
if not member.segments in all_segs:
all_segs.append(member.segments)
-
-
+
+
for include in segment_group.includes:
segs_here = self.get_all_segments_in_group(include.segment_groups)
for s in segs_here:
if not s in all_segs:
all_segs.append(s)
-
+
return all_segs
-
- def get_ordered_segments_in_groups(self,
- group_list,
- check_parentage=False,
- include_cumulative_lengths=False,
- include_path_lengths=False,
+
+ def get_ordered_segments_in_groups(self,
+ group_list,
+ check_parentage=False,
+ include_cumulative_lengths=False,
+ include_path_lengths=False,
path_length_metric="Path Length from root"): # Only option supported
unord_segs = {}
other_segs = {}
-
+
if isinstance(group_list, str):
group_list = [group_list]
@@ -17890,26 +17894,26 @@ def get_ordered_segments_in_groups(self,
for sg in self.morphology.segment_groups:
all_segs_here = self.get_all_segments_in_group(sg)
-
+
if sg.id in group_list:
unord_segs[sg.id] = [segments[s] for s in all_segs_here]
else:
other_segs[sg.id] = [segments[s] for s in all_segs_here]
- ord_segs = {}
+ ord_segs = {}
from operator import attrgetter
- for key in unord_segs.keys():
+ for key in unord_segs.keys():
segs = unord_segs[key]
if len(segs)==1 or len(segs)==0:
ord_segs[key]=segs
else:
- ord_segs[key]=sorted(segs,key=attrgetter('id'),reverse=False)
+ ord_segs[key]=sorted(segs,key=attrgetter('id'),reverse=False)
if check_parentage:
# check parent ordering
-
- for key in ord_segs.keys():
+
+ for key in ord_segs.keys():
existing_ids = []
for s in ord_segs[key]:
if s.id != ord_segs[key][0].id:
@@ -17920,74 +17924,74 @@ def get_ordered_segments_in_groups(self,
if include_cumulative_lengths or include_path_lengths:
import math
-
+
cumulative_lengths = {}
path_lengths_to_proximal = {}
path_lengths_to_distal = {}
-
- for key in ord_segs.keys():
+
+ for key in ord_segs.keys():
cumulative_lengths[key] = []
path_lengths_to_proximal[key] = {}
path_lengths_to_distal[key] = {}
-
+
tot_len = 0
- for seg in ord_segs[key]:
-
+ for seg in ord_segs[key]:
+
length = self.get_segment_length(seg.id)
-
+
if not seg.parent or not seg.parent.segments in path_lengths_to_distal[key]:
-
+
path_lengths_to_proximal[key][seg.id] = 0
last_seg = seg
par_seg_element = seg.parent
while par_seg_element!=None:
-
+
par_seg = segments[par_seg_element.segments]
d = par_seg.distal
p = par_seg.proximal
-
+
if not p:
par_seg_parent_seg = segments[par_seg.parent.segments]
p = par_seg_parent_seg.distal
-
+
par_length = math.sqrt( (d.x-p.x)**2 + (d.y-p.y)**2 + (d.z-p.z)**2 )
-
+
fract = float(last_seg.parent.fraction_along)
path_lengths_to_proximal[key][seg.id] += par_length*fract
-
+
last_seg = par_seg
par_seg_element = par_seg.parent
-
-
+
+
else:
pd = path_lengths_to_distal[key][seg.parent.segments]
pp = path_lengths_to_proximal[key][seg.parent.segments]
fract = float(seg.parent.fraction_along)
-
+
path_lengths_to_proximal[key][seg.id] = pp + (pd - pp)*fract
-
+
path_lengths_to_distal[key][seg.id] = path_lengths_to_proximal[key][seg.id] + length
-
+
tot_len += length
cumulative_lengths[key].append(tot_len)
-
-
+
+
if include_path_lengths and not include_cumulative_lengths:
-
+
return ord_segs, path_lengths_to_proximal, path_lengths_to_distal
if include_cumulative_lengths and not include_path_lengths:
-
+
return ord_segs, cumulative_lengths
if include_cumulative_lengths and include_path_lengths:
-
+
return ord_segs, cumulative_lengths, path_lengths_to_proximal, path_lengths_to_distal
return ord_segs
-
-
-
+
+
+
def summary(self):
print("*******************************************************")
print("* Cell: "+str(self.id))
@@ -17995,7 +17999,7 @@ def summary(self):
print("* Segments: "+str(len(self.morphology.segments)))
print("* SegmentGroups: "+str(len(self.morphology.segment_groups)))
print("*******************************************************")
-
+
# end class Cell
@@ -20520,50 +20524,50 @@ def buildAttributes(self, node, attrs, already_processed):
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
super(ContinuousConnection, self).buildChildren(child_, node, nodeName_, True)
pass
-
+
def _get_cell_id(self, id_string):
return int(float(id_string))
-
-
+
+
def get_pre_cell_id(self):
-
+
return self._get_cell_id(self.pre_cell)
-
+
def get_post_cell_id(self):
-
+
return self._get_cell_id(self.post_cell)
-
+
def get_pre_segment_id(self):
-
+
return int(self.pre_segment)
-
+
def get_post_segment_id(self):
-
+
return int(self.post_segment)
def get_pre_fraction_along(self):
-
+
return float(self.pre_fraction_along)
-
+
def get_post_fraction_along(self):
-
+
return float(self.post_fraction_along)
-
-
+
+
def get_pre_info(self):
-
+
return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ '%.5f'%self.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '')
-
+
def get_post_info(self):
-
+
return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ '%.5f'%self.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '')
-
-
+
+
def __str__(self):
-
+
return "Continuous Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", pre comp: "+str(self.pre_component)+", post comp: "+str(self.post_component)
-
-
+
+
# end class ContinuousConnection
@@ -20658,49 +20662,49 @@ def buildAttributes(self, node, attrs, already_processed):
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
super(ElectricalConnection, self).buildChildren(child_, node, nodeName_, True)
pass
-
+
def _get_cell_id(self, id_string):
return int(float(id_string))
-
+
def get_pre_cell_id(self):
-
+
return self._get_cell_id(self.pre_cell)
-
+
def get_post_cell_id(self):
-
+
return self._get_cell_id(self.post_cell)
-
+
def get_pre_segment_id(self):
-
+
return int(self.pre_segment)
-
+
def get_post_segment_id(self):
-
+
return int(self.post_segment)
def get_pre_fraction_along(self):
-
+
return float(self.pre_fraction_along)
-
+
def get_post_fraction_along(self):
-
+
return float(self.post_fraction_along)
-
-
+
+
def get_pre_info(self):
-
+
return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ '%.5f'%self.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '')
-
+
def get_post_info(self):
-
+
return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ '%.5f'%self.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '')
-
-
+
+
def __str__(self):
-
+
return "Electrical Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", synapse: "+str(self.synapse)
-
-
+
+
# end class ElectricalConnection
@@ -20806,53 +20810,53 @@ def _get_cell_id(self, id_string):
return int(id_string.split('/')[2])
def get_pre_cell_id(self):
-
+
return self._get_cell_id(self.pre_cell_id)
-
+
def get_post_cell_id(self):
-
+
return self._get_cell_id(self.post_cell_id)
def get_pre_segment_id(self):
-
+
return int(self.pre_segment_id)
-
+
def get_post_segment_id(self):
-
+
return int(self.post_segment_id)
def get_pre_fraction_along(self):
-
+
return float(self.pre_fraction_along)
-
+
def get_post_fraction_along(self):
-
+
return float(self.post_fraction_along)
-
-
+
+
def get_pre_info(self):
-
+
return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ '%.5f'%self.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '')
-
+
def get_post_info(self):
-
+
return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ '%.5f'%self.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '')
-
+
def __str__(self):
-
+
return "Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())
-
-
+
+
def __str__(self):
-
+
return "Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", weight: "+'%f' % (float(self.weight))+", delay: "+'%.5f' % (self.get_delay_in_ms())+" ms"
-
+
def get_delay_in_ms(self):
if 'ms' in self.delay:
return float(self.delay[:-2].strip())
elif 's' in self.delay:
return float(self.delay[:-1].strip())*1000.0
-
+
# end class ConnectionWD
@@ -20930,42 +20934,42 @@ def _get_cell_id(self, id_string):
return int(id_string.split('/')[2])
def get_pre_cell_id(self):
-
+
return self._get_cell_id(self.pre_cell_id)
-
+
def get_post_cell_id(self):
-
+
return self._get_cell_id(self.post_cell_id)
def get_pre_segment_id(self):
-
+
return int(self.pre_segment_id)
-
+
def get_post_segment_id(self):
-
+
return int(self.post_segment_id)
def get_pre_fraction_along(self):
-
+
return float(self.pre_fraction_along)
-
+
def get_post_fraction_along(self):
-
+
return float(self.post_fraction_along)
-
-
+
+
def get_pre_info(self):
-
+
return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ '%.5f'%self.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '')
-
+
def get_post_info(self):
-
+
return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ '%.5f'%self.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '')
-
+
def __str__(self):
-
+
return "Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())
-
+
# end class Connection
@@ -22513,19 +22517,19 @@ def buildAttributes(self, node, attrs, already_processed):
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
super(ContinuousConnectionInstance, self).buildChildren(child_, node, nodeName_, True)
pass
-
+
def _get_cell_id(self, id_string):
if '[' in id_string:
return int(id_string.split('[')[1].split(']')[0])
else:
return int(id_string.split('/')[2])
-
-
+
+
def __str__(self):
-
+
return "Continuous Connection (Instance based) "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", pre comp: "+str(self.pre_component)+", post comp: "+str(self.post_component)
-
-
+
+
# end class ContinuousConnectionInstance
@@ -22604,18 +22608,18 @@ def buildAttributes(self, node, attrs, already_processed):
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
super(ElectricalConnectionInstance, self).buildChildren(child_, node, nodeName_, True)
pass
-
+
def _get_cell_id(self, id_string):
if '[' in id_string:
return int(id_string.split('[')[1].split(']')[0])
else:
return int(id_string.split('/')[2])
-
+
def __str__(self):
-
+
return "Electrical Connection (Instance based) "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", synapse: "+str(self.synapse)
-
-
+
+
# end class ElectricalConnectionInstance
@@ -23342,16 +23346,16 @@ def buildAttributes(self, node, attrs, already_processed):
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
super(ContinuousConnectionInstanceW, self).buildChildren(child_, node, nodeName_, True)
pass
-
+
def get_weight(self):
-
+
return float(self.weight) if self.weight!=None else 1.0
-
+
def __str__(self):
-
+
return "Continuous Connection (Instance based & weight) "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", pre comp: "+str(self.pre_component)+", post comp: "+str(self.post_component)+", weight: "+'%.6f'%self.get_weight()
-
-
+
+
# end class ContinuousConnectionInstanceW
@@ -23434,15 +23438,15 @@ def buildAttributes(self, node, attrs, already_processed):
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
super(ElectricalConnectionInstanceW, self).buildChildren(child_, node, nodeName_, True)
pass
-
+
def get_weight(self):
-
+
return float(self.weight) if self.weight!=None else 1.0
-
+
def __str__(self):
-
+
return "Electrical Connection (Instance based & weight) "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", synapse: "+str(self.synapse) + ", weight: "+'%.6f'%self.get_weight()
-
+
# end class ElectricalConnectionInstanceW
From cfa3798cce6c03868050206bd3dd4842590e5279 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Mon, 15 Feb 2021 14:48:40 +0000
Subject: [PATCH 14/44] Fixes https://github.com/NeuroML/pyNeuroML/issues/73
---
neuroml/nml/helper_methods.py | 69 ++++++++++++++++++++++++++++++++++-
neuroml/nml/nml.py | 55 ++++++++++++++++++++++++++--
2 files changed, 118 insertions(+), 6 deletions(-)
diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py
index bdd9d526..c39a0071 100644
--- a/neuroml/nml/helper_methods.py
+++ b/neuroml/nml/helper_methods.py
@@ -694,6 +694,12 @@ def summary(self, show_includes=True, show_non_network=True):
info+="* "+str(tot_conns)+" connections in "+str(tot_proj)+" projections \\n"+proj_info+"*\\n"
+ if len(network.synaptic_connections)>0:
+ info+="* "+str(len(network.synaptic_connections))+" explicit synaptic connections (outside of projections)\\n"
+ for sc in network.synaptic_connections:
+ info+="* "+str(sc)+"\\n"
+ info+="*\\n"
+
tot_input_lists = 0
tot_inputs = 0
input_info = ""
@@ -709,8 +715,11 @@ def summary(self, show_includes=True, show_non_network=True):
info+="* "+str(tot_inputs)+" inputs in "+str(tot_input_lists)+" input lists \\n"+input_info+"*\\n"
- for el in network.explicit_inputs:
- info+="* Explicit input to "+el.target+" of type "+el.input+"\\n*\\n"
+ if len(network.explicit_inputs)>0:
+ info+="* "+str(len(network.explicit_inputs))+" explicit inputs (outside of input lists)\\n"
+ for el in network.explicit_inputs:
+ info+="* "+str(el)+"\\n"
+ info+="*\\n"
info+="*******************************************************"
@@ -1423,6 +1432,62 @@ def exportHdf5(self, h5file, h5Group):
METHOD_SPECS+=(ms,)
+synaptic_connections = MethodSpec(name='synaptic_connections',
+ source='''\
+
+ def _get_cell_id(self,ref):
+ if '[' in ref:
+ return int(ref.split('[')[1].split(']')[0])
+ else:
+ return int(ref.split('/')[2])
+
+ def _get_population(self,ref):
+ if '[' in ref:
+ return ref.split('[')[0]
+ else:
+ return ref.split('/')[0]
+
+ def __str__(self):
+
+ dest = self.destination if self.destination else 'unspecified'
+ return "Synaptic connection from "+str(self._get_population(self.from_))+"(cell "+str(self._get_cell_id(self.from_))+ \
+ ") -> "+str(self._get_population(self.to))+"(cell "+str(self._get_cell_id(self.to))+"), syn: "+self.synapse+", destination: "+dest
+
+
+ ''',
+ class_names=("SynapticConnection")
+ )
+
+METHOD_SPECS+=(synaptic_connections,)
+
+explicit_inputs = MethodSpec(name='explicit_inputs',
+ source='''\
+
+ def get_target_cell_id(self,):
+ if '[' in self.target:
+ return int(self.target.split('[')[1].split(']')[0])
+ else:
+ return int(self.target.split('/')[2])
+
+ def get_target_population(self,):
+ if '[' in self.target:
+ return self.target.split('[')[0]
+ else:
+ return self.target.split('/')[0]
+
+ def __str__(self):
+
+ dest = self.destination if self.destination else 'unspecified'
+ return "Explicit Input of type "+str(self.input)+" to "+self.get_target_population()+"(cell "+str(self.get_target_cell_id())+ \
+ "), destination: "+dest
+
+
+ ''',
+ class_names=("ExplicitInput")
+ )
+
+METHOD_SPECS+=(explicit_inputs,)
+
def test():
for spec in METHOD_SPECS:
spec.show()
diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py
index 90580e43..1af4fbc1 100644
--- a/neuroml/nml/nml.py
+++ b/neuroml/nml/nml.py
@@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
#
-# Generated Mon Feb 15 14:02:24 2021 by generateDS.py version 2.30.11.
+# Generated Mon Feb 15 14:47:04 2021 by generateDS.py version 2.30.11.
# Python 2.7.15 |Anaconda, Inc.| (default, Oct 23 2018, 13:35:16) [GCC 4.2.1 Compatible Clang 4.0.1 (tags/RELEASE_401/final)]
#
# Command line options:
@@ -5932,7 +5932,26 @@ def buildAttributes(self, node, attrs, already_processed):
self.validate_NmlId(self.destination) # validate type NmlId
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
-# end class SynapticConnection
+
+ def _get_cell_id(self,ref):
+ if '[' in ref:
+ return int(ref.split('[')[1].split(']')[0])
+ else:
+ return int(ref.split('/')[2])
+
+ def _get_population(self,ref):
+ if '[' in ref:
+ return ref.split('[')[0]
+ else:
+ return ref.split('/')[0]
+
+ def __str__(self):
+
+ dest = self.destination if self.destination else 'unspecified'
+ return "Synaptic connection from "+str(self._get_population(self.from_))+"(cell "+str(self._get_cell_id(self.from_))+ ") -> "+str(self._get_population(self.to))+"(cell "+str(self._get_cell_id(self.to))+"), syn: "+self.synapse+", destination: "+dest
+
+
+ # end class SynapticConnection
class ExplicitInput(GeneratedsSuper):
@@ -6047,6 +6066,25 @@ def __str__(self):
return "Input "+str(self.id)+": "+str(self.get_target_cell_id())+":"+str(self.get_segment_id())+"("+'%.6f'%self.get_fraction_along()+")"
+
+ def get_target_cell_id(self,):
+ if '[' in self.target:
+ return int(self.target.split('[')[1].split(']')[0])
+ else:
+ return int(self.target.split('/')[2])
+
+ def get_target_population(self,):
+ if '[' in self.target:
+ return self.target.split('[')[0]
+ else:
+ return self.target.split('/')[0]
+
+ def __str__(self):
+
+ dest = self.destination if self.destination else 'unspecified'
+ return "Explicit Input of type "+str(self.input)+" to "+self.get_target_population()+"(cell "+str(self.get_target_cell_id())+ "), destination: "+dest
+
+
# end class ExplicitInput
@@ -16144,6 +16182,12 @@ def summary(self, show_includes=True, show_non_network=True):
info+="* "+str(tot_conns)+" connections in "+str(tot_proj)+" projections \n"+proj_info+"*\n"
+ if len(network.synaptic_connections)>0:
+ info+="* "+str(len(network.synaptic_connections))+" explicit synaptic connections (outside of projections)\n"
+ for sc in network.synaptic_connections:
+ info+="* "+str(sc)+"\n"
+ info+="*\n"
+
tot_input_lists = 0
tot_inputs = 0
input_info = ""
@@ -16159,8 +16203,11 @@ def summary(self, show_includes=True, show_non_network=True):
info+="* "+str(tot_inputs)+" inputs in "+str(tot_input_lists)+" input lists \n"+input_info+"*\n"
- for el in network.explicit_inputs:
- info+="* Explicit input to "+el.target+" of type "+el.input+"\n*\n"
+ if len(network.explicit_inputs)>0:
+ info+="* "+str(len(network.explicit_inputs))+" explicit inputs (outside of input lists)\n"
+ for el in network.explicit_inputs:
+ info+="* "+str(el)+"\n"
+ info+="*\n"
info+="*******************************************************"
From b717e2d5efbe8ddd75e5fde60704c4d2782a2c2c Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Mon, 15 Feb 2021 14:55:48 +0000
Subject: [PATCH 15/44] Add note...
---
neuroml/nml/README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/neuroml/nml/README.md b/neuroml/nml/README.md
index 7116099c..7fb4b7b8 100644
--- a/neuroml/nml/README.md
+++ b/neuroml/nml/README.md
@@ -16,3 +16,4 @@ You may have to add the current folder to your PYTHONPATH, i.e.
export PYTHONPATH=$PYTHONPATH:.
+Note from PG Feb 2021: retested & regenerated using Python 2.7 with generateDS.py v2.30.11- currently fails when generated with Python 3...
From 5259eecdba2ea98e7e808b146e9973857c444058 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Wed, 17 Feb 2021 15:42:51 +0000
Subject: [PATCH 16/44] Example files regenerated
---
README.md | 10 +-
neuroml/examples/test_files/complete.nml.h5 | Bin 70042 -> 69930 bytes
neuroml/examples/test_files/testh5.nml | 220 ++++++++++----------
3 files changed, 116 insertions(+), 114 deletions(-)
diff --git a/README.md b/README.md
index e0b87c59..45ef5928 100644
--- a/README.md
+++ b/README.md
@@ -29,8 +29,11 @@ Most of the work happens in the [development branch](https://github.com/NeuralEn
## Changelog
-### version 0.2.50
- - Updated to use the final stable Schema for NeuroML v2.0
+### version 0.2.54
+ - Using Schema for NeuroML v2.1. Better compatibility with Python 3
+
+ ### version 0.2.50
+ - Updated to use the final stable Schema for NeuroML v2.0
### version 0.2.47
- Updated to use the final stable Schema for NeuroML v2beta5
@@ -44,7 +47,7 @@ Most of the work happens in the [development branch](https://github.com/NeuralEn
### version 0.2.2
- Updated to use the Schema for NeuroML v2beta3
- - Ensures numpy & pytables are only required when using non-XML loaders/writers
+ - Ensures numpy & pytables are only required when using non-XML loaders/writers
### version 0.2.0
- Updated to use the Schema for NeuroML v2beta2
@@ -69,4 +72,3 @@ Most of the work happens in the [development branch](https://github.com/NeuralEn
[![Build Status](https://api.travis-ci.org/NeuralEnsemble/libNeuroML.png)](https://travis-ci.org/NeuralEnsemble/libNeuroML)
-
diff --git a/neuroml/examples/test_files/complete.nml.h5 b/neuroml/examples/test_files/complete.nml.h5
index c5cb4929d569e8d555315381c71eebcc2e981b1f..85ee88f20da02a95a84864aed0211e8e9162825a 100644
GIT binary patch
delta 3785
zcmbVPT~Jd;6u!Ggz4z8a5N!An3|Ik~DN?;)#u;m>R{5zmFixvX8~%id(-Bf>4L+3M
zjDR?{(9;Jd)f#c8gG{FquVWb=+SHB~@g?@5%^Q4;{v8+ugex64Fi!$;0m1
zvuF35@BHnJ>%6YNSJ#z|ay)CZ7a={AEm}v$A(4)gq6M%n3+dGyU8W8-LaCTd*hj@J
zVInp{mq}Fon6#6OBho6Qz@(IkoDe5xtu-wdw4a4dg1Lzi2l(1~jS&uy=Vt^Kg9zqj
z`fXhmjH?&)8sT2a))vI(Wr9&7kg!A$1)Z>`XwU1f5l%c2ry^Joz*C#54lJui8UHJa
z;k1}Ki>Wh)VtO32k&HR96zjp*T*(=)DHI&)x!5fjjlXt
z!uY|c1XAb9C8n{p30{t`P$JEkhxg~YbW)@)J(fUSI+Rc+4RSNNGb^FqU{fM7M5$$T
z*a*D|2BPUn;vsAn)SMNsVAKN_Qfx|~lEHFte=JyqYh_yZHzF7piE_x5;1X9+V7zEF
zsHqy%)H-Rv$W%^rw`QR}wKUCu(O#%G+LT!zbBP5T!@PPmovM1Gjq5Y0a#uzj4!j8c
z`G=4f?5kJnVoWbpg1Nwqdonzp0tEUsK2p?o*T^#Jfcxw6$(f8idP{lUMm?{fV8$E;
zl(tGL+`v!-D2hULI?sS{U_Hni>Xd;zw&h#T<|tuQE3}8W3U`umRH0RuoKHE$`Bu~g
z7xEiWB_tJuuppD#TCf!3-?7?hl4O((pKhfvR$TYLT%9~*`5!7HBw%&
zeKTC#Mya7TLbb^&2n1ebpqL09JaH+_T8#x*sM#D%sUz08PX8U
z80~1I|R8&t*`h2JvZ|)&X9wNj$+wXnv9BhC1|3kptBJ*^NxfA|
z3Z)Xp8fboPI!;4V+{m(wjw2c^+bLaar};ZXOdhl+${p{s>jexsX_=@;MemWinK%r|
zO@~^1+~{D=g@Mpcjq|hqHbw;Hgm$(zo?!DZmvh?Z%JuQ}@L|ReKDZru+B$wV*+T`~
z?fHVXHYM_CG^s@0@BwwzKRtixh*cVIW>zuJ4??RW{>pRg&BevAer%`}qOq+VK27$DFY%CzvR9XV;00;j_8
z;^7`BugRG0%QQ1ikyQIojr&-Zao0!iOM^ln%s1C)c{HA33QStA;X${j{{@T^Vc!{>
z;&-vVnbT}uFjV%^T>4yV$iuXf7{1YA8sYx4bi8-giUfGo-GF^x&OwL!&T8Sb^!lU+
zF+7bS-EB|OD4P{42I6aG5Btm0t4vo=Zi25x{$<|9{b^zk%%<7K5&7acxS*3ig24-CA4NEKyr)-kbCj2xtEU$pvAxCaA3=OEKhqtyg0vS-G_F$1yAR-=TPe|diTraCrTO4C6LKl4zt+B@x&E(G%yx*r+TfyYx4o|uL+PeY*zhtT;Q
>
zc|(la9`-3=pPnE^bQ~cWXcdIW2k0~y_1QVj6iNHJ_(N)H
z(IVz_Xwh0a+L9TZisW2!+QhY6)UGW~41XED$9sZYBsP@-Vm^mn4ocyT9b6!}%v_^TC~i&TldxW$Shp|X@-XT0vRFNb
zSi7tp$`{})A}Mj62!7wpwXKf{={+^
zZBhffqu?b+tL>qjUupS83QHVN_UCGKf_hCl^;KnY%#$pf4y{s(V@1EOk
z8plpNXj7Dr5lk)*pceJk3cVin!2P@e`YCoWW9ziB!I4Xlh_K}Xb@Tpyfpo%3Zk4YE6|*7UmL(xN$SmJ
zqDL;M-Nd}!-dg~94n3W<^;)6(WJW4=X&(IxhxMV#+VW$taLe+=RQOKRi=&lad#W2H
zn5eTcKQ`Op#O4fUvCj^hw3)uo3Rs(s^ezLZ&Z4g_q6XLxtG?v*l#e1PHB&yeaOvb{
z{5j`iozk+0l2usV`|^Vx9Q*UR@;Sk
zzJ9yWU2fC@pA>u1Ubs>mMZdjRqkrg7EnIii;@WpqTM5x)YYD9UY7Jv7<_xCm7>jS_
zGqBG>WKo)XU}|4B?mwuG)U(`BR~@*Eh+{v8=K@r|mzpdsXMUeA|D2hR3Y*;(ZZrc*
zg_qKkc`(ZFA1Z|3LG+J$u~M(c7b@Y=Rp#=jm)Y9h%gAoi#u07Q_c6)#J{Fp#_3O#d
zH{1sqy6Ur0P`y`g#>6VxkOZwq%;y_j$7SJZIOb+O7oBK~8<8xh_#~|76zj4xTs|!H
z2J2(>#>85R>wi5w4^w7?eKs-S@5XDU_|Fmy%jP4m=eyyk8(W)UUGZT``^Kgy!iSG)
zgpVBy;yj||>B6CAfr<7p8T!3Bl-RkD-+YoC!R?c|Bq+$?M=+^oo}!6Bfsw;iNbMxs4}Fboow`}ZdyI3I=#f&
z`&w`_oebRh?f~AOtYJgr@bD@twsSc+-|Y>u5{=CAlW2Hp3T=4T_$!xCNf24hKm-{t
z0ddGk*$uvd*8GU$)@u>U*ZJ`+uZ6}NhroW{p!QV$OYqE%1kccVHyUZ(2(Mo)ka@>@
ze8$J$g_Ax0Pz~aREGP-<#ZyEt&f3rNYpgw|SeJ=Qa65l3RrBuf>1BMu}5^R
z@WzoY)}ee0)+E^!zeR$lb@T53omObrO$LACzVNHcY54hGq?({IHFIP>#vd7YC2`#s
ze!o>E$q&*S%>!H4ojy`8Wy}F@*Ta
z5>rIbxZ3q-K1QADmB)Jh!l3ws-7R%7h0^zu^`ud4eE-!l7z$a)xcKjYLBUOvT86bl
z<};5e{{M#rku+!+IFcvN-Dn6lPIyr}C=-#B!POI5{Cewe?xnx2B
diff --git a/neuroml/examples/test_files/testh5.nml b/neuroml/examples/test_files/testh5.nml
index 4a2759fa..816ebc5a 100644
--- a/neuroml/examples/test_files/testh5.nml
+++ b/neuroml/examples/test_files/testh5.nml
@@ -12,137 +12,137 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
From 8c16cf73bf525f9697a9c67f8063bd6f0c566439 Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 11:05:33 +0000
Subject: [PATCH 17/44] enh(requirements): remove python max version limit for
simplejson
---
requirements.txt | 2 +-
setup.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/requirements.txt b/requirements.txt
index 9b4e7051..86d02b42 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,7 +4,7 @@ cython
numpy
pymongo
numexpr
-simplejson; python_version < '3.5'
+simplejson
tables>=3.3.0
jsonpickle>=0.9.6
nose
diff --git a/setup.py b/setup.py
index d41a8895..78ee8f8c 100644
--- a/setup.py
+++ b/setup.py
@@ -25,7 +25,7 @@
"numpy",
"pymongo",
"numexpr",
- "simplejson; python_version < '3.5'",
+ "simplejson",
"tables>=3.3.0",
"jsonpickle>=0.9.6"
]},
From 438ffb6ea41637920bab234606624780c349b0ac Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 11:10:41 +0000
Subject: [PATCH 18/44] chore: update setup.py classifiers
---
setup.py | 13 +++++++++++++
1 file changed, 13 insertions(+)
diff --git a/setup.py b/setup.py
index 78ee8f8c..0c5bee2d 100644
--- a/setup.py
+++ b/setup.py
@@ -36,7 +36,20 @@
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
+ 'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.1',
+ 'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ 'Programming Language :: Python :: 3.9',
+ 'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering']
)
From 6943cd19d975f39222ea2c7173b74ba600b61919 Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 11:13:45 +0000
Subject: [PATCH 19/44] chore: ignore .venv virtualenv directory
---
.gitignore | 2 ++
1 file changed, 2 insertions(+)
diff --git a/.gitignore b/.gitignore
index 157897d1..0284d3ec 100644
--- a/.gitignore
+++ b/.gitignore
@@ -48,3 +48,5 @@ neuroml/test/*.h5
/tests.log
/notebooks/.ipynb_checkpoints
/mongoo
+
+.venv
From f5f67b80908ed856b267f1d4a6e8fc3cb7fa9eca Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 11:17:48 +0000
Subject: [PATCH 20/44] chore: remove unused imports
---
neuroml/arraymorph.py | 2 --
1 file changed, 2 deletions(-)
diff --git a/neuroml/arraymorph.py b/neuroml/arraymorph.py
index 907f4082..251587d8 100644
--- a/neuroml/arraymorph.py
+++ b/neuroml/arraymorph.py
@@ -2,9 +2,7 @@
Prototype for object model backend for the libNeuroML project
"""
-import math
import numpy as np
-import numpy.ma as ma
import neuroml
From b955f93e9f3943a3c2e507763f25c12f1a70dfee Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 11:20:08 +0000
Subject: [PATCH 21/44] cosmetic: remove trailing whitespace
---
neuroml/arraymorph.py | 44 +++++++++++++++++++++----------------------
1 file changed, 22 insertions(+), 22 deletions(-)
diff --git a/neuroml/arraymorph.py b/neuroml/arraymorph.py
index 251587d8..10d9c7d8 100644
--- a/neuroml/arraymorph.py
+++ b/neuroml/arraymorph.py
@@ -10,7 +10,7 @@ class ArrayMorphology(neuroml.Morphology):
"""Core of the array-based object model backend.
Provides the core arrays - vertices,connectivity etc.
- node_types.
+ node_types.
The connectivity array is a list of indices pointing to which
other element an element is attached. So for instance,
@@ -19,7 +19,7 @@ class ArrayMorphology(neuroml.Morphology):
- EXAMPLE:
- Vertices[3] and connectivity[3] refer to the vertex
+ Vertices[3] and connectivity[3] refer to the vertex
and connectivity of the same node.
.. note::
@@ -41,7 +41,7 @@ def __init__(self,
self.connectivity = np.array(connectivity)
self.vertices = np.array(vertices)
-
+
self.id = id
if np.any(physical_mask):
@@ -92,8 +92,8 @@ def valid_ids(self):
valid_flag = (internal_id == external_id) * valid_flag
return valid_flag
-
-
+
+
@property
def __all_nodes_satisfied(self):
m = self.vertices.shape[0]
@@ -102,14 +102,14 @@ def __all_nodes_satisfied(self):
all_nodes_satisfied = (m == n == p)
return all_nodes_satisfied
-
+
@property
def root_index(self):
- return np.where(self.connectivity == -1)[0][0]
+ return np.where(self.connectivity == -1)[0][0]
@property
def root_vertex(self):
- return self.vertices[self.root_index]
+ return self.vertices[self.root_index]
@property
def num_vertices(self):
@@ -120,7 +120,7 @@ def physical_indices(self):
"""returns indices of vertices which are physical"""
physical_indices = np.where(self.physical_mask == 0)[0]
return physical_indices
-
+
def children(self,index):
"""Returns an array with indexes of children"""
return np.where(self.connectivity == index)
@@ -132,7 +132,7 @@ def to_root(self,index):
"""
old_root_index = self.root_index
- new_root_index = index
+ new_root_index = index
#do a tree traversal:
parent_index = self.connectivity[index]
grandparent_index=self.connectivity[parent_index]
@@ -153,13 +153,13 @@ def vertex(self,index):
def __len__(self):
return len(self.connectivity)
-
+
def pop(self,index):
"""
TODO:This is failing tests (understandably) - need to fix!
Deletes a node from the morphology, its children become
children of the deleted node's parent.
- """
+ """
self.vertices = np.delete(self.vertices,index)
self.node_types = np.delete(self.node_types,index)
@@ -186,16 +186,16 @@ def to_neuroml_morphology(self,id=""):
def segment_from_vertex_index(self,index):
parent_index = self.connectivity[index]
-
+
node_x = self.vertices[index][0]
node_y = self.vertices[index][1]
node_z = self.vertices[index][2]
node_d = self.vertices[index][3]
-
+
parent_x = self.vertices[parent_index][0]
parent_y = self.vertices[parent_index][1]
parent_z = self.vertices[parent_index][2]
- parent_d = self.vertices[parent_index][3]
+ parent_d = self.vertices[parent_index][3]
p = neuroml.Point3DWithDiam(x=node_x,
y=node_y,
@@ -207,7 +207,7 @@ def segment_from_vertex_index(self,index):
z=parent_z,
diameter=parent_d)
-
+
seg = neuroml.Segment(proximal=p,
distal=d,
id=index)
@@ -216,7 +216,7 @@ def segment_from_vertex_index(self,index):
seg.parent = parent
return seg
-
+
class SegmentList(object):
"""
This class is a proxy, it returns a segment either
@@ -247,7 +247,7 @@ def __len__(self):
segments which is number of vertices - 1 and minus all
floating segments.
"""
-
+
num_vertices = self.arraymorph.num_vertices
num_floating = np.sum(self.arraymorph.physical_mask)
num_segments = num_vertices - num_floating -1
@@ -283,7 +283,7 @@ def append(self,segment):
"""
dist_vertex_index = len(self.arraymorph.vertices)
prox_vertex_index = dist_vertex_index + 1
-
+
prox_x = segment.proximal.x
prox_y = segment.proximal.y
prox_z = segment.proximal.z
@@ -293,7 +293,7 @@ def append(self,segment):
dist_y = segment.distal.y
dist_z = segment.distal.z
distal_diam = segment.distal.diameter
-
+
prox_vertex = [prox_x,prox_y,prox_z,prox_diam]
dist_vertex = [dist_x,dist_y,dist_z,distal_diam]
@@ -303,11 +303,11 @@ def append(self,segment):
self.arraymorph.vertices = np.array([dist_vertex,prox_vertex])
self.arraymorph.connectivity = np.append(self.arraymorph.connectivity,[-1,dist_vertex_index])
-
+
if len(self.arraymorph.physical_mask) == 0:
self.arraymorph.physical_mask = np.array([0,0])
else:
self.arraymorph.physical_mask = np.append(self.arraymorph.physical_mask,[1,0])
-
+
segment_index = len(self) - 1
self.instantiated_segments[segment_index] = segment
From c84077f2cce876f8e263550293d3580a5625e637 Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 11:44:05 +0000
Subject: [PATCH 22/44] chore(tests): ignore Numpy's VisibleDeprecationWarning
We're intentionally testing invalid values for vertices here. Numpy
doesn't like us using elements with different dimensions when creating a
np.array in arraymorph.py and throws a VisibleDeprecationWarning. This
is expected, so we suppress this warning.
---
neuroml/test/test_arraymorph.py | 12 +++++++++---
1 file changed, 9 insertions(+), 3 deletions(-)
diff --git a/neuroml/test/test_arraymorph.py b/neuroml/test/test_arraymorph.py
index a5b8fb1c..e8fb887f 100644
--- a/neuroml/test/test_arraymorph.py
+++ b/neuroml/test/test_arraymorph.py
@@ -1,3 +1,4 @@
+import warnings
import neuroml.arraymorph as am
import neuroml
import numpy as np
@@ -275,9 +276,14 @@ def test_valid_morphology(self):
Should return false if morphology is invalid
"""
- vertices=[[0,0,0],[1,1]]
- connectivity=[-1,0]
- self.assertRaises(AssertionError,am.ArrayMorphology,vertices,connectivity)
+ # We're using vertices with inconsistent dimensions here, which Numpy
+ # does not like.
+ # Ignore the VisibleDeprecationWarning that numpy throws.
+ with warnings.catch_warnings():
+ warnings.filterwarnings("ignore", "Creating an ndarray from ragged nested sequences")
+ vertices=[[0,0,0],[1,1]]
+ connectivity=[-1,0]
+ self.assertRaises(AssertionError,am.ArrayMorphology,vertices,connectivity)
vertices=[[0,0,0],[1,1,1]]
connectivity=[-1,0,0]
From b7840ece1b060a6512ba87c818031b38495a3e8d Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 11:53:25 +0000
Subject: [PATCH 23/44] chore: remove unused imports
---
neuroml/test/test_writers.py | 3 ---
1 file changed, 3 deletions(-)
diff --git a/neuroml/test/test_writers.py b/neuroml/test/test_writers.py
index c6d84a72..421118b9 100644
--- a/neuroml/test/test_writers.py
+++ b/neuroml/test/test_writers.py
@@ -4,9 +4,6 @@
"""
import neuroml
-from neuroml import writers
-from neuroml import loaders
-import os
import numpy as np
from neuroml import arraymorph as am
import tempfile
From 7da280aa65f0d7d0012499abf1fd7db27bbeda2c Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 11:56:06 +0000
Subject: [PATCH 24/44] cosmetic: whitespace fixes
---
neuroml/test/test_writers.py | 56 +++++++++++++++++-------------------
1 file changed, 27 insertions(+), 29 deletions(-)
diff --git a/neuroml/test/test_writers.py b/neuroml/test/test_writers.py
index 421118b9..e811585b 100644
--- a/neuroml/test/test_writers.py
+++ b/neuroml/test/test_writers.py
@@ -13,8 +13,9 @@
except ImportError:
import unittest
+
class TestJSONWriter(unittest.TestCase):
-
+
def setUp(self):
num_segments = int(1e4) #Per cell
num_vertices = num_segments + 1
@@ -25,7 +26,7 @@ def setUp(self):
d = np.linspace(1,0.01,num_vertices)
vertices = np.array([x,y,z,d]).T
-
+
connectivity = range(-1,num_segments)
big_arraymorph = am.ArrayMorphology(vertices = vertices,
@@ -53,17 +54,17 @@ def setUp(self):
self.cell_1 = neuroml.Cell(id='cell_1')
self.cell_2 = neuroml.Cell(id='cell_2')
self.cell_3 = neuroml.Cell(id='cell_3')
-
+
self.cell_1.morphology = transposed_arraymorph
self.cell_2.morphology = fatter_arraymorph
self.cell_3.morphology = big_arraymorph
-
+
self.test_doc = neuroml.NeuroMLDocument(id='TestDocument')
self.test_doc.cells.append(self.cell_1)
self.test_doc.cells.append(self.cell_2)
self.test_doc.cells.append(self.cell_3)
- self.test_doc.cells.append(neuroml_cell)
+ self.test_doc.cells.append(neuroml_cell)
def test_write_to_mongodb_backend(self):
writer_method = neuroml.writers.JSONWriter.write_to_mongodb
@@ -83,28 +84,27 @@ def test_write_to_mongodb_expected(self):
"""
db_name = 'test_db_4'
-
+
writer_method = neuroml.writers.JSONWriter.write_to_mongodb
writer_method(neuroml_document = self.test_doc,
db = db_name)
loader_method = neuroml.loaders.JSONLoader.load_from_mongodb
-
+
doc = loader_method(db = db_name,
id = self.test_doc.id,
host = 'localhost')
-
+
array_morph = doc.cells[2].morphology
-
+
connectivity_equal = np.testing.assert_array_equal(array_morph.connectivity,self.big_arraymorph.connectivity)
physical_masks_equal = np.testing.assert_array_equal(array_morph.physical_mask,self.big_arraymorph.physical_mask)
vertices_equal = np.testing.assert_array_equal(array_morph.vertices,self.big_arraymorph.vertices)
-
-
+
+
self.assertEqual(connectivity_equal,None) #None when equal
self.assertEqual(physical_masks_equal,None) #None when equal
- self.assertEqual(vertices_equal,None) #None when equal
-
+ self.assertEqual(vertices_equal,None) #None when equal
def test_write_multiple_morphologies(self):
filename = tempfile.mkstemp()[1]
@@ -119,26 +119,26 @@ def test_write_expected(self):
More of an integration test, write a file and confirm the contents are
as expected.
"""
-
+
filename = tempfile.mkstemp()[1]
-
+
writer_method = neuroml.writers.JSONWriter.write
writer_method(self.test_doc,filename)
-
+
loader_method = neuroml.loaders.JSONLoader.load
-
+
doc = loader_method(filename)
-
+
array_morph = doc.cells[2].morphology
-
+
connectivity_equal = np.testing.assert_array_equal(array_morph.connectivity,self.big_arraymorph.connectivity)
physical_masks_equal = np.testing.assert_array_equal(array_morph.physical_mask,self.big_arraymorph.physical_mask)
vertices_equal = np.testing.assert_array_equal(array_morph.vertices,self.big_arraymorph.vertices)
-
-
+
+
self.assertEqual(connectivity_equal,None) #None when equal
self.assertEqual(physical_masks_equal,None) #None when equal
- self.assertEqual(vertices_equal,None) #None when equal
+ self.assertEqual(vertices_equal,None) #None when equal
def test_writer_instance(self):
filename = tempfile.mkstemp()[1]
@@ -151,7 +151,6 @@ def test_writer_instance(self):
self.assertIsInstance(document,neuroml.NeuroMLDocument)
-
class TestNeuroMLWriter(unittest.TestCase):
def test_write_nonsense(self):
@@ -164,7 +163,7 @@ def test_write_nonsense(self):
self.assertRaises(AttributeError, writer_method, a, "tmpfile")
class TestArrayMorphWriter(unittest.TestCase):
-
+
def setUp(self):
num_segments = int(1e6)
num_vertices = num_segments + 1
@@ -175,7 +174,7 @@ def setUp(self):
d = np.linspace(1,0.01,num_vertices)
vertices = np.array([x,y,z,d]).T
-
+
connectivity = range(-1,num_segments)
big_arraymorph = am.ArrayMorphology(vertices = vertices,
@@ -200,7 +199,7 @@ def setUp(self):
self.cell_1 = neuroml.Cell(id='cell_1')
self.cell_2 = neuroml.Cell(id='cell_2')
self.cell_3 = neuroml.Cell(id='cell_3')
-
+
self.cell_1.morphology = transposed_arraymorph
self.cell_2.morphology = fatter_arraymorph
self.cell_3.morphology = big_arraymorph
@@ -210,7 +209,7 @@ def setUp(self):
self.test_doc.cells.append(self.cell_1)
self.test_doc.cells.append(self.cell_2)
self.test_doc.cells.append(self.cell_3)
-
+
def test_write_big_arraymorph(self):
writer_method = neuroml.writers.ArrayMorphWriter.write
filename = tempfile.mkstemp()[1]
@@ -239,10 +238,9 @@ def test_write_expected(self):
physical_masks_equal = np.testing.assert_array_equal(array_morph.physical_mask,self.big_arraymorph.physical_mask)
vertices_equal = np.testing.assert_array_equal(array_morph.vertices,self.big_arraymorph.vertices)
-
self.assertEqual(connectivity_equal,None) #None when equal
self.assertEqual(physical_masks_equal,None) #None when equal
- self.assertEqual(vertices_equal,None) #None when equal
+ self.assertEqual(vertices_equal,None) #None when equal
def test_write_multiple_morphologies(self):
filename = tempfile.mkstemp()[1]
From 1852db411ece46dc2c9a7cfb63eb6ce1322d618d Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 12:14:16 +0000
Subject: [PATCH 25/44] style: ignore some flake warnings
Since the code uses a style that flake doesn't like, disable some
of these warnings. If/when we pick a style etc., these can be
re-enabled.
---
setup.cfg | 5 +++++
1 file changed, 5 insertions(+)
create mode 100644 setup.cfg
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 00000000..72248c82
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,5 @@
+[flake8]
+# ignore:
+# spacing around operators, comment blocks, in argument lists
+# lines too long
+ignore = E501,E502,F403,F405,E231,E228,E225,E226,E265,E261
From b69520dd70ac8d197f17be29d94e67e35570e83d Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 12:17:55 +0000
Subject: [PATCH 26/44] cosmetic: whitespace fixes
---
neuroml/test/test_cell.py | 115 +++++++++++++++++++-------------------
1 file changed, 58 insertions(+), 57 deletions(-)
diff --git a/neuroml/test/test_cell.py b/neuroml/test/test_cell.py
index c17a6323..3ee4e895 100644
--- a/neuroml/test/test_cell.py
+++ b/neuroml/test/test_cell.py
@@ -22,14 +22,15 @@
except ImportError:
import unittest
+
class TestCell(unittest.TestCase):
-
+
def test_cell_methods(self):
-
+
cells = ['Purk2M9s','pyr_4_sym.cell']
-
+
cells = ['pyr_4_sym']
-
+
for cell_name in cells:
local_path = '../examples/test_files/%s.cell.nml'%cell_name
@@ -42,139 +43,139 @@ def test_cell_methods(self):
f = open(test_file_path,'r')
doc = loaders.NeuroMLLoader.load(test_file_path)
-
+
cell = doc.cells[0]
self.assertEqual(cell.id,cell_name.split('.')[0])
-
+
exp_num_segs = 9
self.assertEqual(cell.morphology.num_segments,exp_num_segs)
self.assertEqual(len(cell.get_segment_ids_vs_segments()),exp_num_segs)
-
+
seg0 = cell.get_segment(0)
-
+
self.assertRaises(Exception, lambda: cell.get_segment(-1)) # Seg -1 doesn't exist...
-
+
cell.summary()
-
-
+
#cell.get_ordered_segments_in_groups = get_ordered_segments_in_groups
-
- for grp in ['soma_group', ['soma_group','basal_dends'],['dendrite_group'],['all'] ]:
-
+
+ for grp in ['soma_group', ['soma_group','basal_dends'],['dendrite_group'],['all']]:
+
print("-----------------------------")
print(" Testing %s..."%grp)
-
- segs, cuml, path_prox, path_dist = cell.get_ordered_segments_in_groups(grp,
- check_parentage=(grp==['all']),
- include_cumulative_lengths=True,
- include_path_lengths=True)
-
+
+ segs, cuml, path_prox, path_dist = cell.get_ordered_segments_in_groups(
+ grp,
+ check_parentage=(grp==['all']),
+ include_cumulative_lengths=True,
+ include_path_lengths=True
+ )
+
print("Segs %s: %s"%(grp,segs))
print("Cuml %s: %s"%(grp,cuml))
print("Prox %s: %s"%(grp,path_prox))
print("Dist %s: %s"%(grp,path_dist))
-
+
for s in segs:
assert len(segs[s])==len(cuml[s])
##assert len(segs[s])==len(path_prox[s])
##assert len(segs[s])==len(path_dist[s])
-
+
if grp=='soma_group':
assert len(segs['soma_group'])==1
soma_len = cuml['soma_group'][-1]
print("soma_len: %s"%soma_len)
-
+
if grp==['all']:
assert len(segs['all'])==9
all_len = cuml['all'][-1]
-
+
if grp==['dendrite_group']:
assert len(segs['dendrite_group'])==8
dend_len = cuml['dendrite_group'][-1]
print("dend_len: %s"%dend_len)
-
+
assert all_len == soma_len+dend_len
-
def test_cell_methods2(self):
cell = Cell(id='cell0')
-
+
diam = 1.
d0=Point3DWithDiam(x=0, y=0, z=0, diameter=diam)
p=Point3DWithDiam(x=0, y=0, z=0, diameter=diam)
-
+
seg0 = Segment(id=0, name='soma',proximal=p, distal=d0)
-
+
d1=Point3DWithDiam(x=10, y=0, z=0, diameter=diam)
-
+
cell.morphology = Morphology()
cell.morphology.segments.append(seg0)
-
+
seg1 = Segment(id=1, distal=d1, parent=SegmentParent(0))
cell.morphology.segments.append(seg1)
-
+
d2=Point3DWithDiam(x=20, y=0, z=0, diameter=diam)
-
- seg2 = Segment(id=2, proximal =d1, distal=d2, parent=SegmentParent(seg1.id))
+
+ seg2 = Segment(id=2, proximal=d1, distal=d2, parent=SegmentParent(seg1.id))
cell.morphology.segments.append(seg2)
-
+
d3=Point3DWithDiam(x=20, y=10, z=0, diameter=diam)
-
+
seg3 = Segment(id=3, distal=d3, parent=SegmentParent(seg2.id, fraction_along=1))
cell.morphology.segments.append(seg3)
-
+
sg1 = SegmentGroup(id='all')
for seg in [seg0,seg1,seg2,seg3]:
sg1.members.append(Member(seg.id))
cell.morphology.segment_groups.append(sg1)
-
+
sg2 = SegmentGroup(id='soma_group')
for seg in [seg0]:
sg2.members.append(Member(seg.id))
cell.morphology.segment_groups.append(sg2)
-
+
sg3 = SegmentGroup(id='dend_group')
for seg in [seg1,seg2,seg3]:
sg3.members.append(Member(seg.id))
cell.morphology.segment_groups.append(sg3)
-
+
sg4 = SegmentGroup(id='soma_dends')
for sg in [sg2,sg3]:
sg4.includes.append(Include(sg.id))
cell.morphology.segment_groups.append(sg4)
-
+
expected = {sg1.id:4,sg2.id:1,sg3.id:3,sg4.id:4}
-
+
for sg in [sg1,sg2,sg3,sg4]:
segs = cell.get_all_segments_in_group(sg.id)
print('\nSeg group %s has segments: %s'%(sg,segs))
self.assertEqual(expected[sg.id],len(segs))
-
+
osegs = cell.get_ordered_segments_in_groups(sg.id)
print('Seg group %s has ordered segments: %s'%(sg.id,osegs))
self.assertEqual(expected[sg.id],len(osegs[sg.id]))
-
- ord_segs, cumulative_lengths, path_lengths_to_proximal, path_lengths_to_distal = cell.get_ordered_segments_in_groups(sg.id,
- include_cumulative_lengths=True,
- include_path_lengths=True)
-
+
+ ord_segs, cumulative_lengths, path_lengths_to_proximal, path_lengths_to_distal = cell.get_ordered_segments_in_groups(
+ sg.id,
+ include_cumulative_lengths=True,
+ include_path_lengths=True
+ )
+
print('Seg group %s has cumulative_lengths: %s'%(sg.id,cumulative_lengths))
self.assertEqual(expected[sg.id],len(cumulative_lengths[sg.id]))
-
+
print('Seg group %s has path_lengths_to_proximal: %s'%(sg.id,path_lengths_to_proximal))
self.assertEqual(expected[sg.id],len(path_lengths_to_proximal[sg.id]))
-
+
print('Seg group %s has path_lengths_to_distal: %s'%(sg.id,path_lengths_to_distal))
self.assertEqual(expected[sg.id],len(path_lengths_to_distal[sg.id]))
-
-
-
+
def runTest(self):
print("Running tests in TestCell")
-
+
+
if __name__ == '__main__':
-
ta = TestCell()
-
+
ta.test_cell_methods()
- ta.test_cell_methods2()
\ No newline at end of file
+ ta.test_cell_methods2()
From b04acd950797b5f687d885765643feba1ca95353 Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 12:18:21 +0000
Subject: [PATCH 27/44] fix: remove unused variables
For the open but unused/unclosed file handle, Python was throwing a
warning.
---
neuroml/test/test_cell.py | 5 -----
1 file changed, 5 deletions(-)
diff --git a/neuroml/test/test_cell.py b/neuroml/test/test_cell.py
index 3ee4e895..89a24686 100644
--- a/neuroml/test/test_cell.py
+++ b/neuroml/test/test_cell.py
@@ -40,19 +40,14 @@ def test_cell_methods(self):
root_dir = os.path.dirname(neuroml.__file__)
test_file_path = os.path.join(root_dir,'examples/test_files/%s.cell.nml'%cell_name)
print('test file path is: '+test_file_path)
- f = open(test_file_path,'r')
doc = loaders.NeuroMLLoader.load(test_file_path)
-
cell = doc.cells[0]
self.assertEqual(cell.id,cell_name.split('.')[0])
exp_num_segs = 9
self.assertEqual(cell.morphology.num_segments,exp_num_segs)
self.assertEqual(len(cell.get_segment_ids_vs_segments()),exp_num_segs)
-
- seg0 = cell.get_segment(0)
-
self.assertRaises(Exception, lambda: cell.get_segment(-1)) # Seg -1 doesn't exist...
cell.summary()
From 7b5567e4d909ec03e898ba277511b234a0afa76b Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 12:20:09 +0000
Subject: [PATCH 28/44] cosmetic: whitespace fixes
---
neuroml/test/test_hdf5_parser.py | 24 ++++++++++--------------
1 file changed, 10 insertions(+), 14 deletions(-)
diff --git a/neuroml/test/test_hdf5_parser.py b/neuroml/test/test_hdf5_parser.py
index cf5a5285..1e0a3477 100644
--- a/neuroml/test/test_hdf5_parser.py
+++ b/neuroml/test/test_hdf5_parser.py
@@ -14,42 +14,39 @@
except ImportError:
import unittest
+
class TestNeuroMLHDF5Parser(unittest.TestCase):
base_dir = os.path.dirname(__file__)
#base_dir = '.'
-
+
def test_write_load_hdf5(self):
-
#for f in []:
#for f in ['MediumNet.net.nml']:
for f in ['simplenet.nml','testh5.nml','MediumNet.net.nml','complete.nml']:
file_name = '%s/../examples/test_files/%s'%(self.base_dir,f)
-
+
print("Loading %s"%file_name)
-
+
nml_doc0 = loaders.read_neuroml2_file(file_name,include_includes=True)
summary0 = nml_doc0.summary()
-
+
print(summary0)
nml_h5_file = '%s/../examples/tmp/%s.h5'%(self.base_dir,f)
writers.NeuroMLHdf5Writer.write(nml_doc0, nml_h5_file)
print("Written to: %s"%nml_h5_file)
-
+
nml_doc2 = loaders.NeuroMLHdf5Loader.load(nml_h5_file)
summary1 = nml_doc2.summary()
print('\n'+summary1)
compare(summary0,summary1)
-
-
-
+
def test_parse(self):
-
file_name = self.base_dir+'/../examples/test_files/testh5.nml'
-
+
nml_doc0 = loaders.NeuroMLLoader.load(file_name)
summary0 = nml_doc0.summary(show_includes=False,show_non_network=False)
print('\n'+summary0)
@@ -68,12 +65,11 @@ def test_parse(self):
compare(summary0,summary1)
-
def runTest(self):
print("Running tests in TestNeuroMLHDF5Parser")
if __name__ == '__main__':
-
+
tnxp = TestNeuroMLHDF5Parser()
- tnxp.test_write_load_hdf5()
\ No newline at end of file
+ tnxp.test_write_load_hdf5()
From 291b52bddc4b3c27a0e969654f456c30d055f9f9 Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 12:22:17 +0000
Subject: [PATCH 29/44] cosmetic: whitespace fixes
---
neuroml/test/test_hdf5_optimized.py | 35 ++++++++++++++---------------
1 file changed, 17 insertions(+), 18 deletions(-)
diff --git a/neuroml/test/test_hdf5_optimized.py b/neuroml/test/test_hdf5_optimized.py
index 8c3d587e..300cfdaa 100644
--- a/neuroml/test/test_hdf5_optimized.py
+++ b/neuroml/test/test_hdf5_optimized.py
@@ -15,43 +15,42 @@
except ImportError:
import unittest
+
class TestNeuroMLHDF5Optimized(unittest.TestCase):
base_dir = os.path.dirname(__file__)
#base_dir = '.'
-
-
+
def runTest(self):
print("Running tests in TestNeuroMLHDF5Optimized")
-
+
def test_write_load(self):
-
+
#for f in []:
#for f in ['complete.nml']:
#for f in ['simplenet.nml','testh5.nml','MediumNet.net.nml','complete.nml']:
-
+
for f in ['simplenet.nml','MediumNet.net.nml']:
file_name = '%s/../examples/test_files/%s'%(self.base_dir,f)
-
+
print("Loading %s"%file_name)
-
+
nml_doc0 = loaders.read_neuroml2_file(file_name,include_includes=True)
summary0 = nml_doc0.summary()
-
+
print(summary0)
-
+
nml_h5_file = '%s/../examples/tmp/%s__1.h5'%(self.base_dir,f)
writers.NeuroMLHdf5Writer.write(nml_doc0, nml_h5_file)
print("Written to: %s"%nml_h5_file)
-
+
nml_doc1 = loaders.read_neuroml2_file(nml_h5_file,include_includes=True,optimized=True)
summary1 = nml_doc1.summary().replace(' (optimized)','')
print('\n'+summary1)
-
+
compare(summary0,summary1)
-
nml_h5_file_2 = '%s/../examples/tmp/%s__2.h5'%(self.base_dir,f)
writers.NeuroMLHdf5Writer.write(nml_doc1, nml_h5_file_2)
print("Written to: %s"%nml_h5_file_2)
@@ -61,23 +60,23 @@ def test_write_load(self):
summary2 = nml_doc2.summary()
print("Reloaded: %s"%nml_h5_file_2)
print('\n'+summary2)
-
+
compare(summary0,summary2)
-
+
nml_h5_file_3 = '%s/../examples/tmp/%s__3.nml'%(self.base_dir,f)
writers.NeuroMLWriter.write(nml_doc1, nml_h5_file_3)
print("Written to: %s"%nml_h5_file_3)
-
+
nml_doc3 = loaders.read_neuroml2_file(nml_h5_file_3,include_includes=True)
summary3 = nml_doc3.summary()
print("Reloaded: %s"%nml_h5_file_3)
print('\n'+summary3)
-
+
compare(summary0,summary3)
-
+
if __name__ == '__main__':
-
+
tnxp = TestNeuroMLHDF5Optimized()
tnxp.test_write_load()
From b0e7f8e2ff58d66c9bb226a087ff504d8f4d33f3 Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 12:26:13 +0000
Subject: [PATCH 30/44] fix: replace deprecated getargspec with getfullargspec
https://docs.python.org/3/library/inspect.html#inspect.getargspec
---
neuroml/hdf5/NeuroMLHdf5Parser.py | 4 ++--
neuroml/hdf5/NeuroMLXMLParser.py | 6 +++---
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/neuroml/hdf5/NeuroMLHdf5Parser.py b/neuroml/hdf5/NeuroMLHdf5Parser.py
index 69c7b738..b5b0b395 100644
--- a/neuroml/hdf5/NeuroMLHdf5Parser.py
+++ b/neuroml/hdf5/NeuroMLHdf5Parser.py
@@ -500,7 +500,7 @@ def start_group(self, g):
else:
component_obj = None
- if 'properties' in inspect.getargspec(self.netHandler.handle_population)[0]:
+ if 'properties' in inspect.getfullargspec(self.netHandler.handle_population)[0]:
self.netHandler.handle_population(self.currPopulation, self.currentComponent, size, component_obj=component_obj, properties=properties)
else:
self.netHandler.handle_population(self.currPopulation, self.currentComponent, size, component_obj=component_obj)
@@ -654,4 +654,4 @@ def end_group(self, g):
-
\ No newline at end of file
+
diff --git a/neuroml/hdf5/NeuroMLXMLParser.py b/neuroml/hdf5/NeuroMLXMLParser.py
index acadb0f8..cc82f988 100644
--- a/neuroml/hdf5/NeuroMLXMLParser.py
+++ b/neuroml/hdf5/NeuroMLXMLParser.py
@@ -95,7 +95,7 @@ def parse(self, filename):
if len(population.instances)>0 and population.type=='populationList':
- if 'properties' in inspect.getargspec(self.netHandler.handle_population)[0]:
+ if 'properties' in inspect.getfullargspec(self.netHandler.handle_population)[0]:
self.netHandler.handle_population(population.id,
population.component,
len(population.instances),
@@ -118,7 +118,7 @@ def parse(self, filename):
loc.z)
else:
- if 'properties' in inspect.getargspec(self.netHandler.handle_population)[0]:
+ if 'properties' in inspect.getfullargspec(self.netHandler.handle_population)[0]:
self.netHandler.handle_population(population.id,
population.component,
population.size,
@@ -421,4 +421,4 @@ def parse(self, filename):
-
\ No newline at end of file
+
From d59aaf4d3b1470227be31ec77ec46ea8e26f21ff Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 13:08:29 +0000
Subject: [PATCH 31/44] fix: close opened file handles
---
neuroml/loaders.py | 26 +++++++++++++-------------
1 file changed, 13 insertions(+), 13 deletions(-)
diff --git a/neuroml/loaders.py b/neuroml/loaders.py
index 9939daa8..c0ee8eb2 100644
--- a/neuroml/loaders.py
+++ b/neuroml/loaders.py
@@ -151,6 +151,7 @@ def load(cls,file):
json_string = fileh.read()
unpickled = json_decode(json_string)
+ fileh.close()
return unpickled
@classmethod
@@ -210,22 +211,21 @@ def load(cls, filepath):
TODO: Complete refactoring.
"""
import tables
- file = tables.open_file(filepath,mode='r')
+ with tables.open_file(filepath,mode='r') as file:
- document = neuroml.NeuroMLDocument()
+ document = neuroml.NeuroMLDocument()
- for node in file.root:
- if hasattr(node,'vertices'):
- loaded_morphology = cls.__extract_morphology(node)
- document.morphology.append(loaded_morphology)
- else:
- for morphology in node:
- loaded_morphology = cls.__extract_morphology(morphology)
+ for node in file.root:
+ if hasattr(node,'vertices'):
+ loaded_morphology = cls.__extract_morphology(node)
document.morphology.append(loaded_morphology)
-
+ else:
+ for morphology in node:
+ loaded_morphology = cls.__extract_morphology(morphology)
+ document.morphology.append(loaded_morphology)
+
return document
-
-
+
def read_neuroml2_file(nml2_file_name, include_includes=False, verbose=False,
already_included=[], print_method=print_, optimized=False):
@@ -311,4 +311,4 @@ def _read_neuroml2(nml2_file_name_or_string, include_includes=False, verbose=Fal
if __name__ == '__main__':
nml_doc = read_neuroml2_file(sys.argv[1])
- print(nml_doc.summary())
\ No newline at end of file
+ print(nml_doc.summary())
From 4a25efac47e9f1ec23ac9ec752dd409bf00ca503 Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 13:08:50 +0000
Subject: [PATCH 32/44] enh: ensure that file is closed even if an exception is
raised
Since a TestNeuroMLWriter.test_write_nonsense tests for this, we raise
the exception again.
Ideally, this exception should not propagate to the test: it should be
correctly handled in the code itself.
---
neuroml/writers.py | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/neuroml/writers.py b/neuroml/writers.py
index 6cadee2d..493c0a1f 100644
--- a/neuroml/writers.py
+++ b/neuroml/writers.py
@@ -20,8 +20,12 @@ def write(cls,nmldoc,file,close=True):
namespacedef += ' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"'
namespacedef += ' xsi:schemaLocation="http://www.neuroml.org/schema/neuroml2 https://raw.github.com/NeuroML/NeuroML2/development/Schemas/NeuroML2/NeuroML_%s.xsd"'%neuroml.current_neuroml_version
- nmldoc.export(file,0,name_="neuroml",
- namespacedef_=namespacedef) #name_ param to ensure root element named correctly - generateDS limitation
+ try:
+ nmldoc.export(file,0,name_="neuroml",
+ namespacedef_=namespacedef) #name_ param to ensure root element named correctly - generateDS limitation
+ except AttributeError as ae:
+ file.close()
+ raise(ae)
if close:
file.close()
From 2f03e2314263568784af233dd03e48fee540fa50 Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 13:15:50 +0000
Subject: [PATCH 33/44] test: use pytest instead of deprecated nose
Nose is now deprecated, so we move to pytest: https://nose.readthedocs.io/en/latest/
---
doc/install.txt | 11 +++++++++--
neuroml/test/README | 2 +-
requirements.txt | 2 +-
setup.py | 2 +-
4 files changed, 12 insertions(+), 5 deletions(-)
diff --git a/doc/install.txt b/doc/install.txt
index 20eef57a..0c332972 100644
--- a/doc/install.txt
+++ b/doc/install.txt
@@ -118,8 +118,15 @@ If everything worked your output should look something like this:
OK
-Alternatively install and use nosetests:
+Alternatively install and use pytest:
::
- nosetests -v
\ No newline at end of file
+ pytest -v --strict -W all
+
+
+To ignore some tests, like the mongodb test which requires a mongodb setup, run:
+
+::
+
+ pytest -v -k "not mongodb" --strict -W all
diff --git a/neuroml/test/README b/neuroml/test/README
index fcbf8f63..9a6d65d3 100644
--- a/neuroml/test/README
+++ b/neuroml/test/README
@@ -4,4 +4,4 @@ python -m unittest discover
or:
-nosetests
\ No newline at end of file
+pytest
diff --git a/requirements.txt b/requirements.txt
index 86d02b42..98c0f412 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -7,4 +7,4 @@ numexpr
simplejson
tables>=3.3.0
jsonpickle>=0.9.6
-nose
+pytest
diff --git a/setup.py b/setup.py
index 0c5bee2d..bc50d355 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@
long_description = long_description,
long_description_content_type="text/markdown",
install_requires=['lxml', 'six'],
- tests_require=["nose"],
+ tests_require=["pytest"],
extras_require={"full": [
"cython",
"numpy",
From 460cdf9f4f49783b552ad8323fb04d960e7f5d53 Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 13:51:31 +0000
Subject: [PATCH 34/44] chore: update travis to use pytest
---
.travis.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.travis.yml b/.travis.yml
index f1005400..10fcab81 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -20,7 +20,7 @@ install:
# command to run tests and examples, e.g. python setup.py test
script:
- - cd ./neuroml/test && nosetests -vs
+ - cd ./neuroml/test && pytest -v --strict -W all
- cd ../examples && python run_all.py
services: mongodb
From 1ee805b424d9203e8b5a0ed3608566dc49d18ace Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 14:06:11 +0000
Subject: [PATCH 35/44] fix: use getargspec on py2, getfullargspec on py3
Since we still support Py2.7, we must support both for the time being.
---
neuroml/hdf5/NeuroMLHdf5Parser.py | 9 ++++-
neuroml/hdf5/NeuroMLXMLParser.py | 57 +++++++++++++++++++------------
2 files changed, 43 insertions(+), 23 deletions(-)
diff --git a/neuroml/hdf5/NeuroMLHdf5Parser.py b/neuroml/hdf5/NeuroMLHdf5Parser.py
index b5b0b395..17e5ed7a 100644
--- a/neuroml/hdf5/NeuroMLHdf5Parser.py
+++ b/neuroml/hdf5/NeuroMLHdf5Parser.py
@@ -500,7 +500,14 @@ def start_group(self, g):
else:
component_obj = None
- if 'properties' in inspect.getfullargspec(self.netHandler.handle_population)[0]:
+ # Try for Python3
+ try:
+ args = inspect.getfullargspec(self.netHandler.handle_population)[0]
+ except AttributeError:
+ # Fall back for Python 2
+ args = inspect.getargspec(self.netHandler.handle_population)[0]
+
+ if 'properties' in args:
self.netHandler.handle_population(self.currPopulation, self.currentComponent, size, component_obj=component_obj, properties=properties)
else:
self.netHandler.handle_population(self.currPopulation, self.currentComponent, size, component_obj=component_obj)
diff --git a/neuroml/hdf5/NeuroMLXMLParser.py b/neuroml/hdf5/NeuroMLXMLParser.py
index cc82f988..43160e1e 100644
--- a/neuroml/hdf5/NeuroMLXMLParser.py
+++ b/neuroml/hdf5/NeuroMLXMLParser.py
@@ -95,17 +95,24 @@ def parse(self, filename):
if len(population.instances)>0 and population.type=='populationList':
- if 'properties' in inspect.getfullargspec(self.netHandler.handle_population)[0]:
- self.netHandler.handle_population(population.id,
- population.component,
- len(population.instances),
- component_obj=component_obj,
- properties=properties)
+ # Try for Python3
+ try:
+ args = inspect.getfullargspec(self.netHandler.handle_population)[0]
+ except AttributeError:
+ # Fall back for Python 2
+ args = inspect.getargspec(self.netHandler.handle_population)[0]
+
+ if 'properties' in args:
+ self.netHandler.handle_population(population.id,
+ population.component,
+ len(population.instances),
+ component_obj=component_obj,
+ properties=properties)
else:
- self.netHandler.handle_population(population.id,
- population.component,
- len(population.instances),
- component_obj=component_obj)
+ self.netHandler.handle_population(population.id,
+ population.component,
+ len(population.instances),
+ component_obj=component_obj)
for inst in population.instances:
@@ -117,19 +124,25 @@ def parse(self, filename):
loc.y, \
loc.z)
else:
-
- if 'properties' in inspect.getfullargspec(self.netHandler.handle_population)[0]:
- self.netHandler.handle_population(population.id,
- population.component,
- population.size,
- component_obj=component_obj,
- properties=properties)
+ # Try for Python3
+ try:
+ args = inspect.getfullargspec(self.netHandler.handle_population)[0]
+ except AttributeError:
+ # Fall back for Python 2
+ args = inspect.getargspec(self.netHandler.handle_population)[0]
+
+ if 'properties' in args:
+ self.netHandler.handle_population(population.id,
+ population.component,
+ population.size,
+ component_obj=component_obj,
+ properties=properties)
else:
- self.netHandler.handle_population(population.id,
- population.component,
- population.size,
- component_obj=component_obj)
-
+ self.netHandler.handle_population(population.id,
+ population.component,
+ population.size,
+ component_obj=component_obj)
+
for i in range(population.size):
self.netHandler.handle_location(i, \
population.id, \
From dd25c6af532ba87ffc46cb2cbaf7fd30f20012a5 Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Fri, 26 Feb 2021 14:20:14 +0000
Subject: [PATCH 36/44] undo: re-add python version limits for simplejson
---
requirements.txt | 2 +-
setup.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/requirements.txt b/requirements.txt
index 98c0f412..338e612b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,7 +4,7 @@ cython
numpy
pymongo
numexpr
-simplejson
+simplejson; python_version < '3.5'
tables>=3.3.0
jsonpickle>=0.9.6
pytest
diff --git a/setup.py b/setup.py
index bc50d355..b830999f 100644
--- a/setup.py
+++ b/setup.py
@@ -25,7 +25,7 @@
"numpy",
"pymongo",
"numexpr",
- "simplejson",
+ "simplejson; python_version < '3.5'",
"tables>=3.3.0",
"jsonpickle>=0.9.6"
]},
From a8b3c5b0f919e928bce641074ae15b0b44562926 Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Mon, 1 Mar 2021 15:31:29 +0000
Subject: [PATCH 37/44] chore: add status badges
---
README.md | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/README.md b/README.md
index 45ef5928..204abcb4 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,14 @@
## Introduction
+[![Travis CI](https://travis-ci.org/NeuralEnsemble/libNeuroML.svg?branch=master)](https://travis-ci.org/NeuralEnsemble/libNeuroML)
+[![PyPI](https://img.shields.io/pypi/v/libNeuroML)](https://pypi.org/project/libNeuroML/)
+[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/libNeuroML)](https://pypi.org/project/libNeuroML/)
+[![GitHub](https://img.shields.io/github/license/NeuralEnsemble/libNeuroML)](https://github.com/NeuralEnsemble/libNeuroML/blob/master/LICENSE)
+[![GitHub pull requests](https://img.shields.io/github/issues-pr/NeuralEnsemble/libNeuroML)](https://github.com/NeuralEnsemble/libNeuroML/pulls)
+[![GitHub issues](https://img.shields.io/github/issues/NeuralEnsemble/libNeuroML)](https://github.com/NeuralEnsemble/libNeuroML/issues)
+[![GitHub Org's stars](https://img.shields.io/github/stars/NeuralEnsemble?style=social)](https://github.com/NeuralEnsemble)
+[![Twitter Follow](https://img.shields.io/twitter/follow/NeuroML?style=social)](https://twitter.com/NeuroML)
+
This package provides Python libNeuroML, for working with neuronal models specified in [NeuroML 2](http://neuroml.org/neuromlv2).
For more about libNeuroML see:
From 0185b0edcd4ba90e89b2839d0119d6adc5dd560b Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Mon, 1 Mar 2021 15:44:13 +0000
Subject: [PATCH 38/44] chore: limit python support strings to ones tested in
CI only
---
setup.py | 8 --------
1 file changed, 8 deletions(-)
diff --git a/setup.py b/setup.py
index b830999f..d482d434 100644
--- a/setup.py
+++ b/setup.py
@@ -37,16 +37,8 @@
'Natural Language :: English',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
- 'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.1',
- 'Programming Language :: Python :: 3.2',
- 'Programming Language :: Python :: 3.3',
- 'Programming Language :: Python :: 3.3',
- 'Programming Language :: Python :: 3.4',
- 'Programming Language :: Python :: 3.5',
- 'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
From 1d5f060f4e5601b82098115127497d8d5cb6d488 Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Thu, 11 Mar 2021 09:46:50 +0000
Subject: [PATCH 39/44] fix: update for py3
---
neuroml/benchmarks/arraymorph_benchmarks.py | 110 +++++++++-----------
1 file changed, 52 insertions(+), 58 deletions(-)
diff --git a/neuroml/benchmarks/arraymorph_benchmarks.py b/neuroml/benchmarks/arraymorph_benchmarks.py
index 4454ad37..7c779a4f 100644
--- a/neuroml/benchmarks/arraymorph_benchmarks.py
+++ b/neuroml/benchmarks/arraymorph_benchmarks.py
@@ -1,15 +1,16 @@
"""
Benchmarks for reading and writing arraymorphs.
"""
+from __future__ import print_function
import numpy as np
from neuroml import arraymorph as am
import neuroml
import neuroml.writers
import tempfile
from matplotlib import pyplot as plt
-import time
+import time
import os
-from os import path
+
def timeit(method):
@@ -18,12 +19,12 @@ def timed(*args, **kw):
result = method(*args, **kw)
te = time.time()
- print '%r (%r, %r) %2.2f sec' % \
- (method.__name__, args, kw, te-ts)
+ print('%r (%r, %r) %2.2f sec' % (method.__name__, args, kw, te-ts))
return te-ts
return timed
+
class AMWriteBenchmark(object):
"""
TODO: Get rid of methods which are not used
@@ -38,11 +39,11 @@ def __init__(self,num_segments=1e6):
d = np.linspace(1,0.01,num_vertices)
vertices = np.array([x,y,z,d]).T
-
+
connectivity = range(-1,num_segments)
- big_arraymorph = am.ArrayMorphology(vertices = vertices,
- connectivity = connectivity)
+ big_arraymorph = am.ArrayMorphology(vertices=vertices,
+ connectivity=connectivity)
self.big_arraymorph = big_arraymorph
@@ -57,10 +58,10 @@ def __init__(self,num_segments=1e6):
self.__write_time = None
self.num_segments = num_segments
-
+
@property
def write_time(self):
- if self.__write_time == None:
+ if self.__write_time is None:
print("Benchmark has not been executed")
else:
return self.__write_time
@@ -77,7 +78,6 @@ def run_json(self):
def run_hdf5(self):
self.test_write_big_arraymorph_hdf5()
-
def test_write_big_arraymorph_json(self):
writer_method = neuroml.writers.JSONWriter.write
fh,filename = tempfile.mkstemp()
@@ -87,10 +87,10 @@ def test_write_big_arraymorph_json(self):
except:
self.fail("Exception raised!")
- print 'JSON Number of segments:'
- print self.num_segments
- print 'JSON size in bytes:'
- print self.file_size(filename)
+ print('JSON Number of segments:')
+ print(self.num_segments)
+ print('JSON size in bytes:')
+ print(self.file_size(filename))
os.close(fh)
@@ -103,17 +103,16 @@ def test_write_big_arraymorph_neuroml(self):
except:
self.fail("Exception raised!")
- print 'NeuroML (XML) Number of segments:'
- print self.num_segments
- print 'NeuroML (XML) size in bytes:'
- print self.file_size(filename)
+ print('NeuroML (XML) Number of segments:')
+ print(self.num_segments)
+ print('NeuroML (XML) size in bytes:')
+ print(self.file_size(filename))
os.close(fh)
def file_size(self,path):
return os.path.getsize(path)
-
def test_write_big_arraymorph_hdf5(self):
writer_method = neuroml.writers.ArrayMorphWriter.write
fh,filename = tempfile.mkstemp()
@@ -123,10 +122,10 @@ def test_write_big_arraymorph_hdf5(self):
except:
self.fail("Exception raised!")
- print 'HDF5 Number of segments:'
- print self.num_segments
- print 'HDF5 size in bytes:'
- print self.file_size(filename)
+ print('HDF5 Number of segments:')
+ print(self.num_segments)
+ print('HDF5 size in bytes:')
+ print(self.file_size(filename))
os.close(fh)
@@ -148,10 +147,9 @@ def test_write_expected(self):
physical_masks_equal = np.testing.assert_array_equal(array_morph.physical_mask,self.big_arraymorph.physical_mask)
vertices_equal = np.testing.assert_array_equal(array_morph.vertices,self.big_arraymorph.vertices)
-
- self.assertEqual(connectivity_equal,None) #None when equal
- self.assertEqual(physical_masks_equal,None) #None when equal
- self.assertEqual(vertices_equal,None) #None when equal
+ self.assertEqual(connectivity_equal,None) # None when equal
+ self.assertEqual(physical_masks_equal,None) # None when equal
+ self.assertEqual(vertices_equal,None)# None when equal
def test_write_multiple_morphologies(self):
filename = tempfile.mkstemp()[1]
@@ -173,9 +171,6 @@ def test_write_multiple_morphologies(self):
self.assertIsInstance(document,neuroml.NeuroMLDocument)
-
-
-
def benchmark_arraymorph_writer():
"""
TODO: Add NeuroML Document benchmark
@@ -197,12 +192,12 @@ def benchmark_arraymorph_writer():
neuroml_num_segments_list = []
for i in range(30):
- print "test %d" % (i)
+ print("test %d" % (i))
neuroml_num_segments_factor = 4e2
json_num_segments_factor = 4e2
hdf5_num_segments_factor = 4e2
-
+
neuroml_num_segments = i * neuroml_num_segments_factor
json_num_segments = i * json_num_segments_factor
hdf5_num_segments = i * hdf5_num_segments_factor
@@ -231,7 +226,6 @@ def benchmark_arraymorph_writer():
np.savetxt("json_results.csv", json_results, delimiter=",")
np.savetxt("hdf5_results.csv", hdf5_results, delimiter=",")
-
neuroml_runtimes_averaged = np.mean(neuroml_results,axis=0)
json_runtimes_averaged = np.mean(json_results,axis=0)
hdf5_runtimes_averaged = np.mean(hdf5_results,axis=0)
@@ -244,29 +238,28 @@ def benchmark_arraymorph_writer():
json_num_segments_list = np.array(json_num_segments_list)
hdf5_num_segments_list = np.array(hdf5_num_segments_list)
-
plt_neuroml = plt.errorbar(neuroml_num_segments_list,
- neuroml_runtimes_averaged,
- yerr=hdf5_errors,
- marker='o',
- color='k',
- ecolor='k',
- markerfacecolor='r',
- label="series 2",
- capsize=5,)
+ neuroml_runtimes_averaged,
+ yerr=hdf5_errors,
+ marker='o',
+ color='k',
+ ecolor='k',
+ markerfacecolor='r',
+ label="series 2",
+ capsize=5,)
plt.title("ArrayMorph write to disk benchmark (NeuroML (XML) serialization)")
plt.xlabel("Number of segments in morphology (Units of 1000 segments)")
plt.ylabel("Time to write to disk (s)")
plt_hdf5 = plt.errorbar(hdf5_num_segments_list,
- hdf5_runtimes_averaged,
- yerr=hdf5_errors,
- marker='o',
- color='k',
- ecolor='k',
- markerfacecolor='g',
- label="series 2",
- capsize=5,)
+ hdf5_runtimes_averaged,
+ yerr=hdf5_errors,
+ marker='o',
+ color='k',
+ ecolor='k',
+ markerfacecolor='g',
+ label="series 2",
+ capsize=5,)
plt.title("ArrayMorph write to disk benchmark (HDF5 serialization)")
plt.xlabel("Number of segments in morphology (Units of 1000 segments)")
plt.ylabel("Time to write to disk (s)")
@@ -274,14 +267,14 @@ def benchmark_arraymorph_writer():
# plt.show()
plt_json = plt.errorbar(json_num_segments_list,
- json_runtimes_averaged,
- yerr=json_errors,
- marker='o',
- color='k',
- ecolor='k',
- markerfacecolor='b',
- label="series 2",
- capsize=5,)
+ json_runtimes_averaged,
+ yerr=json_errors,
+ marker='o',
+ color='k',
+ ecolor='k',
+ markerfacecolor='b',
+ label="series 2",
+ capsize=5,)
plt.title("ArrayMorph write to disk benchmarks for JSON, HDF5 and NeuroML serialization formats")
plt.xlabel("Number of segments in morphology")
@@ -293,6 +286,7 @@ def benchmark_arraymorph_writer():
plt.xscale('log')
plt.show()
+
#prototype:
if __name__ == "__main__":
benchmark_arraymorph_writer()
From fcbff532c6793fd72ad02572956adddbd21a76a5 Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Thu, 11 Mar 2021 10:02:38 +0000
Subject: [PATCH 40/44] enh: setup GH Actions CI
pytables seems to provide wheels for py3.9 now, so we don't need to apt
install anything as we do on travis.
Fixes #97
---
.github/workflows/ci.yml | 51 ++++++++++++++++++++++++++++++++++++++++
1 file changed, 51 insertions(+)
create mode 100644 .github/workflows/ci.yml
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 00000000..79943c09
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,51 @@
+name: Build
+
+on:
+ push:
+ branches: [ master, development ]
+ pull_request:
+ branches: [ master, development ]
+
+jobs:
+ build:
+
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ python-version: [2.7, 3.7, 3.8, 3.9]
+
+ steps:
+ - uses: actions/checkout@v2
+
+ - name: Start MongoDB
+ uses: supercharge/mongodb-github-action@1.3.0
+
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ python -m pip install flake8 pytest
+ if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
+
+ - name: Build package
+ run: |
+ pip install .[full]
+
+ - name: Test with pytest
+ run: |
+ pytest
+
+ - name: Run examples
+ run: |
+ cd examples && python run_all.py
+
+ - name: Lint with flake8
+ run: |
+ # stop the build if there are Python syntax errors or undefined names
+ flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+ # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
+ flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
From 18361d2d97e113aa31a1ab78c1a390c5208675eb Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Thu, 11 Mar 2021 10:06:29 +0000
Subject: [PATCH 41/44] enh: fix example location in GH action CI
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 79943c09..db7af520 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -41,7 +41,7 @@ jobs:
- name: Run examples
run: |
- cd examples && python run_all.py
+ cd ./neuroml/examples && python run_all.py
- name: Lint with flake8
run: |
From 9c924b8068b045fca0619b5dfd2aeae7c51f66c0 Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Thu, 11 Mar 2021 10:23:40 +0000
Subject: [PATCH 42/44] fix: include necessary imports
---
neuroml/arraymorph_load_time_benchmark.py | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/neuroml/arraymorph_load_time_benchmark.py b/neuroml/arraymorph_load_time_benchmark.py
index 0ebaad7e..9df6d8f1 100644
--- a/neuroml/arraymorph_load_time_benchmark.py
+++ b/neuroml/arraymorph_load_time_benchmark.py
@@ -1,5 +1,10 @@
+import numpy as np
+import neuroml
+import neuroml.arraymorph as am
+
+
class Benchmark:
- def __init__(num_segments):
+ def __init__(self, num_segments):
self.num_segments = num_segments
def set_up(self):
From 9deb15147c94736683960bca63940cb5e57f6af3 Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Thu, 11 Mar 2021 10:30:11 +0000
Subject: [PATCH 43/44] fix: ignore flake false positives about undefined names
NOTE: MorphologyImportError is undefined and should be fixed.
---
neuroml/loaders.py | 3 ++-
neuroml/nml/nml.py | 4 ++--
2 files changed, 4 insertions(+), 3 deletions(-)
diff --git a/neuroml/loaders.py b/neuroml/loaders.py
index c0ee8eb2..a37dc749 100644
--- a/neuroml/loaders.py
+++ b/neuroml/loaders.py
@@ -114,7 +114,8 @@ def load_swc_single(cls, src, name=None):
if len(id_to_index_dict) != len(index_to_id):
s = "Internal Error Loading SWC: Index and ID map are different lengths."
s += " [ID:%d, Index:%d]"%( len(index_to_id), len(id_to_index_dict) )
- raise MorphologyImportError(s)
+ # TODO: this is undefined!!
+ raise MorphologyImportError(s) # noqa: F821
# Vertices and section types are easy:
vertices = d[ ['x','y','z','r'] ]
diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py
index 1af4fbc1..b8281f42 100644
--- a/neuroml/nml/nml.py
+++ b/neuroml/nml/nml.py
@@ -35,7 +35,7 @@
Validate_simpletypes_ = True
if sys.version_info.major == 2:
- BaseStrType_ = basestring
+ BaseStrType_ = basestring # noqa: F821
else:
BaseStrType_ = str
@@ -415,7 +415,7 @@ def gds_encode(instring):
def convert_unicode(instring):
if isinstance(instring, str):
result = quote_xml(instring)
- elif sys.version_info.major == 2 and isinstance(instring, unicode):
+ elif sys.version_info.major == 2 and isinstance(instring, unicode): # noqa: F821
result = quote_xml(instring).encode('utf8')
else:
result = GeneratedsSuper.gds_encode(str(instring))
From dba03cf2ba6aebd6c811035df47f2f3dabcb4d6b Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Mon, 22 Mar 2021 11:18:56 +0000
Subject: [PATCH 44/44] Bump to v0.2.55 for merge to master
---
neuroml/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/neuroml/__init__.py b/neuroml/__init__.py
index 4fc1d351..1a7fb55e 100644
--- a/neuroml/__init__.py
+++ b/neuroml/__init__.py
@@ -1,6 +1,6 @@
from .nml.nml import * # allows importation of all neuroml classes
-__version__ = '0.2.54'
+__version__ = '0.2.55'
__version_info__ = tuple(int(i) for i in __version__.split('.'))