Skip to content

Commit

Permalink
Merge pull request #1873 from PrincetonUniversity/devel
Browse files Browse the repository at this point in the history
Devel
  • Loading branch information
dillontsmith authored Jan 11, 2021
2 parents f136d8b + 20eef5b commit 9f942f6
Show file tree
Hide file tree
Showing 13 changed files with 598 additions and 149 deletions.
30 changes: 16 additions & 14 deletions .github/workflows/pnl-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,24 +27,24 @@ jobs:
if: startsWith(runner.os, 'Linux')
with:
path: ~/.cache/pip/wheels
key: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-${{ github.sha }}
restore-keys: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels
key: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-v2-${{ github.sha }}
restore-keys: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-v2

- name: MacOS wheels cache
uses: actions/[email protected]
if: startsWith(runner.os, 'macOS')
with:
path: ~/Library/Caches/pip/wheels
key: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-${{ github.sha }}
restore-keys: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels
key: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-v2-${{ github.sha }}
restore-keys: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-v2

- name: Windows wheels cache
uses: actions/[email protected]
if: startsWith(runner.os, 'Windows')
with:
path: ~\AppData\Local\pip\Cache\wheels
key: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-${{ github.sha }}
restore-keys: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels
key: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-v2-${{ github.sha }}
restore-keys: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-v2

- name: Set up Python ${{ matrix.python-version }}
uses: actions/[email protected]
Expand All @@ -64,17 +64,19 @@ jobs:
run: choco install --no-progress -y graphviz --version=2.38.0.20190211
if: startsWith(runner.os, 'Windows')

- name: Windows pytorch
run: |
python -m pip install --upgrade pip wheel
pip install torch -f https://download.pytorch.org/whl/cpu/torch_stable.html
if: startsWith(runner.os, 'Windows') && matrix.python-architecture != 'x86'
- name: Shared dependencies
shell: bash
run: |
python -m pip install --upgrade pip wheel
# explicitly install numpy (https://github.com/pypa/pip/issues/9239)
python -m pip install --upgrade pip wheel $(grep numpy requirements.txt)
pip install -e .[dev]
- name: Windows pytorch
shell: bash
run: |
pip install $(grep -o 'torch[0-9<=\.]*' requirements.txt) -f https://download.pytorch.org/whl/cpu/torch_stable.html
if: startsWith(runner.os, 'Windows') && matrix.python-architecture != 'x86'

- name: Cleanup old wheels
shell: bash
run: |
Expand All @@ -101,7 +103,7 @@ jobs:
run: pytest --junit-xml=tests_out.xml --verbosity=0 -n auto --maxprocesses=2

- name: Upload test results
uses: actions/[email protected].1
uses: actions/[email protected].2
with:
name: test-results-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.python-architecture }}
path: tests_out.xml
Expand All @@ -115,7 +117,7 @@ jobs:
if: contains(github.ref, 'tags')

- name: Upload dist packages
uses: actions/[email protected].1
uses: actions/[email protected].2
with:
name: dist-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.python-architecture }}
path: dist/
Expand Down
3 changes: 2 additions & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,8 @@ dist: bionic
arch:
- amd64
- arm64
- ppc64le
# Disabled due to intermittent failures and long running times
# - ppc64le
# Disabled until grpcio works with s390x
# https://github.com/grpc/grpc/issues/23797
# - s390x
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,19 +17,20 @@
"""

import abc
import collections
import collections.abc
import numbers
import warnings

import numpy as np
import typecheck as tc

from psyneulink.core import llvm as pnlvm
from psyneulink.core.components.component import DefaultsFlexibility, _has_initializers_setter
from psyneulink.core.components.component import DefaultsFlexibility, _has_initializers_setter, ComponentsMeta
from psyneulink.core.components.functions.function import Function_Base, FunctionError
from psyneulink.core.components.functions.distributionfunctions import DistributionFunction
from psyneulink.core.globals.keywords import STATEFUL_FUNCTION_TYPE, STATEFUL_FUNCTION, NOISE, RATE
from psyneulink.core.globals.parameters import Parameter
from psyneulink.core.globals.utilities import parameter_spec, iscompatible, object_has_single_value, convert_to_np_array
from psyneulink.core.globals.utilities import parameter_spec, iscompatible, object_has_single_value, convert_to_np_array, contains_type
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.context import ContextFlags, handle_external_context

Expand Down Expand Up @@ -196,6 +197,15 @@ class Parameters(Function_Base.Parameters):
initializer = Parameter(np.array([0]), pnl_internal=True)
has_initializers = Parameter(True, setter=_has_initializers_setter, pnl_internal=True)

def _validate_noise(self, noise):
    """Validate a noise Parameter specification.

    Returns:
        an error string if **noise** is an iterable that contains an
        uninstantiated Function class (ComponentsMeta instance) anywhere
        in its (possibly nested) structure; None (implicitly) otherwise.

    A class cannot be shape-checked against the noise variable until it
    is instantiated, hence the rejection here.
    """
    if (
        # collections.Iterable was a deprecated alias removed in Python 3.10;
        # collections.abc.Iterable is the supported spelling
        isinstance(noise, collections.abc.Iterable)
        # assume ComponentsMeta are functions
        and contains_type(noise, ComponentsMeta)
    ):
        # TODO: make this validation unnecessary by handling automatically?
        return 'functions in a list must be instantiated and have the desired noise variable shape'

@handle_external_context()
@tc.typecheck
def __init__(self,
Expand Down
25 changes: 15 additions & 10 deletions psyneulink/core/compositions/composition.py
Original file line number Diff line number Diff line change
Expand Up @@ -8029,16 +8029,21 @@ def run(
a cycle is not specified, it is assigned its `default values <Parameter_Defaults>` when initialized
(see `Composition_Cycles_and_Feedback` additional details).

reset_stateful_functions_to : Dict { Node : Object | iterable [Object] } : default None
object or iterable of objects to be passed as arguments to nodes' reset methods when their
respective reset_stateful_function_when conditions are met. These are used to seed the stateful attributes
of Mechanisms that have stateful functions. If a node's reset_stateful_function_when condition is set to
Never, but they are listed in the reset_stateful_functions_to dict, then they will be reset once at the
beginning of the run, using the provided values.

reset_stateful_functions_when : Condition : default Never()
sets the reset_stateful_function_when condition for all nodes in the Composition that currently have their
reset_stateful_function_when condition set to `Never <Never>` for the duration of the run.
reset_stateful_functions_to : Dict { Node : Object | iterable [Object] } : default None
object or iterable of objects to be passed as arguments to nodes' reset methods when their
respective reset_stateful_function_when conditions are met. These are used to seed the stateful attributes
of Mechanisms that have stateful functions. If a node's reset_stateful_function_when condition is set to
Never, but it is listed in the reset_stateful_functions_to dict, then it will be reset once at the
beginning of the run, using the provided values. For a more in-depth explanation of this argument, see
`Resetting Parameters of StatefulFunctions <Composition_Reset>`.

reset_stateful_functions_when : Dict { Node: Condition } | Condition : default Never()
If a dict is given, sets the reset_stateful_function_when attribute for each key Node to its corresponding
value Condition. If a Condition is given, sets the reset_stateful_function_when attribute for all nodes in
the Composition that currently have their reset_stateful_function_when conditions set to `Never <Never>`.
In either case, the specified Conditions persist only for the duration of the run, after which the nodes'
reset_stateful_functions_when attributes are returned to their previous Conditions. For a more in-depth
explanation of this argument, see `Resetting Parameters of StatefulFunctions <Composition_Reset>`.

skip_initialization : bool : default False

Expand Down
11 changes: 9 additions & 2 deletions psyneulink/core/globals/log.py
Original file line number Diff line number Diff line change
Expand Up @@ -1401,7 +1401,10 @@ def nparray(self,
# If any time values are empty, revert to indexing the entries;
# this requires that all entries have the same length
else:
max_len = max([len(self.logged_entries[e][eid]) for e in entries])
try:
max_len = max([len(self.logged_entries[e][eid]) for e in entries])
except KeyError:
max_len = 0

# If there are no time values, only support entries of the same length
# Must dealias both e and zeroth entry because either/both of these could be 'value'
Expand Down Expand Up @@ -1751,7 +1754,11 @@ def _assemble_entry_data(self, entry, time_values, execution_id=None):
# entry = self._dealias_owner_name(entry)
row = []
time_col = iter(time_values)
data = self.logged_entries[entry][execution_id]
try:
data = self.logged_entries[entry][execution_id]
except KeyError:
return [None]

time = next(time_col, None)
for i in range(len(self.logged_entries[entry][execution_id])):
# iterate through log entry tuples:
Expand Down
23 changes: 23 additions & 0 deletions psyneulink/core/globals/utilities.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,7 @@
'scalar_distance', 'sinusoid',
'tensor_power', 'TEST_CONDTION', 'type_match',
'underscore_to_camelCase', 'UtilitiesError', 'unproxy_weakproxy', 'create_union_set', 'merge_dictionaries',
'contains_type'
]

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -1789,3 +1790,25 @@ def gen_friendly_comma_str(items):
divider = f',{divider}'

return f"{', '.join(items[:-1])}{divider}{items[-1]}"


def contains_type(
    arr: collections.abc.Iterable,
    typ: typing.Union[type, typing.Tuple[type, ...]]
) -> bool:
    """
    Returns:
        True if **arr** is a possibly nested Iterable that contains
        an instance of **typ** (or one type in **typ** if tuple)

    Note: `isinstance(**arr**, **typ**)` should be used to check
    **arr** itself if needed
    """
    try:
        for a in arr:
            if isinstance(a, typ):
                return True
            # Do not recurse into str: its elements are themselves strings
            # and would already have matched the isinstance check above.
            # This also guarantees termination without relying on CPython's
            # single-character string interning (the `a is not arr` guard).
            # bytes is still recursed into, since iterating it yields ints.
            if a is not arr and not isinstance(a, str) and contains_type(a, typ):
                return True
    except TypeError:
        # **arr** is not iterable; by contract that is simply "not found"
        pass

    return False
5 changes: 4 additions & 1 deletion psyneulink/core/llvm/builder_context.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,7 @@ class LLVMBuilderContext:
_llvm_generation = 0
int32_ty = ir.IntType(32)
float_ty = ir.DoubleType()
bool_ty = ir.IntType(1)

def __init__(self):
self._modules = []
Expand Down Expand Up @@ -318,7 +319,9 @@ def convert_python_struct_to_llvm_ir(self, t):
return ir.ArrayType(elems_t[0], len(elems_t))
return ir.LiteralStructType(elems_t)
elif type(t) is tuple:
elems_t = (self.convert_python_struct_to_llvm_ir(x) for x in t)
elems_t = [self.convert_python_struct_to_llvm_ir(x) for x in t]
if len(elems_t) > 0 and all(x == elems_t[0] for x in elems_t):
return ir.ArrayType(elems_t[0], len(elems_t))
return ir.LiteralStructType(elems_t)
elif isinstance(t, enum.Enum):
# FIXME: Consider enums of non-int type
Expand Down
Loading

0 comments on commit 9f942f6

Please sign in to comment.