diff --git a/.github/actions/cleanup-pip-cache/action.yml b/.github/actions/cleanup-pip-cache/action.yml new file mode 100644 index 00000000000..029dec1b84d --- /dev/null +++ b/.github/actions/cleanup-pip-cache/action.yml @@ -0,0 +1,18 @@ +name: 'Cleanup pip wheels cache' +description: 'Purge pip wheels cache, keeping wheels for the installed packages' + +runs: + using: "composite" + steps: + - name: "Cleanup old wheels" + shell: bash + run: | + pip cache info + INSTALLED=`pip list | sed 's/-/_/g' | sed 's/ */-/' | tail -n+3` + CACHED=`pip cache list | cut -f 2,3 -d- | tail -n+3` + for P in $CACHED; do + # Remove cached and not installed + if [ `echo $INSTALLED | grep -o $P | wc -l` == "0" ] ; then + pip cache remove -v $P + fi + done diff --git a/.github/workflows/pnl-ci-docs.yml b/.github/workflows/pnl-ci-docs.yml new file mode 100644 index 00000000000..ea8f14e659d --- /dev/null +++ b/.github/workflows/pnl-ci-docs.yml @@ -0,0 +1,78 @@ +name: PsyNeuLink Docs CI + +on: push + +jobs: + build: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + python-version: [3.6, 3.7] # Doesn't work in 3.8 or 3.9 + python-architecture: ['x64'] + os: [ubuntu-latest, macos-latest, windows-latest] + + steps: + - name: Checkout sources + uses: actions/checkout@v2 + with: + fetch-depth: 10 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2.2.1 + with: + python-version: ${{ matrix.python-version }} + architecture: ${{ matrix.python-architecture }} + + - name: Get pip cache location + shell: bash + id: pip_cache + run: | + python -m pip install -U pip + python -m pip --version + echo ::set-output name=pip_cache_dir::$(python -m pip cache dir) + + - name: Wheels cache + uses: actions/cache@v2.1.3 + with: + path: ${{ steps.pip_cache.outputs.pip_cache_dir }}/wheels + key: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-v2-${{ github.sha }} + restore-keys: ${{ runner.os }}-python-${{ 
matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-v2 + + - name: MacOS dependencies + run: HOMEBREW_NO_AUTO_UPDATE=1 brew install graphviz + if: startsWith(runner.os, 'macOS') + + - name: Linux dependencies + run: sudo apt-get install -y graphviz + if: startsWith(runner.os, 'Linux') + + - name: Windows dependencies + run: choco install --no-progress -y graphviz --version=2.38.0.20190211 + if: startsWith(runner.os, 'Windows') + + - name: Shared dependencies + shell: bash + run: | + # explicitly install numpy (https://github.com/pypa/pip/issues/9239) + python -m pip install --upgrade pip wheel $(grep numpy requirements.txt) + pip install -e .[doc] + + - name: Windows pytorch + shell: bash + run: | + pip install $(grep -o 'torch[0-9<=\.]*' requirements.txt) -f https://download.pytorch.org/whl/cpu/torch_stable.html + if: startsWith(runner.os, 'Windows') && matrix.python-architecture != 'x86' + + - name: Cleanup old wheels + uses: ./.github/actions/cleanup-pip-cache + + - name: Build Documentation + run: sphinx-build -b html -aE docs/source pnl-html + + - name: Upload Documentation + uses: actions/upload-artifact@v2.2.2 + with: + name: Documentation-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.python-architecture }} + retention-days: 1 + path: pnl-html diff --git a/.github/workflows/pnl-ci.yml b/.github/workflows/pnl-ci.yml index 69c14b933b8..aed55b727ed 100644 --- a/.github/workflows/pnl-ci.yml +++ b/.github/workflows/pnl-ci.yml @@ -1,6 +1,14 @@ name: PsyNeuLink CI -on: [push, pull_request] +on: + push: + paths-ignore: + - 'docs/**' + - 'doc_requirements.txt' + pull_request: + paths-ignore: + - 'docs/**' + - 'doc_requirements.txt' jobs: build: @@ -18,40 +26,32 @@ jobs: os: windows-latest steps: - - uses: actions/checkout@v2 + - name: Checkout sources + uses: actions/checkout@v2 with: fetch-depth: 10 - - name: Linux wheels cache - uses: actions/cache@v2.1.3 - if: startsWith(runner.os, 'Linux') + - name: Set up Python ${{ 
matrix.python-version }} + uses: actions/setup-python@v2.2.1 with: - path: ~/.cache/pip/wheels - key: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-v2-${{ github.sha }} - restore-keys: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-v2 + python-version: ${{ matrix.python-version }} + architecture: ${{ matrix.python-architecture }} - - name: MacOS wheels cache - uses: actions/cache@v2.1.3 - if: startsWith(runner.os, 'macOS') - with: - path: ~/Library/Caches/pip/wheels - key: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-v2-${{ github.sha }} - restore-keys: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-v2 + - name: Get pip cache location + shell: bash + id: pip_cache + run: | + python -m pip install -U pip + python -m pip --version + echo ::set-output name=pip_cache_dir::$(python -m pip cache dir) - - name: Windows wheels cache + - name: Wheels cache uses: actions/cache@v2.1.3 - if: startsWith(runner.os, 'Windows') with: - path: ~\AppData\Local\pip\Cache\wheels + path: ${{ steps.pip_cache.outputs.pip_cache_dir }}/wheels key: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-v2-${{ github.sha }} restore-keys: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2.2.1 - with: - python-version: ${{ matrix.python-version }} - architecture: ${{ matrix.python-architecture }} - - name: MacOS dependencies run: HOMEBREW_NO_AUTO_UPDATE=1 brew install graphviz if: startsWith(runner.os, 'macOS') @@ -78,17 +78,7 @@ jobs: if: startsWith(runner.os, 'Windows') && matrix.python-architecture != 'x86' - name: Cleanup old wheels - shell: bash - run: | - pip cache info - INSTALLED=`pip list | sed 's/-/_/g' | 
sed 's/ */-/' | tail -n+3` - CACHED=`pip cache list | cut -f 2,3 -d- | tail -n+3` - for P in $CACHED; do - # Remove cached and not installed - if [ `echo $INSTALLED | grep -o $P | wc -l` == "0" ] ; then - pip cache remove -v $P - fi - done + uses: ./.github/actions/cleanup-pip-cache - name: Lint with flake8 shell: bash diff --git a/.github/workflows/pnl-docs.yml b/.github/workflows/pnl-docs.yml new file mode 100644 index 00000000000..016491b9f4a --- /dev/null +++ b/.github/workflows/pnl-docs.yml @@ -0,0 +1,116 @@ +name: PsyNeuLink Docs Compare + +on: pull_request + +jobs: + docs-build: + strategy: + fail-fast: false + matrix: + python-version: [3.7] + os: [ubuntu-latest] + pnl-version: [ 'base', 'merge'] + + runs-on: ${{ matrix.os }} + + defaults: + run: + shell: bash + + steps: + - name: Checkout merge commit + uses: actions/checkout@v2 + if: ${{ matrix.pnl-version == 'merge' }} + with: + fetch-depth: 10 + ref: ${{ github.ref }} + + - name: Checkout pull base + uses: actions/checkout@v2 + if: ${{ matrix.pnl-version == 'base' }} + with: + fetch-depth: 10 + ref: ${{ github.base_ref }} + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2.2.1 + with: + python-version: ${{ matrix.python-version }} + architecture: ${{ matrix.python-architecture }} + + - name: Docs dependencies + run: | + # Install numpy first + python -m pip install --upgrade pip wheel $(grep numpy requirements.txt) + # We need to install all PNL deps since docs config imports psyneulink module + pip install -e .[doc] + + - name: Add git tag + # The generated docs include PNL version, + # set it to a fixed value to prevent polluting the diff + run: git tag 'v999.999.999.999' + + - name: Build docs + run: sphinx-build -b html -aE docs/source pnl-html + + - name: Upload generated docs + uses: actions/upload-artifact@v2 + with: + name: docs-${{ matrix.pnl-version }}-${{ matrix.os }}-${{ matrix.python-version }} + path: pnl-html + retention-days: 1 + + docs-compare: + 
strategy: + fail-fast: false + matrix: + python-version: [3.7] + os: [ubuntu-latest] + + runs-on: ${{ matrix.os }} + needs: [docs-build] + + steps: + + - name: Download generated base docs + uses: actions/download-artifact@v2 + with: + name: docs-base-${{ matrix.os }}-${{ matrix.python-version }} + path: docs-base + + - name: Download generated merge docs + uses: actions/download-artifact@v2 + with: + name: docs-merge-${{ matrix.os }}-${{ matrix.python-version }} + path: docs-merge + + - name: Compare + shell: bash + run: | + # Store the resulting diff, or 'No differences!' to an output file + # The 'or true' part is needed to work around the 'pipefail' flag used by github-actions + (diff -r docs-base docs-merge && echo 'No differences!' || true) | tee result.diff + + - name: Post comment + uses: actions/github-script@v3 + # Post comment only if not PR across repos +# if: ${{ github.event.base.full_name }} == ${{ github.event.head.repo.full_name }} + with: + script: | + // Post comment only if not PR across repos + console.log(context.payload.pull_request.base.repo.full_name) + console.log(context.payload.pull_request.head.repo.full_name) + var base_repo_name = context.payload.pull_request.base.repo.full_name + var head_repo_name = context.payload.pull_request.head.repo.full_name + + if (base_repo_name != head_repo_name) return ; + + var fs = require("fs"); + var text = fs.readFileSync("./result.diff").slice(0,16384); + + github.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: 'This PR causes the following changes to the html docs (${{ matrix.os }}, python-${{ matrix.python-version }}):\n```\n' + text + '\n...\n```\nSee CI logs for the full diff.' 
+ }) diff --git a/dev_requirements.txt b/dev_requirements.txt index 3bb770015db..f81b68e74d0 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -1,5 +1,4 @@ jupyter<=1.0.0 -psyneulink-sphinx-theme<=1.2.1.7 pytest<6.2.2 pytest-benchmark<=3.2.3 pytest-cov<=2.10.1 @@ -8,5 +7,3 @@ pytest-profiling<=1.7.0 pytest-pycodestyle<=2.2.0 pytest-pydocstyle<=2.2.0 pytest-xdist<2.3.0 -sphinx<3.3.2 -sphinx_autodoc_typehints<1.12.0 diff --git a/doc_requirements.txt b/doc_requirements.txt new file mode 100644 index 00000000000..5cb38f4a11d --- /dev/null +++ b/doc_requirements.txt @@ -0,0 +1,3 @@ +psyneulink-sphinx-theme<=1.2.1.7 +sphinx<3.3.2 +sphinx_autodoc_typehints<1.12.0 diff --git a/docs/source/Compilation.rst b/docs/source/Compilation.rst new file mode 100644 index 00000000000..a763be57f49 --- /dev/null +++ b/docs/source/Compilation.rst @@ -0,0 +1,39 @@ +Compilation +=========== + +PsyNeuLink includes a runtime compiler to improve performance of executed models. +This section describes the overview of the compiler design and its use. +The performance improvement varies, but it has been observed to be between one and three orders of magnitude depending on the model. + + +Overview +-------- + +The PsyNeuLink runtime compiler works in several steps when invoked via `run` or `execute`: +*Compilation*: + #. The model is initialized. This step is identical to non-compiled execution. + #. Data structures (input/output/parameters) are flattened and converted to LLVM IR form. + #. LLVM IR code is generated to match the semantics of individual components and the used scheduling rules. + #. Host CPU compatible binary code is generated. + #. The resulting function is saved as a `ctypes` function and the parameter types are converted to `ctypes` binary structures. + +*Execution*: + #. Parameter structures are populated with the data from `Composition` based on the provided `execution_id`. 
These structures are preserved between invocations so executions with the same `execution_id` will reuse the same binary structures. + #. The `ctypes` function from step 5 is executed. + #. Results are extracted from the binary structures and converted to Python format. + + +Use +--- + +The compiled form of a model can be invoked by passing one of the following values to the `bin_execute` parameter of `Composition.run`, or `Composition.exec`: + + * `False` or `Python`: Normal Python execution + * `LLVM`: Compile and execute individual nodes. The scheduling loop still runs in Python. If any of the nodes fails to compile, an error is raised. *NOTE:* Schedules that require access to node data will not work correctly. + * `LLVMExec`: Execution of `Composition.exec` is replaced by a compiled equivalent. If the `Composition` fails to compile, an error is raised. + * `LLVMRun`: Execution of `Composition.run` is replaced by a compiled equivalent. If the `Composition` fails to compile, an error is raised. + * `True`: This option attempts all three above mentioned granularities, and gracefully falls back to lower granularity. Warnings are raised in place of errors. This is the recommended way to invoke compiled execution as the final fallback is the Python baseline. + +Note that data other than `Composition.run` outputs are not synchronized between Python and compiled execution. + + It is possible to invoke a compiled version of `Function` s and `Mechanism` s. This functionality is provided for testing purposes only; because of the lack of data synchronization it is not recommended for general use. 
diff --git a/docs/source/Core.rst b/docs/source/Core.rst index b0828df0b17..56dd660972c 100644 --- a/docs/source/Core.rst +++ b/docs/source/Core.rst @@ -17,3 +17,4 @@ Core - `Registry` - `Preferences` - `json` + - `Compilation` diff --git a/docs/source/Services.rst b/docs/source/Services.rst index 142d725026b..29e3aead2b1 100644 --- a/docs/source/Services.rst +++ b/docs/source/Services.rst @@ -9,3 +9,4 @@ Services Registry Preferences json + Compilation diff --git a/psyneulink/core/components/component.py b/psyneulink/core/components/component.py index 39f788d75cd..ccad0a756fa 100644 --- a/psyneulink/core/components/component.py +++ b/psyneulink/core/components/component.py @@ -2040,6 +2040,7 @@ def _is_user_specified(parameter): if isinstance(val, Function): val.owner = self + p._validate(val) p.set(val, context=context, skip_history=True, override=True) if isinstance(p.default_value, Function): @@ -2213,9 +2214,27 @@ def _override_unspecified_shared_parameters(self, context): # only show warning one time, for the non-default value if possible and c is shared_objs[-1] ): - warnings.warn( - f'Specification of the "{param.name}" parameter' - f' ({param.default_value}) for {self} conflicts with specification of its shared parameter "{shared_obj_param.name}" ({shared_obj_param.default_value}) for its {param.attribute_name} ({param.source._owner._owner}). The value specified on {param.source._owner._owner} will be used.') + try: + isp_arg = self.initial_shared_parameters[param.attribute_name][param.shared_parameter_name] + # TODO: handle passed component but copied? 
+ throw_warning = ( + # arg passed directly into shared_obj, no parsing + not safe_equals(shared_obj_param._get(context), isp_arg) + # arg passed but was parsed + and not safe_equals(shared_obj_param.spec, isp_arg) + ) + except KeyError: + throw_warning = True + + if throw_warning: + warnings.warn( + f'Specification of the "{param.name}" parameter ({param.default_value})' + f' for {self} conflicts with specification of its shared parameter' + f' "{shared_obj_param.name}" ({shared_obj_param.default_value}) for its' + f' {param.attribute_name} ({param.source._owner._owner}). The value' + f' specified on {param.source._owner._owner} will be used.' + ) + @handle_external_context() def reset_params(self, mode=ResetMode.INSTANCE_TO_CLASS, context=None): diff --git a/psyneulink/core/components/ports/parameterport.py b/psyneulink/core/components/ports/parameterport.py index ad627bc83b7..4909f850e23 100644 --- a/psyneulink/core/components/ports/parameterport.py +++ b/psyneulink/core/components/ports/parameterport.py @@ -474,6 +474,19 @@ def __getitem__(self, key): return res + def __delitem__(self, key): + main_port = self[key] + rem_mapping_keys = set() + + for m, port in self.parameter_mapping.items(): + if port is main_port: + rem_mapping_keys.add(m) + + for m in rem_mapping_keys: + del self.parameter_mapping[m] + + del self.data[self.data.index(main_port)] + def _get_possible_port_names(self, param_name): """ Returns: diff --git a/requirements.txt b/requirements.txt index e18a7b7b068..e5720cbc340 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ autograd<=1.3 dill<=0.32 elfi<0.7.8 -graphviz<0.16.0 +graphviz<0.17.0 grpcio<1.35.0 grpcio-tools<1.35.0 llvmlite<0.36 diff --git a/setup.py b/setup.py index ed671d57a23..77d128184f1 100644 --- a/setup.py +++ b/setup.py @@ -80,6 +80,7 @@ def get_requirements(require_name=None): extras_require={ 'dev': get_requirements('dev'), + 'doc': get_requirements('doc'), 'tutorial': get_requirements('tutorial'), } ) diff 
--git a/tests/misc/test_parameters.py b/tests/misc/test_parameters.py index 7683eb7064e..4e86418ae63 100644 --- a/tests/misc/test_parameters.py +++ b/tests/misc/test_parameters.py @@ -2,6 +2,18 @@ import numpy as np import psyneulink as pnl import pytest +import re +import warnings + + +def shared_parameter_warning_regex(param_name, shared_name=None): + if shared_name is None: + shared_name = param_name + + return ( + f'Specification of the "{param_name}" parameter.*conflicts' + f' with specification of its shared parameter "{shared_name}"' + ) # (ancestor, child, should_override) @@ -339,12 +351,56 @@ def test_override_tmech(self, integrator_function, expected_rate): def test_conflict_warning(self): with pytest.warns( UserWarning, - match=( - 'Specification of the "integration_rate" parameter.*conflicts' - ' with specification of its shared parameter "rate"' - ) + match=shared_parameter_warning_regex('integration_rate', 'rate') ): pnl.TransferMechanism( integration_rate=.1, integrator_function=pnl.AdaptiveIntegrator(rate=.2) ) + + @pytest.mark.parametrize( + 'mech_type, param_name, shared_param_name, param_value', + [ + (pnl.LCAMechanism, 'noise', 'noise', pnl.GaussianDistort), + (pnl.LCAMechanism, 'noise', 'noise', pnl.GaussianDistort()), + (pnl.TransferMechanism, 'noise', 'noise', pnl.NormalDist), + (pnl.TransferMechanism, 'noise', 'noise', pnl.NormalDist()), + (pnl.TransferMechanism, 'noise', 'noise', [pnl.NormalDist()]), + ] + ) + def test_conflict_no_warning( + self, + mech_type, + param_name, + shared_param_name, + param_value + ): + # pytest doesn't support inverse warning assertion for specific + # warning only + with warnings.catch_warnings(): + warnings.simplefilter(action='error', category=UserWarning) + try: + mech_type(**{param_name: param_value}) + except UserWarning as w: + if re.match(shared_parameter_warning_regex(param_name, shared_param_name), str(w)): + raise + + def test_conflict_no_warning_parser(self): + # replace with different 
class/parameter if _parse_noise ever implemented + assert not hasattr(pnl.AdaptiveIntegrator.Parameters, '_parse_noise') + pnl.AdaptiveIntegrator.Parameters._parse_noise = lambda self, noise: 2 * noise + + # pytest doesn't support inverse warning assertion for specific + # warning only + with warnings.catch_warnings(): + warnings.simplefilter(action='error', category=UserWarning) + try: + pnl.TransferMechanism( + noise=2, + integrator_function=pnl.AdaptiveIntegrator(noise=1) + ) + except UserWarning as w: + if re.match(shared_parameter_warning_regex('noise'), str(w)): + raise + + delattr(pnl.AdaptiveIntegrator.Parameters, '_parse_noise') diff --git a/tests/projections/test_projection_specifications.py b/tests/projections/test_projection_specifications.py index e7bff1857f0..cf3a4af8327 100644 --- a/tests/projections/test_projection_specifications.py +++ b/tests/projections/test_projection_specifications.py @@ -216,49 +216,55 @@ def test_2_item_tuple_from_input_and_output_ports_to_gating_signals(self): assert T.input_ports[0].mod_afferents[0].sender==G.gating_signals[0] assert T.output_ports[0].mod_afferents[0].sender==G.gating_signals[1] - def test_formats_for_control_specification_for_mechanism_and_function_params(self): - - control_spec_list = [ - pnl.CONTROL, - pnl.CONTROL_SIGNAL, - pnl.CONTROL_PROJECTION, - pnl.ControlSignal, - pnl.ControlSignal(), - pnl.ControlProjection, - "CP_OBJECT", - pnl.ControlMechanism, - pnl.ControlMechanism(), - pnl.ControlMechanism, - (0.3, pnl.CONTROL), - (0.3, pnl.CONTROL_SIGNAL), - (0.3, pnl.CONTROL_PROJECTION), - (0.3, pnl.ControlSignal), - (0.3, pnl.ControlSignal()), - (0.3, pnl.ControlProjection), - (0.3, "CP_OBJECT"), - (0.3, pnl.ControlMechanism), - (0.3, pnl.ControlMechanism()), - (0.3, pnl.ControlMechanism) - ] - for i, ctl_tuple in enumerate([j for j in zip(control_spec_list, reversed(control_spec_list))]): - C1, C2 = ctl_tuple - - # This shenanigans is to avoid assigning the same instantiated ControlProjection more than 
once - if C1 == 'CP_OBJECT': - C1 = pnl.ControlProjection() - elif isinstance(C1, tuple) and C1[1] == 'CP_OBJECT': - C1 = (C1[0], pnl.ControlProjection()) - if C2 == 'CP_OBJECT': - C2 = pnl.ControlProjection() - elif isinstance(C2, tuple) and C2[1] == 'CP_OBJECT': - C2 = (C2[0], pnl.ControlProjection()) - - R = pnl.RecurrentTransferMechanism(noise=C1, - function=psyneulink.core.components.functions.transferfunctions.Logistic(gain=C2)) - assert R.parameter_ports[pnl.NOISE].mod_afferents[0].name in \ - 'ControlProjection for RecurrentTransferMechanism-{}[noise]'.format(i) - assert R.parameter_ports[pnl.GAIN].mod_afferents[0].name in \ - 'ControlProjection for RecurrentTransferMechanism-{}[gain]'.format(i) + control_spec_list = [ + pnl.CONTROL, + pnl.CONTROL_SIGNAL, + pnl.CONTROL_PROJECTION, + pnl.ControlSignal, + pnl.ControlSignal(), + pnl.ControlProjection, + "CP_OBJECT", + pnl.ControlMechanism, + pnl.ControlMechanism(), + pnl.ControlMechanism, + (0.3, pnl.CONTROL), + (0.3, pnl.CONTROL_SIGNAL), + (0.3, pnl.CONTROL_PROJECTION), + (0.3, pnl.ControlSignal), + (0.3, pnl.ControlSignal()), + (0.3, pnl.ControlProjection), + (0.3, "CP_OBJECT"), + (0.3, pnl.ControlMechanism), + (0.3, pnl.ControlMechanism()), + (0.3, pnl.ControlMechanism) + ] + + @pytest.mark.parametrize( + 'noise, gain', + [(noise, gain) for noise, gain in [j for j in zip(control_spec_list, reversed(control_spec_list))]] + ) + def test_formats_for_control_specification_for_mechanism_and_function_params(self, noise, gain): + # This shenanigans is to avoid assigning the same instantiated ControlProjection more than once + if noise == 'CP_OBJECT': + noise = pnl.ControlProjection() + elif isinstance(noise, tuple) and noise[1] == 'CP_OBJECT': + noise = (noise[0], pnl.ControlProjection()) + if gain == 'CP_OBJECT': + gain = pnl.ControlProjection() + elif isinstance(gain, tuple) and gain[1] == 'CP_OBJECT': + gain = (gain[0], pnl.ControlProjection()) + + R = pnl.RecurrentTransferMechanism( + # NOTE: fixed name 
prevents failures due to registry naming + # for parallel test runs + name='R-CONTROL', + noise=noise, + function=psyneulink.core.components.functions.transferfunctions.Logistic(gain=gain) + ) + assert R.parameter_ports[pnl.NOISE].mod_afferents[0].name in \ + 'ControlProjection for R-CONTROL[noise]' + assert R.parameter_ports[pnl.GAIN].mod_afferents[0].name in \ + 'ControlProjection for R-CONTROL[gain]' gating_spec_list = [ pnl.GATING, diff --git a/tutorial_requirements.txt b/tutorial_requirements.txt index b0817b2e274..fc0bb1f1ac5 100644 --- a/tutorial_requirements.txt +++ b/tutorial_requirements.txt @@ -1,3 +1,3 @@ -graphviz<0.16.0 +graphviz<0.17.0 jupyter<=1.0.0 matplotlib<3.3.4