Skip to content

Commit

Permalink
tests: Use MappingProxyType to pass dict test parameters (#3076)
Browse files Browse the repository at this point in the history
Prevent accidental modifications of global dictionary instances.
  • Loading branch information
jvesely authored Oct 21, 2024
2 parents 1919965 + 4cfd3d8 commit ad50edf
Show file tree
Hide file tree
Showing 13 changed files with 48 additions and 42 deletions.
8 changes: 8 additions & 0 deletions conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import pytest
import re
import sys
import types

import graph_scheduler as gs
import psyneulink
Expand Down Expand Up @@ -46,6 +47,13 @@ def pytest_runtest_setup(item):
# Check that all 'cuda' tests are also marked 'llvm'
assert 'llvm' in item.keywords or 'cuda' not in item.keywords

# If the item is a parametrized function, it has a 'callspec' attribute.
# Convert any dict arguments to an immutable MappingProxyType.
if hasattr(item, 'callspec'):
for k, v in item.callspec.params.items():
if isinstance(v, dict):
item.callspec.params[k] = types.MappingProxyType(v)

for m in marks_default_skip:
if m in item.keywords and not item.config.getvalue(m):
pytest.skip('{0} tests not requested'.format(m))
Expand Down
1 change: 1 addition & 0 deletions psyneulink/_typing.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,4 +16,5 @@
Dict as Dict,
Iterable as Iterable,
Set as Set,
Mapping as Mapping,
)
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@
torch = None
from beartype import beartype

from psyneulink._typing import Optional, Union, Callable
from psyneulink._typing import Callable, Mapping, Optional, Union

from psyneulink.core import llvm as pnlvm
from psyneulink.core.components.component import parameter_keywords
Expand Down Expand Up @@ -3205,7 +3205,7 @@ def __init__(self,
adapt_entropy_weighting: Optional[ValidParamSpecType] = None,
output=None,
per_item=None,
params: Optional[dict] = None,
params: Optional[Mapping] = None,
owner=None,
prefs: Optional[ValidPrefSet] = None):

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
import numpy as np
from beartype import beartype

from psyneulink._typing import Optional, Union, Callable
from psyneulink._typing import Callable, Mapping, Optional, Union

from psyneulink.core import llvm as pnlvm
from psyneulink.core.components.component import DefaultsFlexibility
Expand Down Expand Up @@ -230,7 +230,7 @@ def __init__(self,
rate=None,
noise=None,
initializer=None,
params: Optional[dict] = None,
params: Optional[Mapping] = None,
owner=None,
prefs: Optional[ValidPrefSet] = None,
context=None,
Expand Down Expand Up @@ -559,7 +559,7 @@ def __init__(self,
increment=None,
noise=None,
initializer=None,
params: Optional[dict] = None,
params: Optional[Mapping] = None,
owner=None,
prefs: Optional[ValidPrefSet] = None):

Expand Down Expand Up @@ -836,7 +836,7 @@ def __init__(self,
noise=None,
offset=None,
initializer=None,
params: Optional[dict] = None,
params: Optional[Mapping] = None,
owner=None,
prefs: Optional[ValidPrefSet] = None):
super().__init__(
Expand Down Expand Up @@ -1072,7 +1072,7 @@ def __init__(self,
noise=None,
offset=None,
initializer=None,
params: Optional[dict] = None,
params: Optional[Mapping] = None,
owner=None,
prefs: Optional[ValidPrefSet] = None):

Expand Down Expand Up @@ -1592,7 +1592,7 @@ def __init__(self,
long_term_rate=None,
operation=None,
offset=None,
params: Optional[dict] = None,
params: Optional[Mapping] = None,
owner=None,
prefs: Optional[ValidPrefSet] = None):

Expand Down Expand Up @@ -2028,7 +2028,7 @@ def __init__(self,
min_val: Optional[ValidParamSpecType] = None,
noise=None,
initializer=None,
params: Optional[dict] = None,
params: Optional[Mapping] = None,
owner=None,
prefs: Optional[ValidPrefSet] = None,
# **kwargs
Expand Down Expand Up @@ -2451,7 +2451,7 @@ def __init__(
threshold=None,
time_step_size=None,
seed=None,
params: Optional[dict] = None,
params: Optional[Mapping] = None,
owner=None,
prefs: Optional[ValidPrefSet] = None,
**kwargs
Expand Down Expand Up @@ -3010,7 +3010,7 @@ def __init__(self,
initializer=None,
angle_function=None,
seed=None,
params: Optional[dict] = None,
params: Optional[Mapping] = None,
owner=None,
prefs: Optional[ValidPrefSet] = None,
**kwargs):
Expand Down Expand Up @@ -3466,7 +3466,7 @@ def __init__(
non_decision_time=None,
time_step_size=None,
starting_value=None,
params: Optional[dict] = None,
params: Optional[Mapping] = None,
seed=None,
owner=None,
prefs: Optional[ValidPrefSet] = None,
Expand Down Expand Up @@ -3761,7 +3761,7 @@ def __init__(self,
offset=None,
time_step_size=None,
initializer=None,
params: Optional[dict] = None,
params: Optional[Mapping] = None,
owner=None,
prefs: Optional[ValidPrefSet] = None,
**kwargs):
Expand Down Expand Up @@ -4466,7 +4466,7 @@ def __init__(self,
mode=None,
uncorrelated_activity=None,
integration_method=None,
params: Optional[dict] = None,
params: Optional[Mapping] = None,
owner=None,
prefs: Optional[ValidPrefSet] = None,
**kwargs):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,14 +29,11 @@
import warnings
from collections import deque

from psyneulink._typing import Callable, List, Literal
from psyneulink._typing import Callable, List, Literal, Mapping, Optional, Union

import numpy as np
from beartype import beartype

from typing import Optional, Union
# from psyneulink._typing import

from psyneulink.core import llvm as pnlvm
from psyneulink.core.components.functions.function import (
DEFAULT_SEED, FunctionError, _random_state_getter, _seed_setter, EPSILON, _noise_setter
Expand Down Expand Up @@ -253,8 +250,8 @@ def __init__(self,
history:Optional[int]=None,
# history: Optional[int] = None,
initializer=None,
params: Optional[dict] = None,
# params: Optional[dict] = None,
params: Optional[Mapping] = None,
# params: Optional[Mapping] = None,
owner=None,
prefs: Optional[ValidPrefSet] = None
):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
import numpy as np
from beartype import beartype

from psyneulink._typing import Optional
from psyneulink._typing import Mapping, Optional

from psyneulink.core import llvm as pnlvm
from psyneulink.core.components.component import DefaultsFlexibility, _has_initializers_setter, ComponentsMeta
Expand Down Expand Up @@ -235,7 +235,7 @@ def __init__(self,
rate=None,
noise=None,
initializer=None,
params: Optional[dict] = None,
params: Optional[Mapping] = None,
owner=None,
prefs: Optional[ValidPrefSet] = None,
context=None,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@

from beartype import beartype

from psyneulink._typing import Optional
from psyneulink._typing import Mapping, Optional

from psyneulink.core.components.component import parameter_keywords
from psyneulink.core.components.functions.nonstateful.transferfunctions import Linear
Expand Down Expand Up @@ -243,7 +243,7 @@ def __init__(self,
weight=None,
exponent=None,
function=None,
control_signal_params:Optional[dict]=None,
control_signal_params:Optional[Mapping]=None,
params=None,
name=None,
prefs: Optional[ValidPrefSet] = None,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@
"""
from beartype import beartype

from psyneulink._typing import Optional
from psyneulink._typing import Optional, Mapping

from psyneulink.core.components.component import parameter_keywords
from psyneulink.core.components.functions.function import FunctionOutputType
Expand Down Expand Up @@ -244,7 +244,7 @@ def __init__(self,
function=None,
weight=None,
exponent=None,
gating_signal_params:Optional[dict]=None,
gating_signal_params:Optional[Mapping]=None,
params=None,
name=None,
prefs: Optional[ValidPrefSet] = None,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -204,7 +204,7 @@
import numpy as np
from beartype import beartype

from psyneulink._typing import Optional, Union, Callable, Literal
from psyneulink._typing import Callable, Literal, Mapping, Optional, Union

from psyneulink.core.components.component import parameter_keywords
from psyneulink.core.components.functions.nonstateful.combinationfunctions import LinearCombination
Expand Down Expand Up @@ -481,12 +481,12 @@ def __init__(self,
error_function: Optional[Callable] = None,
learning_function: Optional[Callable] = None,
# FIX: 10/3/17 - TEST IF THIS OK AND REINSTATE IF SO
# learning_signal_params:Optional[dict]=None,
# learning_signal_params:Optional[Mapping]=None,
# learning_rate: Optional[ValidParamSpecType] = None,
learning_enabled: Optional[Union[bool, Literal['online', 'after']]] = None,
weight=None,
exponent=None,
params: Optional[dict] = None,
params: Optional[Mapping] = None,
name=None,
prefs: Optional[ValidPrefSet] = None,
**kwargs
Expand Down
4 changes: 2 additions & 2 deletions psyneulink/core/compositions/composition.py
Original file line number Diff line number Diff line change
Expand Up @@ -2897,7 +2897,7 @@ def input_function(env, result):
from PIL import Image
from beartype import beartype

from psyneulink._typing import Optional, Union, Literal, Type, Callable, List, Set
from psyneulink._typing import Callable, Literal, List, Mapping, Optional, Set, Type, Union

from psyneulink.core import llvm as pnlvm
from psyneulink.core.components.component import Component, ComponentError, ComponentsMeta
Expand Down Expand Up @@ -11514,7 +11514,7 @@ def run(
def learn(
self,
inputs: dict,
targets: Optional[dict] = None,
targets: Optional[Mapping] = None,
num_trials: Optional[int] = None,
epochs: int = 1,
learning_rate: Optional[Union[int,float]]=None,
Expand Down
6 changes: 3 additions & 3 deletions psyneulink/library/compositions/autodiffcomposition.py
Original file line number Diff line number Diff line change
Expand Up @@ -330,7 +330,6 @@
import collections
from packaging import version
from pathlib import Path, PosixPath
from typing import Optional

try:
import torch
Expand All @@ -343,6 +342,7 @@
from psyneulink.library.compositions.pytorchwrappers import PytorchCompositionWrapper
from psyneulink.library.compositions.pytorchshowgraph import PytorchShowGraph

Check notice

Code scanning / CodeQL

Cyclic import Note

Import of module
psyneulink.library.compositions.pytorchshowgraph
begins an import cycle.

from psyneulink._typing import Mapping, Optional
from psyneulink.core.components.mechanisms.processing.processingmechanism import ProcessingMechanism
from psyneulink.core.components.mechanisms.processing.compositioninterfacemechanism import CompositionInterfaceMechanism
from psyneulink.core.components.mechanisms.modulatory.modulatorymechanism import ModulatoryMechanism_Base
Expand Down Expand Up @@ -1456,8 +1456,8 @@ def execute(self,
runtime_params=None,
execution_mode:pnlvm.ExecutionMode = pnlvm.ExecutionMode.PyTorch,
skip_initialization=False,
synch_with_pnl_options:Optional[dict]=None,
retain_in_pnl_options:Optional[dict]=None,
synch_with_pnl_options:Optional[Mapping]=None,
retain_in_pnl_options:Optional[Mapping]=None,
report_output:ReportOutput=ReportOutput.OFF,
report_params:ReportOutput=ReportParams.OFF,
report_progress:ReportProgress=ReportProgress.OFF,
Expand Down
14 changes: 7 additions & 7 deletions psyneulink/library/compositions/compositionrunner.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,9 @@
# ********************************************* AutodiffComposition *************************************************

import numpy as np
from typing import Optional
from types import GeneratorType

from psyneulink._typing import Mapping, Optional
from psyneulink.core.llvm import ExecutionMode
from psyneulink.core.compositions.composition import Composition
from psyneulink.core.compositions.report import Report, ReportProgress, ReportDevices, LEARN_REPORT, PROGRESS_REPORT
Expand Down Expand Up @@ -55,8 +55,8 @@ def _batch_inputs(self,
minibatch_size: int = 1,
optimizations_per_minibatch: int = 1,
randomize: bool = True,
synch_with_pnl_options:Optional[dict] = None,
retain_in_pnl_options:Optional[dict] = None,
synch_with_pnl_options:Optional[Mapping] = None,
retain_in_pnl_options:Optional[Mapping] = None,
call_before_minibatch=None,
call_after_minibatch=None,
early_stopper=None,
Expand Down Expand Up @@ -161,8 +161,8 @@ def _batch_function_inputs(self,
num_trials: int,
batch_size: int = 1,
optimizations_per_minibatch: int = 1,
synch_with_pnl_options:Optional[dict] = None,
retain_in_pnl_options:Optional[dict] = None,
synch_with_pnl_options:Optional[Mapping] = None,
retain_in_pnl_options:Optional[Mapping] = None,
call_before_minibatch=None,
call_after_minibatch=None,
early_stopper=None,
Expand Down Expand Up @@ -223,8 +223,8 @@ def run_learning(self,
patience: int = None,
min_delta: int = 0,
randomize_minibatches: bool = True,
synch_with_pnl_options:Optional[dict] = None,
retain_in_pnl_options:Optional[dict] = None,
synch_with_pnl_options:Optional[Mapping] = None,
retain_in_pnl_options:Optional[Mapping] = None,
call_before_minibatch = None,
call_after_minibatch = None,
context=None,
Expand Down
2 changes: 1 addition & 1 deletion tests/functions/test_integrator.py
Original file line number Diff line number Diff line change
Expand Up @@ -207,7 +207,7 @@ def test_execute(func, func_mode, variable, noise, params, benchmark):
if issubclass(func_class, (pnl.DriftDiffusionIntegrator, pnl.DriftOnASphereIntegrator)):
pytest.skip("{} doesn't support functional noise".format(func_class.componentName))

if 'DriftOnASphereIntegrator' in func[0].componentName:
if issubclass(func_class, pnl.DriftOnASphereIntegrator):
params = {**params, 'dimension': len(variable) + 1}

elif issubclass(func_class, pnl.AccumulatorIntegrator):
Expand Down

0 comments on commit ad50edf

Please sign in to comment.