tests: split core and psyneulink integration tests
kmantel committed Dec 13, 2023
1 parent 205fd77 commit 7acff76
Showing 12 changed files with 1,251 additions and 976 deletions.
67 changes: 67 additions & 0 deletions .github/workflows/ci-integration.yml
@@ -0,0 +1,67 @@
name: CI Integration

on:
push:
branches-ignore:
- 'dependabot/**'
paths-ignore:
- 'docs/**'
- 'docs_requirements.txt'
tags-ignore:
- 'v**'
pull_request:
paths-ignore:
- 'docs/**'
- 'docs_requirements.txt'

jobs:
psyneulink:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
python-version: [3.7, 3.8, 3.9, '3.10', 3.11]
os: [ubuntu-latest, macos-latest, windows-latest]

steps:
- name: Checkout sources
uses: actions/checkout@v3
with:
# need history and tags for versioneer
fetch-depth: 0

- name: Set up Python ${{ matrix.python-version }}
uses: actions/[email protected]
with:
python-version: ${{ matrix.python-version }}
architecture: ${{ matrix.python-architecture }}

- name: Get pip cache location
shell: bash
id: pip_cache
run: |
python -m pip install -U pip
python -m pip --version
echo ::set-output name=pip_cache_dir::$(python -m pip cache dir)
- name: Wheels cache
uses: actions/[email protected]
with:
path: ${{ steps.pip_cache.outputs.pip_cache_dir }}/wheels
key: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-v2-${{ github.sha }}
restore-keys: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-v2

- name: Install package
run: python -m pip install -r psyneulink_requirements.txt && python -m pip install -e .[dev]

- name: Test with pytest
timeout-minutes: 80
run: pytest --junit-xml=tests_out.xml --verbosity=0 -n auto --no-cov tests/integration/psyneulink/

- name: Upload test results
uses: actions/upload-artifact@v3
with:
name: test-results-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.python-architecture }}
path: tests_out.xml
retention-days: 5
if: success() || failure()
4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
@@ -1,4 +1,4 @@
name: CI
name: CI Core

on:
push:
@@ -72,7 +72,7 @@ jobs:
- name: Test with pytest
timeout-minutes: 80
run: pytest --junit-xml=tests_out.xml --verbosity=0 -n auto
run: pytest --junit-xml=tests_out.xml --verbosity=0 -n auto --ignore tests/integration/

- name: Upload test results
uses: actions/upload-artifact@v3
52 changes: 10 additions & 42 deletions conftest.py
@@ -1,52 +1,20 @@
import doctest

import pytest
from psyneulink.core import llvm as pnlvm


def pytest_runtest_setup(item):
if 'cuda' in item.keywords and not pnlvm.ptx_enabled:
pytest.skip('PTX engine not enabled/available')

doctest.ELLIPSIS_MARKER = "[...]"


def pytest_generate_tests(metafunc):
if "comp_mode_no_llvm" in metafunc.fixturenames:
modes = [m for m in get_comp_execution_modes()
if m.values[0] is not pnlvm.ExecutionMode.LLVM]
metafunc.parametrize("comp_mode", modes)

elif "comp_mode" in metafunc.fixturenames:
metafunc.parametrize("comp_mode", get_comp_execution_modes())


def pytest_runtest_teardown(item):
pnlvm.cleanup()


@pytest.fixture
def comp_mode_no_llvm():
# dummy fixture to allow 'comp_mode' filtering
pass


@pytest.helpers.register
def get_comp_execution_modes():
return [
pytest.param(pnlvm.ExecutionMode.Python),
pytest.param(pnlvm.ExecutionMode.LLVM, marks=pytest.mark.llvm),
pytest.param(pnlvm.ExecutionMode.LLVMExec, marks=pytest.mark.llvm),
pytest.param(pnlvm.ExecutionMode.LLVMRun, marks=pytest.mark.llvm),
pytest.param(pnlvm.ExecutionMode.PTXExec, marks=[pytest.mark.llvm, pytest.mark.cuda]),
pytest.param(pnlvm.ExecutionMode.PTXRun, marks=[pytest.mark.llvm, pytest.mark.cuda])
]


# TODO: remove this helper when tests no longer use psyneulink
@pytest.helpers.register
def composition_to_scheduler_args(composition):
return {
'graph': composition.graph_processing.prune_feedback_edges()[0],
'default_execution_id': composition.default_execution_id
}
def setify_expected_output(expected_output):
type_set = type(set())
for i in range(len(expected_output)):
if type(expected_output[i]) is not type_set:
try:
iter(expected_output[i])
expected_output[i] = set(expected_output[i])
except TypeError:
expected_output[i] = set([expected_output[i]])
return expected_output
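
For reference, a minimal sketch of what this helper does, using hypothetical values and a plain function call for illustration: every entry that is not already a set is converted to one (scalars are wrapped), and the list is modified in place and returned, so expected outputs can be compared without regard to ordering inside an entry.

expected = setify_expected_output([[1, 2], 3, {4}])
# expected is now [{1, 2}, {3}, {4}]
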
1 change: 0 additions & 1 deletion dev_requirements.txt
@@ -1,4 +1,3 @@
psyneulink >=0.9.1.0
pydot
pytest
pytest-cov
1 change: 1 addition & 0 deletions psyneulink_requirements.txt
@@ -0,0 +1 @@
psyneulink >=0.9.1.0
47 changes: 47 additions & 0 deletions tests/integration/psyneulink/conftest.py
@@ -0,0 +1,47 @@
import pytest
from psyneulink.core import llvm as pnlvm


def pytest_runtest_setup(item):
if 'cuda' in item.keywords and not pnlvm.ptx_enabled:
pytest.skip('PTX engine not enabled/available')


def pytest_generate_tests(metafunc):
if "comp_mode_no_llvm" in metafunc.fixturenames:
modes = [m for m in get_comp_execution_modes()
if m.values[0] is not pnlvm.ExecutionMode.LLVM]
metafunc.parametrize("comp_mode", modes)

elif "comp_mode" in metafunc.fixturenames:
metafunc.parametrize("comp_mode", get_comp_execution_modes())


def pytest_runtest_teardown(item):
pnlvm.cleanup()


@pytest.fixture
def comp_mode_no_llvm():
# dummy fixture to allow 'comp_mode' filtering
pass


@pytest.helpers.register
def get_comp_execution_modes():
return [
pytest.param(pnlvm.ExecutionMode.Python),
pytest.param(pnlvm.ExecutionMode.LLVM, marks=pytest.mark.llvm),
pytest.param(pnlvm.ExecutionMode.LLVMExec, marks=pytest.mark.llvm),
pytest.param(pnlvm.ExecutionMode.LLVMRun, marks=pytest.mark.llvm),
pytest.param(pnlvm.ExecutionMode.PTXExec, marks=[pytest.mark.llvm, pytest.mark.cuda]),
pytest.param(pnlvm.ExecutionMode.PTXRun, marks=[pytest.mark.llvm, pytest.mark.cuda])
]


@pytest.helpers.register
def composition_to_scheduler_args(composition):
return {
'graph': composition.graph_processing.prune_feedback_edges()[0],
'default_execution_id': composition.default_execution_id
}
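
A hedged sketch of how these helpers might be consumed by an integration test; apart from composition_to_scheduler_args (registered above with @pytest.helpers.register), every name below is an assumption rather than part of this commit. Tests that take a comp_mode (or comp_mode_no_llvm) argument are instead parametrized over the modes returned by get_comp_execution_modes through pytest_generate_tests above.

import graph_scheduler  # assumed import name for this package
import psyneulink as pnl
import pytest


def test_scheduler_from_composition_sketch():
    # build a trivial PsyNeuLink Composition and hand its processing graph
    # to the Scheduler via the registered helper
    comp = pnl.Composition(pathways=[pnl.TransferMechanism(name='A')])
    scheduler = graph_scheduler.Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
    assert scheduler.default_execution_id == comp.default_execution_id
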
36 changes: 36 additions & 0 deletions tests/integration/psyneulink/test_condition_psyneulink.py
@@ -0,0 +1,36 @@
import numpy as np
import psyneulink as pnl
import pytest


@pytest.mark.psyneulink
class TestCondition:
@pytest.mark.parametrize(
'parameter, indices, default_variable, integration_rate, expected_results',
[
('value', None, None, 1, [[[10]]]),
('value', (0, 0), [[0, 0]], [1, 2], [[[10, 20]]]),
('value', (0, 1), [[0, 0]], [1, 2], [[[5, 10]]]),
('num_executions', pnl.TimeScale.TRIAL, None, 1, [[[10]]]),
]
)
@pytest.mark.parametrize('threshold', [10, 10.0])
def test_Threshold_parameters(
self, parameter, indices, default_variable, integration_rate, expected_results, threshold,
):

A = pnl.TransferMechanism(
default_variable=default_variable,
integrator_mode=True,
integrator_function=pnl.SimpleIntegrator,
integration_rate=integration_rate,
)
comp = pnl.Composition(pathways=[A])

comp.termination_processing = {
pnl.TimeScale.TRIAL: pnl.Threshold(A, parameter, threshold, '>=', indices=indices)
}

comp.run(inputs={A: np.ones(A.defaults.variable.shape)})

np.testing.assert_array_equal(comp.results, expected_results)
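
Reading the parametrization above (an inference from the code, not text in the commit): with integrator_mode and SimpleIntegrator, the mechanism's value after n passes is roughly n × integration_rate × input, so with an input of ones the scalar case reaches the threshold of 10 after 10 passes (results [[[10]]]), while with integration_rate [1, 2] the element at index (0, 1) reaches 10 after 5 passes, when the value is [[5, 10]].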