Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

V2.4.3 #524

Merged
merged 3 commits into from
May 9, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions Changelog.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
# CHANGELOG
All notable changes to this project will be documented here.

## [v2.4.3]
- Omit skipped test cases in Python tester (#522)

## [v2.4.2]
- Ensure _env_status is updated to "setup" earlier when a request to update test settings is made (#499)

Expand Down
1 change: 1 addition & 0 deletions server/autotest_server/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -343,6 +343,7 @@ def run_test(settings_id, test_id, files_url, categories, user, test_env_vars):
redis_connection().hset("autotest:settings", key=settings_id, value=json.dumps(settings))
test_username, tests_path = tester_user()
try:
_clear_working_directory(tests_path, test_username)
_setup_files(settings_id, user, files_url, tests_path, test_username)
cmd = run_test_command(test_username=test_username)
results = _run_test_specs(cmd, settings, categories, tests_path, test_username, test_id, test_env_vars)
Expand Down
2 changes: 1 addition & 1 deletion server/autotest_server/testers/py/py_tester.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ def pytest_runtest_makereport(self, item, call):
"""
outcome = yield
rep = outcome.get_result()
if rep.failed or item.nodeid not in self.results:
if rep.failed or (item.nodeid not in self.results and not rep.skipped and rep.when != "teardown"):
self.results[item.nodeid] = {
"status": "failure" if rep.failed else "success",
"name": item.nodeid,
Expand Down
2 changes: 1 addition & 1 deletion server/autotest_server/testers/tester.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from abc import ABC, abstractmethod
from functools import wraps
from typing import Optional, Callable, Any, Type, Dict, List
from testers.specs import TestSpecs
from .specs import TestSpecs
import traceback


Expand Down
39 changes: 39 additions & 0 deletions server/autotest_server/tests/test_autotest_server.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
import subprocess

import pytest
import fakeredis
import rq
import autotest_server
import os


@pytest.fixture
Expand All @@ -26,3 +29,39 @@ def fake_redis_db(monkeypatch, fake_job):

def test_redis_connection(fake_redis_conn):
    """redis_connection() should return the fake connection installed by the fixture."""
    assert autotest_server.redis_connection() == fake_redis_conn


def test_sticky():
    """_clear_working_directory must remove a sticky-bit, mode-000 directory
    owned by the worker user from the worker's working directory.
    """
    workers = autotest_server.config["workers"]
    autotest_worker = workers[0]["user"]
    autotest_worker_working_dir = f"/home/docker/.autotesting/workers/{autotest_worker}"
    path = f"{autotest_worker_working_dir}/test_sticky"

    if not os.path.exists(path):
        # Build the fixture directory as the worker user. Argument lists
        # (shell=False) avoid shell quoting/injection issues, and check=True
        # makes a failed setup step fail the test instead of passing silently.
        subprocess.run(["sudo", "-u", autotest_worker, "mkdir", path], check=True)
        subprocess.run(["sudo", "-u", autotest_worker, "chmod", "000", path], check=True)
        subprocess.run(["sudo", "-u", autotest_worker, "chmod", "+t", path], check=True)

    autotest_server._clear_working_directory(autotest_worker_working_dir, autotest_worker)

    assert not os.path.exists(path)


def test_pre_remove():
    """_clear_working_directory must remove an unreadable (mode-000)
    __pycache__ directory owned by the worker user.
    """
    workers = autotest_server.config["workers"]
    autotest_worker = workers[0]["user"]
    autotest_worker_working_dir = f"/home/docker/.autotesting/workers/{autotest_worker}"
    path = f"{autotest_worker_working_dir}/__pycache__"

    if not os.path.exists(path):
        # Build the fixture directory as the worker user. Argument lists
        # (shell=False) avoid shell quoting/injection issues, and check=True
        # makes a failed setup step fail the test instead of passing silently.
        subprocess.run(["sudo", "-u", autotest_worker, "mkdir", path], check=True)
        subprocess.run(["sudo", "-u", autotest_worker, "chmod", "000", path], check=True)

    autotest_server._clear_working_directory(autotest_worker_working_dir, autotest_worker)

    assert not os.path.exists(path)
Empty file.
Empty file.
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
import pytest


def add_one(x):
    """Return *x* incremented by one."""
    incremented = 1 + x
    return incremented


@pytest.mark.skip
def test_add_one():
    # Intentionally skipped: presumably a fixture used to verify that the
    # Python tester omits skipped test cases from its results — confirm
    # against the tester's test suite.
    assert add_one(1) == 2
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
def add_one(x):
    """Return the successor of *x*."""
    result = x
    result += 1
    return result


def test_add_one():
    # Sanity check for add_one; this test is expected to run (not skipped).
    assert add_one(1) == 2
63 changes: 63 additions & 0 deletions server/autotest_server/tests/testers/py/test_py_tester.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
from ....testers.specs import TestSpecs
from ....testers.py.py_tester import PyTester


def test_success(request, monkeypatch) -> None:
    """Test that when a test succeeds, it is added to the results."""
    # Run relative to this test file so the fixture path resolves.
    monkeypatch.chdir(request.fspath.dirname)

    script = "fixtures/sample_tests_success.py"
    specs_json = """
    {
        "test_data": {
            "script_files": ["fixtures/sample_tests_success.py"],
            "category": ["instructor"],
            "timeout": 30,
            "tester": "pytest",
            "output_verbosity": "short",
            "extra_info": {
                "criterion": "",
                "name": "Python Test Group 1"
            }
        }
    }
    """
    tester = PyTester(specs=TestSpecs.from_json(specs_json))

    results = tester.run_python_tests()

    # Exactly one script was run, producing exactly one result.
    assert len(results) == 1
    assert script in results
    assert len(results[script]) == 1

    outcome = results[script][0]
    assert outcome["status"] == "success"
    # nodeid is inexact in CI test
    assert outcome["name"].endswith(f"{script}::test_add_one")
    assert outcome["errors"] == ""
    assert outcome["description"] is None


def test_skip(request, monkeypatch) -> None:
    """Test that when a test is skipped, it is omitted from the results."""
    # Run relative to this test file so the fixture path resolves.
    monkeypatch.chdir(request.fspath.dirname)

    specs_json = """
    {
        "test_data": {
            "script_files": ["fixtures/sample_tests_skip.py"],
            "category": ["instructor"],
            "timeout": 30,
            "tester": "pytest",
            "output_verbosity": "short",
            "extra_info": {
                "criterion": "",
                "name": "Python Test Group 1"
            }
        }
    }
    """
    tester = PyTester(specs=TestSpecs.from_json(specs_json))

    # The script appears in the results, but its only (skipped) test does not.
    assert tester.run_python_tests() == {"fixtures/sample_tests_skip.py": []}
Loading