Commit bb74535

Merge commit 'c09578b4249e70cc9148d012055b1071f2ad19a9'

* commit 'c09578b4249e70cc9148d012055b1071f2ad19a9':
  Squashed 'json/' changes from b7d13f4b..69acf529

Julian committed Jul 11, 2022
2 parents 36d098e + c09578b · commit bb74535

Showing 49 changed files with 852 additions and 62 deletions.
2 changes: 2 additions & 0 deletions json/.github/CODEOWNERS
@@ -0,0 +1,2 @@
# Ping the entire test suite team by default.
* @json-schema-org/test-suite-team
1 change: 1 addition & 0 deletions json/README.md
@@ -286,6 +286,7 @@ Node-specific support is maintained in a [separate repository](https://github.co
* [fastjsonschema](https://github.com/seznam/python-fastjsonschema)
* [hypothesis-jsonschema](https://github.com/Zac-HD/hypothesis-jsonschema)
* [jschon](https://github.com/marksparkza/jschon)
* [python-experimental, OpenAPI Generator](https://github.com/OpenAPITools/openapi-generator/blob/master/docs/generators/python-experimental.md)

### Ruby

135 changes: 73 additions & 62 deletions json/bin/jsonschema_suite
@@ -1,7 +1,7 @@
#! /usr/bin/env python3
from pathlib import Path
import argparse
import errno
import fnmatch
import json
import os
import random
@@ -28,119 +28,133 @@ else:
}


ROOT_DIR = os.path.abspath(
os.path.join(os.path.dirname(__file__), os.pardir).rstrip("__pycache__"),
)
SUITE_ROOT_DIR = os.path.join(ROOT_DIR, "tests")
REMOTES_DIR = os.path.join(ROOT_DIR, "remotes")

ROOT_DIR = Path(__file__).parent.parent
SUITE_ROOT_DIR = ROOT_DIR / "tests"
REMOTES_DIR = ROOT_DIR / "remotes"

with open(os.path.join(ROOT_DIR, "test-schema.json")) as schema:
TESTSUITE_SCHEMA = json.load(schema)
TESTSUITE_SCHEMA = json.loads((ROOT_DIR / "test-schema.json").read_text())


def files(paths):
"""
Each test file in the provided paths.
Each test file in the provided paths, as an array of test cases.
"""
for path in paths:
with open(path) as test_file:
yield json.load(test_file)
yield json.loads(path.read_text())


def groups(paths):
def cases(paths):
"""
Each test group within each file in the provided paths.
Each test case within each file in the provided paths.
"""
for test_file in files(paths):
for group in test_file:
yield group
yield from test_file


def cases(paths):
def tests(paths):
"""
Each individual test case within all groups within the provided paths.
Each individual test within all cases within the provided paths.
"""
for test_group in groups(paths):
for test in test_group["tests"]:
test["schema"] = test_group["schema"]
for case in cases(paths):
for test in case["tests"]:
test["schema"] = case["schema"]
yield test


def collect(root_dir):
"""
All of the test file paths within the given root directory, recursively.
"""
for root, _, files in os.walk(root_dir):
for filename in fnmatch.filter(files, "*.json"):
yield os.path.join(root, filename)
return root_dir.glob("**/*.json")


class SanityTests(unittest.TestCase):
@classmethod
def setUpClass(cls):
print("Looking for tests in %s" % SUITE_ROOT_DIR)
print("Looking for remotes in %s" % REMOTES_DIR)
print(f"Looking for tests in {SUITE_ROOT_DIR}")
print(f"Looking for remotes in {REMOTES_DIR}")

cls.test_files = list(collect(SUITE_ROOT_DIR))
cls.remote_files = list(collect(REMOTES_DIR))
print("Found %s test files" % len(cls.test_files))
print("Found %s remote files" % len(cls.remote_files))
assert cls.test_files, "Didn't find the test files!"
print(f"Found {len(cls.test_files)} test files")

cls.remote_files = list(collect(REMOTES_DIR))
assert cls.remote_files, "Didn't find the remote files!"
print(f"Found {len(cls.remote_files)} remote files")

def test_all_test_files_are_valid_json(self):
"""
All test files contain valid JSON.
"""
for path in self.test_files:
with open(path) as test_file:
try:
json.load(test_file)
except ValueError as error:
self.fail("%s contains invalid JSON (%s)" % (path, error))
try:
json.loads(path.read_text())
except ValueError as error:
self.fail(f"{path} contains invalid JSON ({error})")

def test_all_remote_files_are_valid_json(self):
"""
All remote files contain valid JSON.
"""
for path in self.remote_files:
with open(path) as remote_file:
try:
json.load(remote_file)
except ValueError as error:
self.fail("%s contains invalid JSON (%s)" % (path, error))
try:
json.loads(path.read_text())
except ValueError as error:
self.fail(f"{path} contains invalid JSON ({error})")

def test_all_descriptions_have_reasonable_length(self):
for case in cases(self.test_files):
description = case["description"]
"""
All tests have reasonably long descriptions.
"""
for count, test in enumerate(tests(self.test_files)):
description = test["description"]
self.assertLess(
len(description),
70,
"%r is too long! (keep it to less than 70 chars)" % (
description,
),
f"{description!r} is too long! (keep it to less than 70 chars)"
)
print(f"Found {count} tests.")

def test_all_descriptions_are_unique(self):
for group in groups(self.test_files):
descriptions = set(test["description"] for test in group["tests"])
"""
All test cases have unique test descriptions in their tests.
"""
for count, case in enumerate(cases(self.test_files)):
descriptions = set(test["description"] for test in case["tests"])
self.assertEqual(
len(descriptions),
len(group["tests"]),
"%r contains a duplicate description" % (group,)
len(case["tests"]),
f"{case!r} contains a duplicate description",
)
print(f"Found {count} test cases.")

@unittest.skipIf(jsonschema is None, "Validation library not present!")
def test_all_schemas_are_valid(self):
for version in os.listdir(SUITE_ROOT_DIR):
Validator = VALIDATORS.get(version)
"""
All schemas are valid under their metaschemas.
"""
for version in SUITE_ROOT_DIR.iterdir():
if not version.is_dir():
continue

Validator = VALIDATORS.get(version.name)
if Validator is not None:
test_files = collect(os.path.join(SUITE_ROOT_DIR, version))
test_files = collect(version)
for case in cases(test_files):
try:
Validator.check_schema(case["schema"])
except jsonschema.SchemaError as error:
self.fail("%s contains an invalid schema (%s)" %
(case, error))
self.fail(
f"{case} contains an invalid schema ({error})",
)
else:
warnings.warn("No schema validator for %s" % schema)
warnings.warn(f"No schema validator for {version.name}")

@unittest.skipIf(jsonschema is None, "Validation library not present!")
def test_suites_are_valid(self):
"""
All test files are valid under test-schema.json.
"""
Validator = jsonschema.validators.validator_for(TESTSUITE_SCHEMA)
validator = Validator(TESTSUITE_SCHEMA)
for tests in files(self.test_files):
Expand All @@ -153,7 +167,7 @@ class SanityTests(unittest.TestCase):
def main(arguments):
if arguments.command == "check":
suite = unittest.TestLoader().loadTestsFromTestCase(SanityTests)
result = unittest.TextTestRunner(verbosity=2).run(suite)
result = unittest.TextTestRunner().run(suite)
sys.exit(not result.wasSuccessful())
elif arguments.command == "flatten":
selected_cases = [case for case in cases(collect(arguments.version))]
@@ -166,20 +180,17 @@ def main(arguments):
remotes = {}
for path in collect(REMOTES_DIR):
relative_path = os.path.relpath(path, REMOTES_DIR)
with open(path) as schema_file:
remotes[relative_path] = json.load(schema_file)
remotes[relative_path] = json.loads(path.read_text())
json.dump(remotes, sys.stdout, indent=4, sort_keys=True)
elif arguments.command == "dump_remotes":
if arguments.update:
shutil.rmtree(arguments.out_dir, ignore_errors=True)

try:
shutil.copytree(REMOTES_DIR, arguments.out_dir)
except OSError as e:
if e.errno == errno.EEXIST:
print("%s already exists. Aborting." % arguments.out_dir)
sys.exit(1)
raise
except FileExistsError:
print(f"{arguments.out_dir} already exists. Aborting.")
sys.exit(1)
elif arguments.command == "serve":
try:
import flask
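Taken together, the changes to this script replace the os.path/fnmatch helpers with pathlib and rename the generators so that a file yields cases and a case yields tests. Condensed from the added lines above (and omitting the unittest and CLI machinery), the post-change helper pipeline reads roughly like this; treat it as an illustrative sketch rather than a verbatim excerpt of the script:

```python
from pathlib import Path
import json

ROOT_DIR = Path(__file__).parent.parent
SUITE_ROOT_DIR = ROOT_DIR / "tests"


def collect(root_dir):
    # Recursively find every .json test file (replaces os.walk + fnmatch).
    return root_dir.glob("**/*.json")


def files(paths):
    # Each test file, parsed; read_text() replaces open() + json.load().
    for path in paths:
        yield json.loads(path.read_text())


def cases(paths):
    # Each test case (a schema plus its tests) across every file.
    for test_file in files(paths):
        yield from test_file


def tests(paths):
    # Each individual test, with the owning case's schema attached.
    for case in cases(paths):
        for test in case["tests"]:
            test["schema"] = case["schema"]
            yield test
```

Each stage consumes the previous one lazily, so commands like `check` and `flatten` can walk the whole suite without loading every file up front.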
12 changes: 12 additions & 0 deletions json/package.json
@@ -0,0 +1,12 @@
{
"name": "json-schema-test-suite",
"version": "0.1.0",
"description": "A language agnostic test suite for the JSON Schema specifications",
"repository": "github:json-schema-org/JSON-Schema-Test-Suite",
"keywords": [
"json-schema",
"tests"
],
"author": "http://json-schema.org",
"license": "MIT"
}
15 changes: 15 additions & 0 deletions json/tests/draft-next/additionalProperties.json
@@ -129,5 +129,20 @@
"valid": false
}
]
},
{
"description": "additionalProperties should properly handle null data",
"schema": {
"additionalProperties": {
"type": "null"
}
},
"tests": [
{
"description": "null properties allowed",
"data": {"foo": null},
"valid": true
}
]
}
]
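This case, like the similar additions to the other draft-next keyword files below, checks that the keyword handles null values in instance data correctly. Every case in the suite has the same shape: a `schema` plus a list of `tests`, each pairing a `data` instance with the expected `valid` outcome. As a rough illustration of how a consumer might drive such a case (using the Python `jsonschema` library purely as an example; it is not part of the suite, and draft-next support depends on the installed version):

```python
import jsonschema

# The case added above, transcribed as a Python literal.
case = {
    "description": "additionalProperties should properly handle null data",
    "schema": {"additionalProperties": {"type": "null"}},
    "tests": [
        {"description": "null properties allowed", "data": {"foo": None}, "valid": True},
    ],
}

# Pick a validator class for the schema and check each test's expectation.
Validator = jsonschema.validators.validator_for(case["schema"])
validator = Validator(case["schema"])
for test in case["tests"]:
    assert validator.is_valid(test["data"]) == test["valid"], test["description"]
```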
15 changes: 15 additions & 0 deletions json/tests/draft-next/contains.json
@@ -237,5 +237,20 @@
"valid": false
}
]
},
{
"description": "contains should properly handle null data",
"schema": {
"contains": {
"type": "null"
}
},
"tests": [
{
"description": "null items allowed",
"data": [ null ],
"valid": true
}
]
}
]
15 changes: 15 additions & 0 deletions json/tests/draft-next/items.json
@@ -252,5 +252,20 @@
"valid": false
}
]
},
{
"description": "items should properly handle null data",
"schema": {
"items": {
"type": "null"
}
},
"tests": [
{
"description": "null items allowed",
"data": [ null ],
"valid": true
}
]
}
]
15 changes: 15 additions & 0 deletions json/tests/draft-next/patternProperties.json
@@ -152,5 +152,20 @@
"valid": true
}
]
},
{
"description": "patternProperties should properly handle null data",
"schema": {
"patternProperties": {
"^.*bar$": {"type": "null"}
}
},
"tests": [
{
"description": "null properties allowed",
"data": {"foobar": null},
"valid": true
}
]
}
]
17 changes: 17 additions & 0 deletions json/tests/draft-next/prefixItems.json
@@ -77,5 +77,22 @@
"valid": true
}
]
},
{
"description": "prefixItems should properly handle null data",
"schema": {
"prefixItems": [
{
"type": "null"
}
]
},
"tests": [
{
"description": "null items allowed",
"data": [ null ],
"valid": true
}
]
}
]
15 changes: 15 additions & 0 deletions json/tests/draft-next/properties.json
@@ -163,5 +163,20 @@
"valid": false
}
]
},
{
"description": "properties should properly handle null data",
"schema": {
"properties": {
"foo": {"type": "null"}
}
},
"tests": [
{
"description": "null properties allowed",
"data": {"foo": null},
"valid": true
}
]
}
]