diff --git a/poetry.lock b/poetry.lock index 74bd278..551d29d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -159,73 +159,76 @@ files = [ test = ["pytest (>=6)"] [[package]] -name = "html5lib" -version = "1.1" +name = "html5lib-modern" +version = "1.2" description = "HTML parser based on the WHATWG HTML specification" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.8" files = [ - {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, - {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, + {file = "html5lib_modern-1.2-py2.py3-none-any.whl", hash = "sha256:3458b6e31525ede4fcaac0ff42d9eeb5efaf755473768103cb56e0275caa8d99"}, + {file = "html5lib_modern-1.2.tar.gz", hash = "sha256:1fadbfc27ea955431270e4e79a4a4c290ba11c3a3098a95cc22dc73e312a1768"}, ] -[package.dependencies] -six = ">=1.9" -webencodings = "*" - [package.extras] -all = ["chardet (>=2.2)", "genshi", "lxml"] -chardet = ["chardet (>=2.2)"] -genshi = ["genshi"] -lxml = ["lxml"] +all = ["chardet (>=2.2.1)", "genshi (>=0.7.1)", "lxml (>=3.4.0)"] +chardet = ["chardet (>=2.2.1)"] +genshi = ["genshi (>=0.7.1)"] +lxml = ["lxml (>=3.4.0)"] [[package]] name = "httptools" -version = "0.6.1" +version = "0.6.4" description = "A collection of framework independent HTTP protocol utils." 
optional = true python-versions = ">=3.8.0" files = [ - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, - {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, - {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, - {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, - {file = 
"httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, - {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, - {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, - {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}, + {file = "httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}, + {file = "httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}, + {file = "httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"}, + {file = "httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d3f0d369e7ffbe59c4b6116a44d6a8eb4783aae027f2c0b366cf0aa964185dba"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:94978a49b8f4569ad607cd4946b759d90b285e39c0d4640c6b36ca7a3ddf2efc"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40dc6a8e399e15ea525305a2ddba998b0af5caa2566bcd79dcbe8948181eeaff"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab9ba8dcf59de5181f6be44a77458e45a578fc99c31510b8c65b7d5acc3cf490"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc411e1c0a7dcd2f902c7c48cf079947a7e65b5485dea9decb82b9105ca71a43"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d54efd20338ac52ba31e7da78e4a72570cf729fac82bc31ff9199bedf1dc7440"}, + {file = "httptools-0.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:df959752a0c2748a65ab5387d08287abf6779ae9165916fe053e68ae1fbdc47f"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd"}, + {file = "httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6"}, + {file = "httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"}, ] [package.extras] -test = ["Cython (>=0.29.24,<0.30.0)"] +test = ["Cython (>=0.29.24)"] [[package]] name = "importlib-metadata" @@ -263,18 +266,15 @@ files = [ [[package]] name = "isodate" -version = "0.6.1" +version = "0.7.2" description = "An ISO 8601 date/time/duration parser and formatter" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, - {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, + {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, + {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, ] -[package.dependencies] -six = "*" - [[package]] name = "multidict" version = "6.1.0" @@ -767,25 +767,25 @@ files = [ [[package]] name = "rdflib" -version = "7.0.0" +version = "7.1.0" description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for 
representing information." optional = false -python-versions = ">=3.8.1,<4.0.0" +python-versions = "<4.0.0,>=3.8.1" files = [ - {file = "rdflib-7.0.0-py3-none-any.whl", hash = "sha256:0438920912a642c866a513de6fe8a0001bd86ef975057d6962c79ce4771687cd"}, - {file = "rdflib-7.0.0.tar.gz", hash = "sha256:9995eb8569428059b8c1affd26b25eac510d64f5043d9ce8c84e0d0036e995ae"}, + {file = "rdflib-7.1.0-py3-none-any.whl", hash = "sha256:240c25c6e1b573ffa67aed23aae128e253c443c15291c9a01d8d392ea80c05b6"}, + {file = "rdflib-7.1.0.tar.gz", hash = "sha256:a29a8fccebd3d3a5f1b7e88d92dace1c89829018c7d29a6114fff4449c188b3b"}, ] [package.dependencies] -html5lib = {version = ">=1.0,<2.0", optional = true, markers = "extra == \"html\""} -isodate = ">=0.6.0,<0.7.0" +html5lib-modern = ">=1.2,<2.0" +isodate = {version = ">=0.7.2,<1.0.0", markers = "python_version < \"3.11\""} pyparsing = ">=2.1.0,<4" [package.extras] berkeleydb = ["berkeleydb (>=18.1.0,<19.0.0)"] -html = ["html5lib (>=1.0,<2.0)"] -lxml = ["lxml (>=4.3.0,<5.0.0)"] -networkx = ["networkx (>=2.0.0,<3.0.0)"] +lxml = ["lxml (>=4.3,<6.0)"] +networkx = ["networkx (>=2,<4)"] +orjson = ["orjson (>=3.9.14,<4)"] [[package]] name = "ruff" @@ -885,17 +885,6 @@ files = [ {file = "sanic_routing-23.12.0-py3-none-any.whl", hash = "sha256:1558a72afcb9046ed3134a5edae02fc1552cff08f0fff2e8d5de0877ea43ed73"}, ] -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - [[package]] name = "toml" version = "0.10.2" @@ -959,13 +948,13 @@ files = [ [[package]] name = "types-setuptools" -version = "75.1.0.20240917" +version = "75.2.0.20241019" description = "Typing stubs for setuptools" 
optional = true python-versions = ">=3.8" files = [ - {file = "types-setuptools-75.1.0.20240917.tar.gz", hash = "sha256:12f12a165e7ed383f31def705e5c0fa1c26215dd466b0af34bd042f7d5331f55"}, - {file = "types_setuptools-75.1.0.20240917-py3-none-any.whl", hash = "sha256:06f78307e68d1bbde6938072c57b81cf8a99bc84bd6dc7e4c5014730b097dc0c"}, + {file = "types-setuptools-75.2.0.20241019.tar.gz", hash = "sha256:86ea31b5f6df2c6b8f2dc8ae3f72b213607f62549b6fa2ed5866e5299f968694"}, + {file = "types_setuptools-75.2.0.20241019-py3-none-any.whl", hash = "sha256:2e48ff3acd4919471e80d5e3f049cce5c177e108d5d36d2d4cee3fa4d4104258"}, ] [[package]] @@ -1068,47 +1057,54 @@ files = [ [[package]] name = "uvloop" -version = "0.20.0" +version = "0.21.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = true python-versions = ">=3.8.0" files = [ - {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9ebafa0b96c62881d5cafa02d9da2e44c23f9f0cd829f3a32a6aff771449c996"}, - {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:35968fc697b0527a06e134999eef859b4034b37aebca537daeb598b9d45a137b"}, - {file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b16696f10e59d7580979b420eedf6650010a4a9c3bd8113f24a103dfdb770b10"}, - {file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b04d96188d365151d1af41fa2d23257b674e7ead68cfd61c725a422764062ae"}, - {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94707205efbe809dfa3a0d09c08bef1352f5d3d6612a506f10a319933757c006"}, - {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89e8d33bb88d7263f74dc57d69f0063e06b5a5ce50bb9a6b32f5fcbe655f9e73"}, - {file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e50289c101495e0d1bb0bfcb4a60adde56e32f4449a67216a1ab2750aa84f037"}, - {file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_x86_64.whl", 
hash = "sha256:e237f9c1e8a00e7d9ddaa288e535dc337a39bcbf679f290aee9d26df9e72bce9"}, - {file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:746242cd703dc2b37f9d8b9f173749c15e9a918ddb021575a0205ec29a38d31e"}, - {file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82edbfd3df39fb3d108fc079ebc461330f7c2e33dbd002d146bf7c445ba6e756"}, - {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:80dc1b139516be2077b3e57ce1cb65bfed09149e1d175e0478e7a987863b68f0"}, - {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f44af67bf39af25db4c1ac27e82e9665717f9c26af2369c404be865c8818dcf"}, - {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4b75f2950ddb6feed85336412b9a0c310a2edbcf4cf931aa5cfe29034829676d"}, - {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:77fbc69c287596880ecec2d4c7a62346bef08b6209749bf6ce8c22bbaca0239e"}, - {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6462c95f48e2d8d4c993a2950cd3d31ab061864d1c226bbf0ee2f1a8f36674b9"}, - {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649c33034979273fa71aa25d0fe120ad1777c551d8c4cd2c0c9851d88fcb13ab"}, - {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a609780e942d43a275a617c0839d85f95c334bad29c4c0918252085113285b5"}, - {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aea15c78e0d9ad6555ed201344ae36db5c63d428818b4b2a42842b3870127c00"}, - {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0e94b221295b5e69de57a1bd4aeb0b3a29f61be6e1b478bb8a69a73377db7ba"}, - {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fee6044b64c965c425b65a4e17719953b96e065c5b7e09b599ff332bb2744bdf"}, - {file = 
"uvloop-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:265a99a2ff41a0fd56c19c3838b29bf54d1d177964c300dad388b27e84fd7847"}, - {file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10c2956efcecb981bf9cfb8184d27d5d64b9033f917115a960b83f11bfa0d6b"}, - {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e7d61fe8e8d9335fac1bf8d5d82820b4808dd7a43020c149b63a1ada953d48a6"}, - {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2beee18efd33fa6fdb0976e18475a4042cd31c7433c866e8a09ab604c7c22ff2"}, - {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d8c36fdf3e02cec92aed2d44f63565ad1522a499c654f07935c8f9d04db69e95"}, - {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0fac7be202596c7126146660725157d4813aa29a4cc990fe51346f75ff8fde7"}, - {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d0fba61846f294bce41eb44d60d58136090ea2b5b99efd21cbdf4e21927c56a"}, - {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95720bae002ac357202e0d866128eb1ac82545bcf0b549b9abe91b5178d9b541"}, - {file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:36c530d8fa03bfa7085af54a48f2ca16ab74df3ec7108a46ba82fd8b411a2315"}, - {file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e97152983442b499d7a71e44f29baa75b3b02e65d9c44ba53b10338e98dedb66"}, - {file = "uvloop-0.20.0.tar.gz", hash = "sha256:4603ca714a754fc8d9b197e325db25b2ea045385e8a3ad05d3463de725fdf469"}, + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, + {file = 
"uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:17df489689befc72c39a08359efac29bbee8eee5209650d4b9f34df73d22e414"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc09f0ff191e61c2d592a752423c767b4ebb2986daa9ed62908e2b1b9a9ae206"}, + {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0ce1b49560b1d2d8a2977e3ba4afb2414fb46b86a1b64056bc4ab929efdafbe"}, + {file = 
"uvloop-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e678ad6fe52af2c58d2ae3c73dc85524ba8abe637f134bf3564ed07f555c5e79"}, + {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:460def4412e473896ef179a1671b40c039c7012184b627898eea5072ef6f017a"}, + {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:10da8046cc4a8f12c91a1c39d1dd1585c41162a15caaef165c2174db9ef18bdc"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff"}, + {file = "uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3"}, ] [package.extras] +dev = ["Cython (>=3.0,<4.0)", "setuptools (>=60)"] docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] +test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle 
(>=2.9.0,<2.10.0)"] [[package]] name = "wcwidth" @@ -1121,17 +1117,6 @@ files = [ {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -optional = false -python-versions = "*" -files = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] - [[package]] name = "websockets" version = "13.1" diff --git a/pyproject.toml b/pyproject.toml index 5f01b90..3203932 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -228,7 +228,7 @@ commands_pre = commands = - poetry show - poetry run pytest --cov=pyshacl test/ + poetry run pytest -v --log-level=INFO --cov=pyshacl test/ - poetry run coverage combine --append poetry run coverage report -m poetry run coverage html -i diff --git a/pyshacl/assets/dash.pickle b/pyshacl/assets/dash.pickle new file mode 100644 index 0000000..0c8eb54 Binary files /dev/null and b/pyshacl/assets/dash.pickle differ diff --git a/pyshacl/assets/dash.ttl b/pyshacl/assets/dash.ttl new file mode 100644 index 0000000..02769be --- /dev/null +++ b/pyshacl/assets/dash.ttl @@ -0,0 +1,2366 @@ +# baseURI: http://datashapes.org/dash +# imports: http://www.w3.org/ns/shacl# +# prefix: dash + +@prefix dash: . +@prefix owl: . +@prefix rdf: . +@prefix rdfs: . +@prefix sh: . +@prefix tosh: . +@prefix xsd: . + + + a owl:Ontology ; + rdfs:comment "DASH is a SHACL library for frequently needed features and design patterns. The constraint components in this library are 100% standards compliant and will work on any engine that fully supports SHACL." 
; + rdfs:label "DASH Data Shapes Vocabulary" ; + owl:imports sh: ; + sh:declare [ + sh:namespace "http://datashapes.org/dash#"^^xsd:anyURI ; + sh:prefix "dash" ; + ] ; + sh:declare [ + sh:namespace "http://purl.org/dc/terms/"^^xsd:anyURI ; + sh:prefix "dcterms" ; + ] ; + sh:declare [ + sh:namespace "http://www.w3.org/1999/02/22-rdf-syntax-ns#"^^xsd:anyURI ; + sh:prefix "rdf" ; + ] ; + sh:declare [ + sh:namespace "http://www.w3.org/2000/01/rdf-schema#"^^xsd:anyURI ; + sh:prefix "rdfs" ; + ] ; + sh:declare [ + sh:namespace "http://www.w3.org/2001/XMLSchema#"^^xsd:anyURI ; + sh:prefix "xsd" ; + ] ; + sh:declare [ + sh:namespace "http://www.w3.org/2002/07/owl#"^^xsd:anyURI ; + sh:prefix "owl" ; + ] ; + sh:declare [ + sh:namespace "http://www.w3.org/2004/02/skos/core#"^^xsd:anyURI ; + sh:prefix "skos" ; + ] ; +. +dash:APIStatus + a rdfs:Class ; + a sh:NodeShape ; + rdfs:comment "The class of possible values for dash:apiStatus." ; + rdfs:label "API Status" ; + rdfs:subClassOf rdfs:Resource ; +. +dash:Action + a dash:ShapeClass ; + dash:abstract true ; + rdfs:comment "An executable command triggered by an agent, backed by a Script implementation. Actions may get deactivated using sh:deactivated." ; + rdfs:label "Action" ; + rdfs:subClassOf dash:Script ; + rdfs:subClassOf sh:Parameterizable ; +. +dash:ActionGroup + a dash:ShapeClass ; + rdfs:comment "A group of ResourceActions, used to arrange items in menus etc. Similar to sh:PropertyGroups, they may have a sh:order and should have labels (in multiple languages if applicable)." ; + rdfs:label "Action group" ; + rdfs:subClassOf rdfs:Resource ; +. +dash:ActionTestCase + a dash:ShapeClass ; + rdfs:comment """A test case that evaluates a dash:Action using provided input parameters. Requires exactly one value for dash:action and will operate on the test case's graph (with imports) as both data and shapes graph. 
+ +Currently only supports read-only actions, allowing the comparison of actual results with the expected results.""" ; + rdfs:label "Action test case" ; + rdfs:subClassOf dash:TestCase ; +. +dash:AllObjects + a dash:AllObjectsTarget ; + rdfs:comment "A reusable instance of dash:AllObjectsTarget." ; + rdfs:label "All objects" ; +. +dash:AllObjectsTarget + a sh:SPARQLTargetType ; + rdfs:comment "A target containing all objects in the data graph as focus nodes." ; + rdfs:label "All objects target" ; + rdfs:subClassOf sh:Target ; + sh:labelTemplate "All objects" ; + sh:prefixes ; + sh:select """SELECT DISTINCT ?this +WHERE { + ?anyS ?anyP ?this . +}""" ; +. +dash:AllSubjects + a dash:AllSubjectsTarget ; + rdfs:comment "A reusable instance of dash:AllSubjectsTarget." ; + rdfs:label "All subjects" ; +. +dash:AllSubjectsTarget + a sh:SPARQLTargetType ; + rdfs:comment "A target containing all subjects in the data graph as focus nodes." ; + rdfs:label "All subjects target" ; + rdfs:subClassOf sh:Target ; + sh:labelTemplate "All subjects" ; + sh:prefixes ; + sh:select """SELECT DISTINCT ?this +WHERE { + ?this ?anyP ?anyO . +}""" ; +. +dash:AutoCompleteEditor + a dash:SingleEditor ; + rdfs:comment "An auto-complete field to enter the label of instances of a class. This is the fallback editor for any URI resource if no other editors are more suitable." ; + rdfs:label "Auto-complete editor" ; +. +dash:BlankNodeViewer + a dash:SingleViewer ; + rdfs:comment "A Viewer for blank nodes, rendering as the label of the blank node." ; + rdfs:label "Blank node viewer" ; +. +dash:BooleanSelectEditor + a dash:SingleEditor ; + rdfs:comment """An editor for boolean literals, rendering as a select box with values true and false. + +Also displays the current value (such as "1"^^xsd:boolean), but only allows to switch to true or false.""" ; + rdfs:label "Boolean select editor" ; +. 
+dash:ChangeScript + a dash:ShapeClass ; + rdfs:comment """Class of ADS scripts that are executed after edits to the data graph were made, but within the same edit. + +These scripts may access the current changes from the graphs with names dataset.addedGraphURI and dataset.deletedGraphURI to learn about which resource values have been added or deleted. For example query them using graph.withDataGraph(dataset.addedGraphURI, ...) or via SPARQL's GRAPH keyword. + +Change scripts may then perform further changes which would again become visible to other change scripts. They MUST NOT have other side effects though, because they may get executed in Preview mode, or the change may cause constraint violations and then be rejected. For side effects, after the change has been applied, use commit scripts (dash:CommitScript). + +Change scripts are executed by their relative sh:order, with a default value of 0. Use lower values to execute before other scripts.""" ; + rdfs:label "Change script" ; + rdfs:subClassOf dash:Script ; +. +dash:ClosedByTypesConstraintComponent + a sh:ConstraintComponent ; + rdfs:comment "A constraint component that can be used to declare that focus nodes are \"closed\" based on their rdf:types, meaning that focus nodes may only have values for the properties that are explicitly enumerated via sh:property/sh:path in property constraints at their rdf:types and the superclasses of those. This assumes that the type classes are also shapes." ; + rdfs:label "Closed by types constraint component" ; + sh:nodeValidator [ + a sh:SPARQLSelectValidator ; + sh:message "Property {?path} is not among those permitted for any of the types" ; + sh:prefixes ; + sh:select """SELECT $this (?predicate AS ?path) ?value +WHERE { + FILTER ($closedByTypes) . + $this ?predicate ?value . + FILTER (?predicate != rdf:type) . + FILTER NOT EXISTS { + $this rdf:type ?type . + ?type rdfs:subClassOf* ?class . + GRAPH $shapesGraph { + ?class sh:property/sh:path ?predicate . 
+ } + } +}""" ; + ] ; + sh:parameter dash:ClosedByTypesConstraintComponent-closedByTypes ; +. +dash:ClosedByTypesConstraintComponent-closedByTypes + a sh:Parameter ; + sh:path dash:closedByTypes ; + dash:reifiableBy dash:ConstraintReificationShape ; + sh:datatype xsd:boolean ; + sh:description "True to indicate that the focus nodes are closed by their types. A constraint violation is reported for each property value of the focus node where the property is not among those that are explicitly declared via sh:property/sh:path in any of the rdf:types of the focus node (and their superclasses). The property rdf:type is always permitted." ; + sh:maxCount 1 ; +. +dash:CoExistsWithConstraintComponent + a sh:ConstraintComponent ; + dash:localConstraint true ; + rdfs:comment "A constraint component that can be used to express a constraint on property shapes so that if the property path has any value then the given property must also have a value, and vice versa." ; + rdfs:label "Co-exists-with constraint component" ; + sh:message "Values must co-exist with values of {$coExistsWith}" ; + sh:parameter dash:CoExistsWithConstraintComponent-coExistsWith ; + sh:propertyValidator [ + a sh:SPARQLSelectValidator ; + sh:prefixes ; + sh:select """SELECT $this +WHERE { + { + FILTER (EXISTS { $this $PATH ?any } && NOT EXISTS { $this $coExistsWith ?any }) + } + UNION + { + FILTER (NOT EXISTS { $this $PATH ?any } && EXISTS { $this $coExistsWith ?any }) + } +}""" ; + ] ; +. +dash:CoExistsWithConstraintComponent-coExistsWith + a sh:Parameter ; + sh:path dash:coExistsWith ; + dash:editor dash:PropertyAutoCompleteEditor ; + dash:reifiableBy dash:ConstraintReificationShape ; + dash:viewer dash:PropertyLabelViewer ; + sh:description "The properties that must co-exist with the surrounding property (path). If the surrounding property path has any value then the given property must also have a value, and vice versa." ; + sh:name "co-exists with" ; + sh:nodeKind sh:IRI ; +. 
+dash:CommitScript + a dash:ShapeClass ; + rdfs:comment """Class of ADS scripts that are executed after edits to the data graph were made and have been committed. + +These scripts may access the changes that have just happened from the graphs with names dataset.addedGraphURI and dataset.deletedGraphURI to learn about which resource values have been added or deleted. For example query them using graph.withDataGraph(dataset.addedGraphURI, ...) or via SPARQL's GRAPH keyword. + +Commit scripts may then perform side effects such as updating other graphs or sending out notifications to external systems. For edits that should be made within a finishing change, use change scripts (dash:ChangeScript). + +Commit scripts are executed by their relative sh:order, with a default value of 0. Use lower values to execute before other scripts.""" ; + rdfs:label "Commit script" ; + rdfs:subClassOf dash:Script ; +. +dash:ConstraintReificationShape + a sh:NodeShape ; + rdfs:comment "Can be used to attach sh:severity and sh:messages to individual constraints using reification." ; + rdfs:label "Constraint reification shape" ; + sh:property dash:ConstraintReificationShape-message ; + sh:property dash:ConstraintReificationShape-severity ; +. +dash:ConstraintReificationShape-message + a sh:PropertyShape ; + sh:path sh:message ; + dash:singleLine true ; + sh:name "messages" ; + sh:nodeKind sh:Literal ; + sh:or dash:StringOrLangString ; +. +dash:ConstraintReificationShape-severity + a sh:PropertyShape ; + sh:path sh:severity ; + sh:class sh:Severity ; + sh:maxCount 1 ; + sh:name "severity" ; + sh:nodeKind sh:IRI ; +. +dash:Constructor + a dash:ShapeClass ; + rdfs:comment """A script that is executed when a new instance of the class associated via dash:constructor is created, e.g. from a New button. Such scripts typically declare one or more parameters that are collected from the user when the script starts. 
The values of these parameters can be used as named variables in the script for arbitrary purposes such as setting the URI or initializing some property values of the new instance. + +The variable focusNode will hold the named node of the selected type, for example when a constructor is associated with a superclass but the user has pressed New for a subclass. + +The last expression of the script will be used as result of the constructor, so that the surrounding tool knows which resource shall be navigated to next.""" ; + rdfs:label "Constructor" ; + rdfs:subClassOf dash:Script ; + rdfs:subClassOf sh:Parameterizable ; +. +dash:DateOrDateTime + a rdf:List ; + rdf:first [ + sh:datatype xsd:date ; + ] ; + rdf:rest ( + [ + sh:datatype xsd:dateTime ; + ] + ) ; + rdfs:comment "An rdf:List that can be used in property constraints as value for sh:or to indicate that all values of a property must be either xsd:date or xsd:dateTime." ; + rdfs:label "Date or date time" ; +. +dash:DatePickerEditor + a dash:SingleEditor ; + rdfs:comment "An editor for xsd:date literals, offering a calendar-like date picker." ; + rdfs:label "Date picker editor" ; +. +dash:DateTimePickerEditor + a dash:SingleEditor ; + rdfs:comment "An editor for xsd:dateTime literals, offering a calendar-like date picker and a time selector." ; + rdfs:label "Date time picker editor" ; +. +dash:DepictionRole + a dash:PropertyRole ; + rdfs:comment "Depiction properties provide images representing the focus nodes. Typical examples may be a photo of an animal or the map of a country." ; + rdfs:label "Depiction" ; +. +dash:Deprecated + a dash:APIStatus ; + rdfs:comment "Features that have been marked deprecated will remain in the API but should no longer be used by new code and may get deleted in the foreseeable future (e.g., with the next major release)." ; + rdfs:label "deprecated" ; +. 
+dash:DescriptionRole + a dash:PropertyRole ; + rdfs:comment "Description properties should produce text literals that may be used as an introduction/summary of what a focus node does." ; + rdfs:label "Description" ; +. +dash:DetailsEditor + a dash:SingleEditor ; + rdfs:comment "An editor for non-literal values, typically displaying a nested form where the values of the linked resource can be edited directly on the \"parent\" form. Implementations that do not support this (yet) could fall back to an auto-complete widget." ; + rdfs:label "Details editor" ; +. +dash:DetailsViewer + a dash:SingleViewer ; + rdfs:comment "A Viewer for resources that shows the details of the value using its default view shape as a nested form-like display." ; + rdfs:label "Details viewer" ; +. +dash:Editor + a dash:ShapeClass ; + dash:abstract true ; + rdfs:comment "The class of widgets for editing value nodes." ; + rdfs:label "Editor" ; + rdfs:subClassOf dash:Widget ; +. +dash:EnumSelectEditor + a dash:SingleEditor ; + rdfs:comment "A drop-down editor for enumerated values (typically based on sh:in lists)." ; + rdfs:label "Enum select editor" ; +. +dash:Experimental + a dash:APIStatus ; + rdfs:comment "Features that are marked experimental can be used by early adopters but there is no guarantee that they will reach stable state." ; + rdfs:label "experimental" ; +. +dash:ExploreAction + a dash:ShapeClass ; + rdfs:comment "An action typically showing up in an Explore section of a selected resource. Cannot make changes to the data." ; + rdfs:label "Explore action" ; + rdfs:subClassOf dash:ResourceAction ; +. +dash:FailureResult + a rdfs:Class ; + rdfs:comment "A result representing a validation failure such as an unsupported recursion." ; + rdfs:label "Failure result" ; + rdfs:subClassOf sh:AbstractResult ; +. +dash:FailureTestCaseResult + a rdfs:Class ; + rdfs:comment "Represents a failure of a test case." 
; + rdfs:label "Failure test case result" ; + rdfs:subClassOf dash:TestCaseResult ; +. +dash:FunctionTestCase + a dash:ShapeClass ; + rdfs:comment "A test case that verifies that a given SPARQL expression produces a given, expected result." ; + rdfs:label "Function test case" ; + rdfs:subClassOf dash:TestCase ; +. +dash:GraphService + a dash:ShapeClass ; + rdfs:comment "A service that does not apply to a specific resource (as ResourceService does) but operates on the whole graph. The focusNode variable will be the URI of the current base graph (e.g. as a NamedNode." ; + rdfs:label "Graph service" ; + rdfs:subClassOf dash:Service ; +. +dash:GraphStoreTestCase + a dash:ShapeClass ; + rdfs:comment "A test case that can be used to verify that an RDF file could be loaded (from a file) and that the resulting RDF graph is equivalent to a given TTL file." ; + rdfs:label "Graph store test case" ; + rdfs:subClassOf dash:TestCase ; +. +dash:GraphUpdate + a rdfs:Class ; + rdfs:comment "A suggestion consisting of added and/or deleted triples, represented as rdf:Statements via dash:addedTriple and dash:deletedTriple." ; + rdfs:label "Graph update" ; + rdfs:subClassOf dash:Suggestion ; +. +dash:GraphValidationTestCase + a dash:ShapeClass ; + rdfs:comment "A test case that performs SHACL constraint validation on the whole graph and compares the results with the expected validation results stored with the test case. By default this excludes meta-validation (i.e. the validation of the shape definitions themselves). If that's desired, set dash:validateShapes to true." ; + rdfs:label "Graph validation test case" ; + rdfs:subClassOf dash:ValidationTestCase ; +. 
+dash:HTMLOrStringOrLangString + a rdf:List ; + rdf:first [ + sh:datatype rdf:HTML ; + ] ; + rdf:rest ( + [ + sh:datatype xsd:string ; + ] + [ + sh:datatype rdf:langString ; + ] + ) ; + rdfs:comment "An rdf:List that can be used in property constraints as value for sh:or to indicate that all values of a property must be either rdf:HTML, xsd:string or rdf:langString (in that order of preference)." ; + rdfs:label "HTML or string or langString" ; +. +dash:HTMLViewer + a dash:SingleViewer ; + rdfs:comment "A Viewer for HTML encoded text from rdf:HTML literals, rendering as parsed HTML DOM elements. Also displays the language if the HTML has a lang attribute on its root DOM element." ; + rdfs:label "HTML viewer" ; +. +dash:HasValueInConstraintComponent + a sh:ConstraintComponent ; + rdfs:comment "A constraint component that can be used to express a constraint on property shapes so that one of the values of the property path must be a member of a given list of nodes." ; + rdfs:label "Has value in constraint component" ; + sh:message "At least one of the values must be in {$hasValueIn}" ; + sh:parameter dash:HasValueInConstraintComponent-hasValueIn ; + sh:propertyValidator [ + a sh:SPARQLSelectValidator ; + sh:prefixes ; + sh:select """SELECT $this +WHERE { + FILTER NOT EXISTS { + $this $PATH ?value . + GRAPH $shapesGraph { + $hasValueIn rdf:rest*/rdf:first ?value . + } + } +}""" ; + ] ; +. +dash:HasValueInConstraintComponent-hasValueIn + a sh:Parameter ; + sh:path dash:hasValueIn ; + dash:reifiableBy dash:ConstraintReificationShape ; + sh:description "At least one of the value nodes must be a member of the given list." ; + sh:name "has value in" ; + sh:node dash:ListShape ; +. +dash:HasValueTarget + a sh:SPARQLTargetType ; + rdfs:comment "A target type for all subjects where a given predicate has a certain object value." 
; + rdfs:label "Has Value target" ; + rdfs:subClassOf sh:Target ; + sh:labelTemplate "All subjects where {$predicate} has value {$object}" ; + sh:parameter [ + a sh:Parameter ; + sh:path dash:object ; + sh:description "The value that is expected to be present." ; + sh:name "object" ; + ] ; + sh:parameter [ + a sh:Parameter ; + sh:path dash:predicate ; + sh:description "The predicate property." ; + sh:name "predicate" ; + sh:nodeKind sh:IRI ; + ] ; + sh:prefixes ; + sh:select """SELECT DISTINCT ?this +WHERE { + ?this $predicate $object . +}""" ; +. +dash:HasValueWithClassConstraintComponent + a sh:ConstraintComponent ; + rdfs:comment "A constraint component that can be used to express a constraint on property shapes so that one of the values of the property path must be an instance of a given class." ; + rdfs:label "Has value with class constraint component" ; + sh:message "At least one of the values must be an instance of class {$hasValueWithClass}" ; + sh:parameter dash:HasValueWithClassConstraintComponent-hasValueWithClass ; + sh:propertyValidator [ + a sh:SPARQLSelectValidator ; + sh:prefixes ; + sh:select """SELECT $this +WHERE { + FILTER NOT EXISTS { + $this $PATH ?value . + ?value a ?type . + ?type rdfs:subClassOf* $hasValueWithClass . + } +}""" ; + ] ; +. +dash:HasValueWithClassConstraintComponent-hasValueWithClass + a sh:Parameter ; + sh:path dash:hasValueWithClass ; + dash:reifiableBy dash:ConstraintReificationShape ; + sh:class rdfs:Class ; + sh:description "One of the values of the property path must be an instance of the given class." ; + sh:name "has value with class" ; + sh:nodeKind sh:IRI ; +. +dash:HyperlinkViewer + a dash:SingleViewer ; + rdfs:comment """A Viewer for literals, rendering as a hyperlink to a URL. + +For literals it assumes the lexical form is the URL. + +This is often used as default viewer for xsd:anyURI literals. Unsupported for blank nodes.""" ; + rdfs:label "Hyperlink viewer" ; +. 
+dash:IDRole + a dash:PropertyRole ; + rdfs:comment "ID properties are short strings or other literals that identify the focus node among siblings. Examples may include social security numbers." ; + rdfs:label "ID" ; +. +dash:IconRole + a dash:PropertyRole ; + rdfs:comment """Icon properties produce images that are typically small and almost square-shaped, and that may be displayed in the upper left corner of a focus node's display. Values should be xsd:string or xsd:anyURI literals or IRI nodes pointing at URLs. Those URLs should ideally be vector graphics such as .svg files. + +Instances of the same class often have the same icon, and this icon may be computed using a sh:values rule or as sh:defaultValue. + +If the value is a relative URL then those should be resolved against the server that delivered the surrounding page.""" ; + rdfs:label "Icon" ; +. +dash:ImageViewer + a dash:SingleViewer ; + rdfs:comment "A Viewer for URI values that are recognized as images by a browser, rendering as an image." ; + rdfs:label "Image viewer" ; +. +dash:IncludedScript + a dash:ShapeClass ; + rdfs:comment """The code associated with instances of this class will get injected into the generated APIs, as global code snippets. Typically used to declare libraries of utility functions or constants that are (compared to shape scripts) not necessarily associated with specific classes or shapes. + +Note that the JavaScript code stored in dash:js cannot use the export keyword because the code must also work in external scripts (such as on Node.js). Instead, you need to enumerate the exported symbols via dash:exports.""" ; + rdfs:label "Included script" ; + rdfs:subClassOf dash:Script ; +. +dash:IndexedConstraintComponent + a sh:ConstraintComponent ; + rdfs:comment "A constraint component that can be used to mark property shapes to be indexed, meaning that each of its value nodes must carry a dash:index from 0 to N." 
; + rdfs:label "Indexed constraint component" ; + sh:parameter dash:IndexedConstraintComponent-indexed ; +. +dash:IndexedConstraintComponent-indexed + a sh:Parameter ; + sh:path dash:indexed ; + dash:reifiableBy dash:ConstraintReificationShape ; + sh:datatype xsd:boolean ; + sh:description "True to activate indexing for this property." ; + sh:maxCount 1 ; + sh:name "indexed" ; +. +dash:InferencingTestCase + a dash:ShapeClass ; + rdfs:comment "A test case to verify whether an inferencing engine is producing identical results to those stored as expected results." ; + rdfs:label "Inferencing test case" ; + rdfs:subClassOf dash:TestCase ; +. +dash:InlineViewer + a dash:MultiViewer ; + rdfs:comment "A multi-viewer that renders all values horizontally, in a more compact form that just a single value per row." ; + rdfs:label "Inline viewer" ; +. +dash:InstancesSelectEditor + a dash:SingleEditor ; + rdfs:comment "A drop-down editor for all instances of the target class (based on sh:class of the property)." ; + rdfs:label "Instances select editor" ; +. +dash:JSONTableViewer + a dash:SingleViewer ; + rdfs:comment """A tabular viewer for rdf:JSON literals with a lexical form in the following format: + +{ + vars: [ 'col1', 'col2' ], // These are the column keys + headerLabels: [ 'Column 1', 'Column 2' ], // Optional, for the column headers + bindings: [ // These become the rows + { + col1: { + lex: 'Value2', + datatype: '...#string', + }, + col2: { + uri: 'http://.../Instance', + label: 'Example Instance', + }, + }, + ... + ], +} + +The resulting table will use the headerLabels (if they exist) as column headers, otherwise derive the headers from the variable names. The vars must match the fields in the bindings. The table will contain one row for each binding. + +Using Active Data Shapes, you can construct such literals dynamically using a sh:values rule, e.g. 
+ +ex:MyClass-myProperty + a sh:PropertyShape ; + sh:path ex:myProperty ; + sh:values [ + dash:js ""\" + DataViewers.createTableViewerJSON(focusNode.select(` + SELECT ?col1 ?col2 + WHERE { + $this ex:prop1 ?col1 . + $this ex:prop2 ?col2 . + } + `))""\" + ] . + +You may also produce the JSON literal programmatically in JavaScript, or assert the triples by other means.""" ; + rdfs:label "JSON table viewer" ; +. +dash:KeyInfoRole + a dash:PropertyRole ; + rdfs:comment "The Key info role may be assigned to properties that are likely of special interest to a reader, so that they should appear whenever a summary of a focus node is shown." ; + rdfs:label "Key info" ; +. +dash:LabelRole + a dash:PropertyRole ; + rdfs:comment "Properties with this role produce strings that may serve as display label for the focus nodes. Labels should be either plain string literals or strings with a language tag. The values should also be single-line." ; + rdfs:label "Label" ; +. +dash:LabelViewer + a dash:SingleViewer ; + rdfs:comment "A Viewer for URI resources, rendering as a hyperlink to that URI based on the display label of the resource. Also includes other ways of interacting with the URI such as opening a nested summary display." ; + rdfs:label "Label viewer" ; +. +dash:LangStringViewer + a dash:SingleViewer ; + rdfs:comment "A Viewer for literals with a language tag, rendering as the text plus a language indicator." ; + rdfs:label "LangString viewer" ; +. +dash:ListNodeShape + a sh:NodeShape ; + rdfs:comment "Defines constraints on what it means for a node to be a node within a well-formed RDF list. Note that this does not check whether the rdf:rest items are also well-formed lists as this would lead to unsupported recursion." 
; + rdfs:label "List node shape" ; + sh:or ( + [ + sh:hasValue () ; + sh:property [ + a sh:PropertyShape ; + sh:path rdf:first ; + sh:maxCount 0 ; + ] ; + sh:property [ + a sh:PropertyShape ; + sh:path rdf:rest ; + sh:maxCount 0 ; + ] ; + ] + [ + sh:not [ + sh:hasValue () ; + ] ; + sh:property [ + a sh:PropertyShape ; + sh:path rdf:first ; + sh:maxCount 1 ; + sh:minCount 1 ; + ] ; + sh:property [ + a sh:PropertyShape ; + sh:path rdf:rest ; + sh:maxCount 1 ; + sh:minCount 1 ; + ] ; + ] + ) ; +. +dash:ListShape + a sh:NodeShape ; + rdfs:comment """Defines constraints on what it means for a node to be a well-formed RDF list. + +The focus node must either be rdf:nil or not recursive. Furthermore, this shape uses dash:ListNodeShape as a "helper" to walk through all members of the whole list (including itself).""" ; + rdfs:label "List shape" ; + sh:or ( + [ + sh:hasValue () ; + ] + [ + sh:not [ + sh:hasValue () ; + ] ; + sh:property [ + a sh:PropertyShape ; + sh:path [ + sh:oneOrMorePath rdf:rest ; + ] ; + dash:nonRecursive true ; + ] ; + ] + ) ; + sh:property [ + a sh:PropertyShape ; + sh:path [ + sh:zeroOrMorePath rdf:rest ; + ] ; + rdfs:comment "Each list member (including this node) must be have the shape dash:ListNodeShape." ; + sh:node dash:ListNodeShape ; + ] ; +. +dash:LiteralViewer + a dash:SingleViewer ; + rdfs:comment "A simple viewer for literals, rendering the lexical form of the value." ; + rdfs:label "Literal viewer" ; +. +dash:ModifyAction + a dash:ShapeClass ; + rdfs:comment "An action typically showing up in a Modify section of a selected resource. May make changes to the data." ; + rdfs:label "Modify action" ; + rdfs:subClassOf dash:ResourceAction ; +. +dash:MultiEditor + a dash:ShapeClass ; + rdfs:comment "An editor for multiple/all value nodes at once." ; + rdfs:label "Multi editor" ; + rdfs:subClassOf dash:Editor ; +. 
+dash:MultiFunction + a rdfs:Class ; + a sh:NodeShape ; + rdfs:comment """A multi-function is a function that can return zero or more result objects consisting of one or more result variables. While normal (SPARQL/SHACL) functions can only return a single result node, multi-functions may not only return multiple nodes but even multiple individual variables per solution. + +A common way of defining multi-functions is by wrapping a SPARQL SELECT query, using dash:SPARQLMultiFunction. However, some MultiFunctions (in TopBraid) may also be implemented natively.""" ; + rdfs:label "Multi-function" ; + rdfs:subClassOf sh:Parameterizable ; + sh:nodeKind sh:IRI ; +. +dash:MultiViewer + a dash:ShapeClass ; + rdfs:comment "A viewer for multiple/all values at once." ; + rdfs:label "Multi viewer" ; + rdfs:subClassOf dash:Viewer ; +. +dash:NoSuitableEditor + a dash:SingleEditor ; + rdfs:comment "An \"editor\" that simply informs the user that the values cannot be edited here, but for example through source code editing." ; + rdfs:label "No suitable editor" ; +. +dash:NodeExpressionViewer + a dash:SingleViewer ; + rdfs:comment "A viewer for SHACL Node Expressions."^^rdf:HTML ; + rdfs:label "Node expression viewer" ; +. +dash:NonRecursiveConstraintComponent + a sh:ConstraintComponent ; + rdfs:comment "Used to state that a property or path must not point back to itself." ; + rdfs:label "Non-recursive constraint component" ; + sh:message "Points back at itself (recursively)" ; + sh:parameter dash:NonRecursiveConstraintComponent-nonRecursive ; + sh:propertyValidator [ + a sh:SPARQLSelectValidator ; + sh:prefixes ; + sh:select """SELECT DISTINCT $this ($this AS ?value) +WHERE { + { + FILTER (?nonRecursive) + } + $this $PATH $this . +}""" ; + ] ; +. 
+dash:NonRecursiveConstraintComponent-nonRecursive + a sh:Parameter ; + sh:path dash:nonRecursive ; + dash:reifiableBy dash:ConstraintReificationShape ; + sh:datatype xsd:boolean ; + sh:description """Used to state that a property or path must not point back to itself. + +For example, "a person cannot have itself as parent" can be expressed by setting dash:nonRecursive=true for a given sh:path. + +To express that a person cannot have itself among any of its (recursive) parents, use a sh:path with the + operator such as ex:parent+.""" ; + sh:maxCount 1 ; + sh:name "non-recursive" ; +. +dash:None + a sh:NodeShape ; + rdfs:comment "A Shape that is no node can conform to." ; + rdfs:label "None" ; + sh:in () ; +. +dash:ParameterConstraintComponent + a sh:ConstraintComponent ; + rdfs:comment "A constraint component that can be used to verify that all value nodes conform to the given Parameter."@en ; + rdfs:label "Parameter constraint component"@en ; + sh:parameter dash:ParameterConstraintComponent-parameter ; +. +dash:ParameterConstraintComponent-parameter + a sh:Parameter ; + sh:path sh:parameter ; +. +dash:PrimaryKeyConstraintComponent + a sh:ConstraintComponent ; + dash:localConstraint true ; + rdfs:comment "Enforces a constraint that the given property (sh:path) serves as primary key for all resources in the target of the shape. If a property has been declared to be the primary key then each resource must have exactly one value for that property. Furthermore, the URIs of those resources must start with a given string (dash:uriStart), followed by the URL-encoded primary key value. For example if dash:uriStart is \"http://example.org/country-\" and the primary key for an instance is \"de\" then the URI must be \"http://example.org/country-de\". Finally, as a result of the URI policy, there can not be any other resource with the same value under the same primary key policy." 
; + rdfs:label "Primary key constraint component" ; + sh:labelTemplate "The property {?predicate} is the primary key and URIs start with {?uriStart}" ; + sh:message "Violation of primary key constraint" ; + sh:parameter dash:PrimaryKeyConstraintComponent-uriStart ; + sh:propertyValidator [ + a sh:SPARQLSelectValidator ; + sh:prefixes ; + sh:select """SELECT DISTINCT $this +WHERE { + FILTER ( + # Must have a value for the primary key + NOT EXISTS { ?this $PATH ?any } + || + # Must have no more than one value for the primary key + EXISTS { + ?this $PATH ?value1 . + ?this $PATH ?value2 . + FILTER (?value1 != ?value2) . + } + || + # The value of the primary key must align with the derived URI + EXISTS { + { + ?this $PATH ?value . + FILTER NOT EXISTS { ?this $PATH ?value2 . FILTER (?value != ?value2) } + } + BIND (CONCAT($uriStart, ENCODE_FOR_URI(str(?value))) AS ?uri) . + FILTER (str(?this) != ?uri) . + } + ) +}""" ; + ] ; +. +dash:PrimaryKeyConstraintComponent-uriStart + a sh:Parameter ; + sh:path dash:uriStart ; + dash:reifiableBy dash:ConstraintReificationShape ; + sh:datatype xsd:string ; + sh:description "The start of the URIs of well-formed resources. If specified then the associated property/path serves as \"primary key\" for all target nodes (instances). All such target nodes need to have a URI that starts with the given string, followed by the URI-encoded value of the primary key property." ; + sh:maxCount 1 ; + sh:name "URI start" ; +. +dash:PropertyAutoCompleteEditor + a dash:SingleEditor ; + rdfs:comment "An editor for properties that are either defined as instances of rdf:Property or used as IRI values of sh:path. The component uses auto-complete to find these properties by their rdfs:labels or sh:names." ; + rdfs:label "Property auto-complete editor" ; +. 
+dash:PropertyLabelViewer + a dash:SingleViewer ; + rdfs:comment "A viewer for properties that renders a hyperlink using the display label or sh:name, allowing users to either navigate to the rdf:Property resource or the property shape definition. Should be used in conjunction with PropertyAutoCompleteEditor." ; + rdfs:label "Property label viewer" ; +. +dash:PropertyRole + a rdfs:Class ; + a sh:NodeShape ; + rdfs:comment "The class of roles that a property (shape) may take for its focus nodes." ; + rdfs:label "Property role" ; + rdfs:subClassOf rdfs:Resource ; +. +dash:QueryTestCase + a dash:ShapeClass ; + rdfs:comment "A test case running a given SPARQL SELECT query and comparing its results with those stored as JSON Result Set in the expected result property." ; + rdfs:label "Query test case" ; + rdfs:subClassOf dash:TestCase ; + rdfs:subClassOf sh:SPARQLSelectExecutable ; +. +dash:ReifiableByConstraintComponent + a sh:ConstraintComponent ; + rdfs:label "Reifiable-by constraint component" ; + sh:labelTemplate "Reifiable by {$reifiableBy}" ; + sh:parameter dash:ReifiableByConstraintComponent-reifiableBy ; +. +dash:ReifiableByConstraintComponent-reifiableBy + a sh:Parameter ; + sh:path dash:reifiableBy ; + sh:class sh:NodeShape ; + sh:description "Can be used to specify the node shape that may be applied to reified statements produced by a property shape. The property shape must have a URI resource as its sh:path. The values of this property must be node shapes. User interfaces can use this information to determine which properties to present to users when reified statements are explored or edited. Also, SHACL validators can use it to determine how to validate reified triples. Use dash:None to indicate that no reification should be permitted." ; + sh:maxCount 1 ; + sh:name "reifiable by" ; + sh:nodeKind sh:IRI ; +. +dash:ResourceAction + a dash:ShapeClass ; + dash:abstract true ; + rdfs:comment "An Action that can be executed for a selected resource. 
Such Actions show up in context menus once they have been assigned a sh:group." ; + rdfs:label "Resource action" ; + rdfs:subClassOf dash:Action ; +. +dash:ResourceService + a dash:ShapeClass ; + rdfs:comment "A Service that can (and must) be applied to a given resource as focus node. Use dash:resourceService to link a class to the services that apply to its instances." ; + rdfs:label "Resource service" ; + rdfs:subClassOf dash:Service ; +. +dash:RichTextEditor + a dash:SingleEditor ; + rdfs:comment "A rich text editor to enter the lexical value of a literal and a drop down to select language. The selected language is stored in the HTML lang attribute of the root node in the HTML DOM tree." ; + rdfs:label "Rich text editor" ; +. +dash:RootClassConstraintComponent + a sh:ConstraintComponent ; + rdfs:comment "A constraint component defining the parameter dash:rootClass, which restricts the values to be either the root class itself or one of its subclasses. This is typically used in conjunction with properties that have rdfs:Class as their type." ; + rdfs:label "Root class constraint component" ; + sh:labelTemplate "Root class {$rootClass}" ; + sh:message "Value must be subclass of {$rootClass}" ; + sh:parameter dash:RootClassConstraintComponent-rootClass ; + sh:validator dash:hasRootClass ; +. +dash:RootClassConstraintComponent-rootClass + a sh:Parameter ; + sh:path dash:rootClass ; + dash:reifiableBy dash:ConstraintReificationShape ; + sh:class rdfs:Class ; + sh:description "The root class." ; + sh:name "root class" ; + sh:nodeKind sh:IRI ; +. +dash:SPARQLConstructTemplate + a rdfs:Class ; + rdfs:comment "Encapsulates one or more SPARQL CONSTRUCT queries that can be parameterized. Parameters will become pre-bound variables in the queries." ; + rdfs:label "SPARQL CONSTRUCT template" ; + rdfs:subClassOf sh:Parameterizable ; + rdfs:subClassOf sh:SPARQLConstructExecutable ; +. 
+dash:SPARQLMultiFunction + a rdfs:Class ; + a sh:NodeShape ; + rdfs:comment "A multi-function based on a SPARQL SELECT query. The query gets executed with the arguments pre-bound to the variables declared as parameters. The results of the multi-function are all result bindings from the SPARQL result set." ; + rdfs:label "SPARQL multi-function" ; + rdfs:subClassOf dash:MultiFunction ; + rdfs:subClassOf sh:SPARQLSelectExecutable ; +. +dash:SPARQLSelectTemplate + a rdfs:Class ; + rdfs:comment "Encapsulates a SPARQL SELECT query that can be parameterized. Parameters will become pre-bound variables in the query." ; + rdfs:label "SPARQL SELECT template" ; + rdfs:subClassOf sh:Parameterizable ; + rdfs:subClassOf sh:SPARQLSelectExecutable ; +. +dash:SPARQLUpdateSuggestionGenerator + a rdfs:Class ; + rdfs:comment """A SuggestionGenerator based on a SPARQL UPDATE query (sh:update), producing an instance of dash:GraphUpdate. The INSERTs become dash:addedTriple and the DELETEs become dash:deletedTriple. The WHERE clause operates on the data graph with the pre-bound variables $focusNode, $predicate and $value, as well as the other pre-bound variables for the parameters of the constraint. + +In many cases, there may be multiple possible suggestions to fix a problem. For example, with sh:maxLength there are many ways to slice a string. In those cases, the system will first iterate through the result variables from a SELECT query (sh:select) and apply these results as pre-bound variables into the UPDATE query.""" ; + rdfs:label "SPARQL UPDATE suggestion generator" ; + rdfs:subClassOf dash:SuggestionGenerator ; + rdfs:subClassOf sh:SPARQLSelectExecutable ; + rdfs:subClassOf sh:SPARQLUpdateExecutable ; +. +dash:Script + a dash:ShapeClass ; + rdfs:comment "An executable unit implemented in one or more languages such as JavaScript." ; + rdfs:label "Script" ; + rdfs:subClassOf rdfs:Resource ; +. 
+dash:ScriptAPIGenerationRules + a sh:PropertyGroup ; + rdfs:label "Script API Generation Rules" ; +. +dash:ScriptAPIShape + a sh:NodeShape ; + rdfs:comment "Defines the properties that instruct the ADS Script API generator about what prefixes, constants and classes to generate." ; + rdfs:label "Script API" ; + sh:property dash:ScriptAPIShape-generateClass ; + sh:property dash:ScriptAPIShape-generatePrefixClasses ; + sh:property dash:ScriptAPIShape-generatePrefixConstants ; + sh:targetClass owl:Ontology ; +. +dash:ScriptAPIShape-generateClass + a sh:PropertyShape ; + sh:path dash:generateClass ; + sh:class sh:NodeShape ; + sh:description "The API generator will produce classes for each value of this property and all its subclasses and superclasses." ; + sh:group dash:ScriptAPIGenerationRules ; + sh:name "generate class" ; + sh:nodeKind sh:IRI ; + sh:order "0"^^xsd:decimal ; +. +dash:ScriptAPIShape-generatePrefixClasses + a sh:PropertyShape ; + sh:path dash:generatePrefixClasses ; + sh:datatype xsd:string ; + sh:description "If a prefix (such as \"edg\") is listed here then the API generator will produce classes for all RDFS classes or node shapes from the associated namespace." ; + sh:group dash:ScriptAPIGenerationRules ; + sh:name "generate prefix classes" ; + sh:order "15"^^xsd:decimal ; +. +dash:ScriptAPIShape-generatePrefixConstants + a sh:PropertyShape ; + sh:path dash:generatePrefixConstants ; + sh:datatype xsd:string ; + sh:description "If a prefix (such as \"edg\") is listed here then the API generator will produce constants for class, datatype, shape and property names." ; + sh:group dash:ScriptAPIGenerationRules ; + sh:name "generate prefix constants" ; + sh:order "10"^^xsd:decimal ; +. +dash:ScriptConstraint + a dash:ShapeClass ; + rdfs:comment """The class of constraints that are based on Scripts. 
Depending on whether dash:onAllValues is set to true, these scripts can access the following pre-assigned variables: + +- focusNode: the focus node of the constraint (a NamedNode) +- if dash:onAllValues is not true: value: the current value node (e.g. a JavaScript string for xsd:string literals, a number for numeric literals or true or false for xsd:boolean literals. All other literals become LiteralNodes, and non-literals become instances of NamedNode) +- if dash:onAllValues is true: values: an array of current value nodes, as above. + +If the expression returns an array then each array member will be mapped to one validation result, following the mapping rules below. + +For string results, a validation result will use the string as sh:resultMessage. +For boolean results, a validation result will be produced if the result is false (true means no violation). + +For object results, a validation result will be produced using the value of the field "message" of the object as result message. If the field "value" has a value then this will become the sh:value in the violation. + +Unless another sh:message has been directly returned, the sh:message of the dash:ScriptConstraint will be used, similar to sh:message at SPARQL Constraints. These sh:messages can access the values {$focusNode}, {$value} etc as template variables.""" ; + rdfs:label "Script constraint" ; + rdfs:subClassOf dash:Script ; +. +dash:ScriptConstraintComponent + a sh:ConstraintComponent ; + rdfs:label "Script constraint component" ; + sh:parameter dash:ScriptConstraintComponent-scriptConstraint ; +. +dash:ScriptConstraintComponent-scriptConstraint + a sh:Parameter ; + sh:path dash:scriptConstraint ; + sh:class dash:ScriptConstraint ; + sh:description "The Script constraint(s) to apply." ; + sh:name "script constraint" ; +. 
+dash:ScriptFunction + a rdfs:Class ; + a sh:NodeShape ; + rdfs:comment """Script functions can be used from SPARQL queries and will be injected into the generated prefix object (in JavaScript, for ADS scripts). The dash:js will be inserted into a generated JavaScript function and therefore needs to use the return keyword to produce results. These JS snippets can access the parameter values based on the local name of the sh:Parameter's path. For example ex:value can be accessed using value. + +SPARQL use note: Since these functions may be used from any data graph and any shapes graph, they must not rely on any API apart from what's available in the shapes graph that holds the rdf:type triple of the function itself. In other words, at execution time from SPARQL, the ADS shapes graph will be the home graph of the function's declaration.""" ; + rdfs:label "Script function" ; + rdfs:subClassOf dash:Script ; + rdfs:subClassOf sh:Function ; +. +dash:ScriptSuggestionGenerator + a dash:ShapeClass ; + rdfs:comment """A Suggestion Generator that is backed by an Active Data Shapes script. The script needs to return a JSON object or an array of JSON objects if it shall generate multiple suggestions. It may also return null to indicate that nothing was suggested. Note that the whole script is evaluated as a (JavaScript) expression, and those will use the last value as result. So simply putting an object at the end of your script should do. Alternatively, define the bulk of the operation as a function and simply call that function in the script. + +Each response object can have the following fields: + +{ + message: "The human readable message", // Defaults to the rdfs:label(s) of the suggestion generator + add: [ // An array of triples to add, each triple as an array with three nodes + [ subject, predicate, object ], + [ ... ] + ], + delete: [ + ... like add, for the triples to delete + ] +} + +Suggestions with neither added nor deleted triples will be discarded. 
+ +At execution time, the script operates on the data graph as the active graph, with the following pre-bound variables: +- focusNode: the NamedNode that is the sh:focusNode of the validation result +- predicate: the NamedNode representing the predicate of the validation result, assuming sh:resultPath is a URI +- value: the value node from the validation result's sh:value, cast into the most suitable JS object +- the other pre-bound variables for the parameters of the constraint, e.g. in a sh:maxCount constraint it would be maxCount + +The script will be executed in read-only mode, i.e. it cannot modify the graph. + +Example with dash:js: + +({ + message: `Copy labels into ${graph.localName(predicate)}`, + add: focusNode.values(rdfs.label).map(label => + [ focusNode, predicate, label ] + ) +})""" ; + rdfs:label "Script suggestion generator" ; + rdfs:subClassOf dash:Script ; + rdfs:subClassOf dash:SuggestionGenerator ; +. +dash:ScriptTestCase + a dash:ShapeClass ; + rdfs:comment """A test case that evaluates a script. Requires exactly one value for dash:js and will operate on the test case's graph (with imports) as both data and shapes graph. + +Supports read-only scripts only at this stage.""" ; + rdfs:label "Script test case" ; + rdfs:subClassOf dash:Script ; + rdfs:subClassOf dash:TestCase ; +. +dash:ScriptValidator + a dash:ShapeClass ; + rdfs:comment """A SHACL validator based on an Active Data Shapes script. + +See the comment at dash:ScriptConstraint for the basic evaluation approach. Note that in addition to focusNode and value/values, the script can access pre-bound variables for each declared argument of the constraint component.""" ; + rdfs:label "Script validator" ; + rdfs:subClassOf dash:Script ; + rdfs:subClassOf sh:Validator ; +. +dash:Service + a dash:ShapeClass ; + dash:abstract true ; + rdfs:comment "A script that gets exposed as a web service, e.g. 
/tbl/service/ex/MyService" ; + rdfs:label "Service" ; + rdfs:subClassOf dash:Script ; + rdfs:subClassOf sh:Parameterizable ; +. +dash:ShapeClass + a dash:ShapeClass ; + dash:hidden true ; + rdfs:comment "A class that is also a node shape. This class can be used as rdf:type instead of the combination of rdfs:Class and sh:NodeShape." ; + rdfs:label "Shape class" ; + rdfs:subClassOf rdfs:Class ; + rdfs:subClassOf sh:NodeShape ; +. +dash:ShapeScript + a rdfs:Class ; + rdfs:comment "A shape script contains extra code that gets injected into the API for the associated node shape. In particular you can use this to define additional functions that operate on the current focus node (the this variable in JavaScript)." ; + rdfs:label "Shape script" ; + rdfs:subClassOf dash:Script ; +. +dash:SingleEditor + a dash:ShapeClass ; + rdfs:comment "An editor for individual value nodes." ; + rdfs:label "Single editor" ; + rdfs:subClassOf dash:Editor ; +. +dash:SingleLineConstraintComponent + a sh:ConstraintComponent ; + rdfs:comment """A constraint component that can be used to declare that all values that are literals must have a lexical form that contains no line breaks ('\\n' or '\\r'). + +User interfaces may use the dash:singleLine flag to prefer a text field over a (multi-line) text area.""" ; + rdfs:label "Single line constraint component" ; + sh:message "Must not contain line breaks." ; + sh:parameter dash:SingleLineConstraintComponent-singleLine ; + sh:validator [ + a sh:SPARQLAskValidator ; + sh:ask """ASK { + FILTER (!$singleLine || !isLiteral($value) || (!contains(str($value), '\\n') && !contains(str($value), '\\r'))) +}""" ; + sh:prefixes ; + ] ; +. +dash:SingleLineConstraintComponent-singleLine + a sh:Parameter ; + sh:path dash:singleLine ; + dash:reifiableBy dash:ConstraintReificationShape ; + sh:datatype xsd:boolean ; + sh:description "True to state that the lexical form of literal value nodes must not contain any line breaks. 
False to state that line breaks are explicitly permitted." ; + sh:maxCount 1 ; + sh:name "single line" ; +. +dash:SingleViewer + a dash:ShapeClass ; + rdfs:comment "A viewer for a single value." ; + rdfs:label "Single viewer" ; + rdfs:subClassOf dash:Viewer ; +. +dash:Stable + a dash:APIStatus ; + rdfs:comment "Features that have been marked stable are deemed of good quality and can be used until marked deprecated." ; + rdfs:label "stable" ; +. +dash:StemConstraintComponent + a sh:ConstraintComponent ; + dash:staticConstraint true ; + rdfs:comment "A constraint component that can be used to verify that every value node is an IRI and the IRI starts with a given string value."@en ; + rdfs:label "Stem constraint component"@en ; + sh:labelTemplate "Value needs to have stem {$stem}" ; + sh:message "Value does not have stem {$stem}" ; + sh:parameter dash:StemConstraintComponent-stem ; + sh:validator dash:hasStem ; +. +dash:StemConstraintComponent-stem + a sh:Parameter ; + sh:path dash:stem ; + dash:reifiableBy dash:ConstraintReificationShape ; + sh:datatype xsd:string ; + sh:description "If specified then every value node must be an IRI and the IRI must start with the given string value." ; + sh:maxCount 1 ; + sh:name "stem" ; +. +dash:StringOrLangString + a rdf:List ; + rdf:first [ + sh:datatype xsd:string ; + ] ; + rdf:rest ( + [ + sh:datatype rdf:langString ; + ] + ) ; + rdfs:comment "An rdf:List that can be used in property constraints as value for sh:or to indicate that all values of a property must be either xsd:string or rdf:langString." ; + rdfs:label "String or langString" ; +. 
+dash:StringOrLangStringOrHTML + a rdf:List ; + rdf:first [ + sh:datatype xsd:string ; + ] ; + rdf:rest ( + [ + sh:datatype rdf:langString ; + ] + [ + sh:datatype rdf:HTML ; + ] + ) ; + rdfs:comment "An rdf:List that can be used in property constraints as value for sh:or to indicate that all values of a property must be either xsd:string, rdf:langString or rdf:HTML (in that order of preference)." ; + rdfs:label "string or langString or HTML" ; +. +dash:SubClassEditor + a dash:SingleEditor ; + rdfs:comment "An editor for properties that declare a dash:rootClass. The editor allows selecting either the class itself or one of its subclasses." ; + rdfs:label "Sub-Class editor" ; +. +dash:SubSetOfConstraintComponent + a sh:ConstraintComponent ; + dash:localConstraint true ; + rdfs:comment "A constraint component that can be used to state that the set of value nodes must be a subset of the value of a given property." ; + rdfs:label "Sub set of constraint component" ; + sh:message "Must be one of the values of {$subSetOf}" ; + sh:parameter dash:SubSetOfConstraintComponent-subSetOf ; + sh:propertyValidator [ + a sh:SPARQLAskValidator ; + sh:ask """ASK { + $this $subSetOf $value . +}""" ; + sh:prefixes ; + ] ; +. +dash:SubSetOfConstraintComponent-subSetOf + a sh:Parameter ; + sh:path dash:subSetOf ; + dash:editor dash:PropertyAutoCompleteEditor ; + dash:reifiableBy dash:ConstraintReificationShape ; + dash:viewer dash:PropertyLabelViewer ; + sh:description "Can be used to state that all value nodes must also be values of a specified other property at the same focus node." ; + sh:name "sub-set of" ; + sh:nodeKind sh:IRI ; +. +dash:SuccessResult + a rdfs:Class ; + rdfs:comment "A result representing a successfully validated constraint." ; + rdfs:label "Success result" ; + rdfs:subClassOf sh:AbstractResult ; +. +dash:SuccessTestCaseResult + a rdfs:Class ; + rdfs:comment "Represents a successful run of a test case." 
; + rdfs:label "Success test case result" ; + rdfs:subClassOf dash:TestCaseResult ; +. +dash:Suggestion + a rdfs:Class ; + dash:abstract true ; + rdfs:comment "Base class of suggestions that modify a graph to \"fix\" the source of a validation result." ; + rdfs:label "Suggestion" ; + rdfs:subClassOf rdfs:Resource ; +. +dash:SuggestionGenerator + a rdfs:Class ; + dash:abstract true ; + rdfs:comment "Base class of objects that can generate suggestions (added or deleted triples) for a validation result of a given constraint component." ; + rdfs:label "Suggestion generator" ; + rdfs:subClassOf rdfs:Resource ; +. +dash:SuggestionResult + a rdfs:Class ; + rdfs:comment "Class of results that have been produced as suggestions, not through SHACL validation. How the actual results are produced is up to implementers. Each instance of this class should have values for sh:focusNode, sh:resultMessage, sh:resultSeverity (suggested default: sh:Info), and dash:suggestion to point at one or more suggestions." ; + rdfs:label "Suggestion result" ; + rdfs:subClassOf sh:AbstractResult ; +. +dash:SymmetricConstraintComponent + a sh:ConstraintComponent ; + rdfs:comment "A constraint component for property shapes to validate that a property is symmetric. For symmetric properties, if A relates to B then B must relate to A." ; + rdfs:label "Symmetric constraint component" ; + sh:message "Symmetric value expected" ; + sh:parameter dash:SymmetricConstraintComponent-symmetric ; + sh:propertyValidator [ + a sh:SPARQLSelectValidator ; + sh:prefixes ; + sh:select """SELECT $this ?value { + FILTER ($symmetric) . + $this $PATH ?value . + FILTER NOT EXISTS { + ?value $PATH $this . + } +}""" ; + ] ; +. +dash:SymmetricConstraintComponent-symmetric + a sh:Parameter ; + sh:path dash:symmetric ; + dash:reifiableBy dash:ConstraintReificationShape ; + sh:datatype xsd:boolean ; + sh:description "If set to true then if A relates to B then B must relate to A." ; + sh:maxCount 1 ; + sh:name "symmetric" ; +. 
+dash:TestCase + a dash:ShapeClass ; + dash:abstract true ; + rdfs:comment "A test case to verify that a (SHACL-based) feature works as expected." ; + rdfs:label "Test case" ; + rdfs:subClassOf rdfs:Resource ; +. +dash:TestCaseResult + a rdfs:Class ; + dash:abstract true ; + rdfs:comment "Base class for results produced by running test cases." ; + rdfs:label "Test case result" ; + rdfs:subClassOf sh:AbstractResult ; +. +dash:TestEnvironment + a rdfs:Class ; + dash:abstract true ; + rdfs:comment "Abstract base class for test environments, holding information on how to set up a test case." ; + rdfs:label "Test environment" ; + rdfs:subClassOf rdfs:Resource ; +. +dash:TextAreaEditor + a dash:SingleEditor ; + rdfs:comment "A multi-line text area to enter the value of a literal." ; + rdfs:label "Text area editor" ; +. +dash:TextAreaWithLangEditor + a dash:SingleEditor ; + rdfs:comment "A multi-line text area to enter the value of a literal and a drop down to select a language." ; + rdfs:label "Text area with lang editor" ; +. +dash:TextFieldEditor + a dash:SingleEditor ; + rdfs:comment """A simple input field to enter the value of a literal, without the ability to change language or datatype. + +This is the fallback editor for any literal if no other editors are more suitable.""" ; + rdfs:label "Text field editor" ; +. +dash:TextFieldWithLangEditor + a dash:SingleEditor ; + rdfs:comment "A single-line input field to enter the value of a literal and a drop down to select language, which is mandatory unless xsd:string is among the permissible datatypes." ; + rdfs:label "Text field with lang editor" ; +. +dash:URIEditor + a dash:SingleEditor ; + rdfs:comment "An input field to enter the URI of a resource, e.g. rdfs:seeAlso links or images." ; + rdfs:label "URI editor" ; +. +dash:URIViewer + a dash:SingleViewer ; + rdfs:comment "A Viewer for URI resources, rendering as a hyperlink to that URI. 
Also includes other ways of interacting with the URI such as opening a nested summary display." ; + rdfs:label "URI viewer" ; +. +dash:UniqueValueForClassConstraintComponent + a sh:ConstraintComponent ; + rdfs:comment "A constraint component that can be used to state that the values of a property must be unique for all instances of a given class (and its subclasses)." ; + rdfs:label "Unique value for class constraint component" ; + sh:labelTemplate "Values must be unique among all instances of {?uniqueValueForClass}" ; + sh:parameter dash:UniqueValueForClassConstraintComponent-uniqueValueForClass ; + sh:propertyValidator [ + a sh:SPARQLSelectValidator ; + sh:message "Value {?value} must be unique but is also used by {?other}" ; + sh:prefixes ; + sh:select """SELECT DISTINCT $this ?value ?other +WHERE { + { + $this $PATH ?value . + ?other $PATH ?value . + FILTER (?other != $this) . + } + ?other a ?type . + ?type rdfs:subClassOf* $uniqueValueForClass . +}""" ; + ] ; +. +dash:UniqueValueForClassConstraintComponent-uniqueValueForClass + a sh:Parameter ; + sh:path dash:uniqueValueForClass ; + dash:reifiableBy dash:ConstraintReificationShape ; + sh:class rdfs:Class ; + sh:description "States that the values of the property must be unique for all instances of a given class (and its subclasses)." ; + sh:name "unique value for class" ; + sh:nodeKind sh:IRI ; +. +dash:UntrustedHTMLViewer + a dash:SingleViewer ; + rdfs:comment "A Viewer for HTML content from untrusted sources. This viewer will sanitize the HTML before rendering. Any a, button, checkbox, form, hidden, input, img, script, select, style and textarea tags and class and style attributes will be removed." ; + rdfs:label "Untrusted HTML viewer" ; +. +dash:ValidationTestCase + a dash:ShapeClass ; + dash:abstract true ; + rdfs:comment "Abstract superclass for test cases concerning SHACL constraint validation. Future versions may add new kinds of validation test cases, e.g. to validate a single resource only." 
; + rdfs:label "Validation test case" ; + rdfs:subClassOf dash:TestCase ; +. +dash:ValueTableViewer + a dash:MultiViewer ; + rdfs:comment "A viewer that renders all values of a given property as a table, with one value per row, and the columns defined by the shape that is the sh:node or sh:class of the property." ; + rdfs:label "Value table viewer" ; +. +dash:Viewer + a dash:ShapeClass ; + dash:abstract true ; + rdfs:comment "The class of widgets for viewing value nodes." ; + rdfs:label "Viewer" ; + rdfs:subClassOf dash:Widget ; +. +dash:Widget + a dash:ShapeClass ; + dash:abstract true ; + rdfs:comment "Base class of user interface components that can be used to display or edit value nodes." ; + rdfs:label "Widget" ; + rdfs:subClassOf rdfs:Resource ; +. +dash:abstract + a rdf:Property ; + rdfs:comment "Indicates that a class is \"abstract\" and cannot be used in asserted rdf:type triples. Only non-abstract subclasses of abstract classes should be instantiated directly." ; + rdfs:domain rdfs:Class ; + rdfs:label "abstract" ; + rdfs:range xsd:boolean ; +. +dash:actionGroup + a rdf:Property ; + rdfs:comment "Links an Action with the ActionGroup that it should be arranged in." ; + rdfs:domain dash:Action ; + rdfs:label "action group" ; + rdfs:range dash:ActionGroup ; +. +dash:actionIconClass + a rdf:Property ; + rdfs:comment "The (CSS) class of an Action for display purposes alongside the label." ; + rdfs:domain dash:Action ; + rdfs:label "action icon class" ; + rdfs:range xsd:string ; +. +dash:addedTriple + a rdf:Property ; + rdfs:comment "May link a dash:GraphUpdate with one or more triples (represented as instances of rdf:Statement) that should be added to fix the source of the result." ; + rdfs:domain dash:GraphUpdate ; + rdfs:label "added triple" ; + rdfs:range rdf:Statement ; +. +dash:all + a rdfs:Resource ; + rdfs:comment "Represents all users/roles, for example as a possible value of the default view for role property." ; + rdfs:label "all" ; +. 
+dash:apiStatus + a rdf:Property ; + rdfs:comment "Defines how and whether the associated feature is part of an external API. APIs may be implemented as (REST) web services, via GraphQL or ADS Script APIs." ; + rdfs:label "API status" ; + rdfs:range dash:APIStatus ; +. +dash:applicableToClass + a rdf:Property ; + rdfs:comment "Can be used to state that a shape is applicable to instances of a given class. This is a softer statement than \"target class\": a target means that all instances of the class must conform to the shape. Being applicable to simply means that the shape may apply to (some) instances of the class. This information can be used by algorithms or humans." ; + rdfs:domain sh:Shape ; + rdfs:label "applicable to class" ; + rdfs:range rdfs:Class ; +. +dash:cachable + a rdf:Property ; + rdfs:comment "If set to true then the results of the SHACL function can be cached in between invocations with the same arguments. In other words, they are stateless and do not depend on triples in any graph, or the current time stamp etc." ; + rdfs:domain sh:Function ; + rdfs:label "cachable" ; + rdfs:range xsd:boolean ; +. +dash:closedByTypes + a rdf:Property ; + rdfs:label "closed by types" ; +. +dash:coExistsWith + a rdf:Property ; + rdfs:comment "Specifies a property that must have a value whenever the property path has a value, and must have no value whenever the property path has no value." ; + rdfs:label "co-exists with" ; + rdfs:range rdf:Property ; +. +dash:composite + a rdf:Property ; + rdfs:comment "Can be used to indicate that a property/path represented by a property constraint represents a composite relationship. In a composite relationship, the life cycle of a \"child\" object (value of the property/path) depends on the \"parent\" object (focus node). If the parent gets deleted, then the child objects should be deleted, too. Tools may use dash:composite (if set to true) to implement cascading delete operations." 
; + rdfs:domain sh:PropertyShape ; + rdfs:label "composite" ; + rdfs:range xsd:boolean ; +. +dash:contextFree + a rdf:Property ; + rdfs:comment "Used to mark certain parameterizables as context-free, meaning that the outcome of a process does not depend on the currently active query graph." ; + rdfs:label "context-free" ; + rdfs:range xsd:boolean ; +. +dash:defaultLang + a rdf:Property ; + rdfs:comment "Can be used to annotate a graph (usually the owl:Ontology) with the default language that tools should suggest for new literal values. For example, predominantly English graphs should have \"en\" as default language." ; + rdfs:domain owl:Ontology ; + rdfs:label "default language" ; + rdfs:range xsd:string ; +. +dash:defaultViewForRole + a rdf:Property ; + rdfs:comment "Links a node shape with the roles for which it shall be used as default view. User interfaces can use these values to select how to present a given RDF resource. The values of this property are URIs representing a group of users or agents. There is a dedicated URI dash:all representing all users." ; + rdfs:domain sh:NodeShape ; + rdfs:label "default view for role" ; +. +dash:deletedTriple + a rdf:Property ; + rdfs:comment "May link a dash:GraphUpdate result with one or more triples (represented as instances of rdf:Statement) that should be deleted to fix the source of the result." ; + rdfs:domain dash:GraphUpdate ; + rdfs:label "deleted triple" ; + rdfs:range rdf:Statement ; +. +dash:dependencyPredicate + a rdf:Property ; + rdfs:comment "Can be used in dash:js node expressions to enumerate the predicates that the computation of the values may depend on. This can be used by clients to determine whether an edit requires re-computation of values on a form or elsewhere. For example, if the dash:js is something like \"focusNode.firstName + focusNode.lastName\" then the dependency predicates should be ex:firstName and ex:lastName." ; + rdfs:label "dependency predicate" ; + rdfs:range rdf:Property ; +. 
+dash:detailsEndpoint + a rdf:Property ; + rdfs:comment """Can be used to link a SHACL property shape with the URL of a SPARQL endpoint that may contain further RDF triples for the value nodes delivered by the property. This can be used to inform a processor that it should switch to values from an external graph when the user wants to retrieve more information about a value. + +This property should be regarded as an "annotation", i.e. it does not have any impact on validation or other built-in SHACL features. However, selected tools may want to use this information. One implementation strategy would be to periodically fetch the values specified by the sh:node or sh:class shape associated with the property, using the property shapes in that shape, and add the resulting triples into the main query graph. + +An example value is "https://query.wikidata.org/sparql".""" ; + rdfs:label "details endpoint" ; +. +dash:detailsGraph + a rdf:Property ; + rdfs:comment """Can be used to link a SHACL property shape with a SHACL node expression that produces the URIs of one or more graphs that contain further RDF triples for the value nodes delivered by the property. This can be used to inform a processor that it should switch to another data graph when the user wants to retrieve more information about a value. + +The node expressions are evaluated with the focus node as input. (It is unclear whether there are also cases where the result may be different for each specific value, in which case the node expression would need a second input argument). + +This property should be regarded as an "annotation", i.e. it does not have any impact on validation or other built-in SHACL features. However, selected tools may want to use this information.""" ; + rdfs:label "details graph" ; +. +dash:editor + a rdf:Property ; + rdfs:comment "Can be used to link a property shape with an editor, to state a preferred editing widget in user interfaces." 
; + rdfs:domain sh:PropertyShape ; + rdfs:label "editor" ; + rdfs:range dash:Editor ; +. +dash:expectedResult + a rdf:Property ; + rdfs:comment "The expected result(s) of a test case. The value range of this property is different for each kind of test cases." ; + rdfs:domain dash:TestCase ; + rdfs:label "expected result" ; +. +dash:expectedResultIsJSON + a rdf:Property ; + rdfs:comment "A flag to indicate that the expected result represents a JSON string. If set to true, then tests would compare JSON structures (regardless of whitespaces) instead of actual syntax." ; + rdfs:label "expected result is JSON" ; + rdfs:range xsd:boolean ; +. +dash:expectedResultIsTTL + a rdf:Property ; + rdfs:comment "A flag to indicate that the expected result represents an RDF graph encoded as a Turtle file. If set to true, then tests would compare graphs instead of actual syntax." ; + rdfs:domain dash:TestCase ; + rdfs:label "expected result is Turtle" ; + rdfs:range xsd:boolean ; +. +dash:fixed + a rdf:Property ; + rdfs:comment "Can be used to mark that certain validation results have already been fixed." ; + rdfs:domain sh:ValidationResult ; + rdfs:label "fixed" ; + rdfs:range xsd:boolean ; +. +dash:hasClass + a sh:SPARQLAskValidator ; + rdfs:label "has class" ; + sh:ask """ + ASK { + $value rdf:type/rdfs:subClassOf* $class . + } + """ ; + sh:message "Value does not have class {$class}" ; + sh:prefixes ; +. +dash:hasMaxExclusive + a sh:SPARQLAskValidator ; + rdfs:comment "Checks whether a given node (?value) has a value less than (<) the provided ?maxExclusive. Returns false if this cannot be determined, e.g. because values do not have comparable types." ; + rdfs:label "has max exclusive" ; + sh:ask "ASK { FILTER ($value < $maxExclusive) }" ; + sh:prefixes ; +. +dash:hasMaxInclusive + a sh:SPARQLAskValidator ; + rdfs:comment "Checks whether a given node (?value) has a value less than or equal to (<=) the provided ?maxInclusive. Returns false if this cannot be determined, e.g. 
because values do not have comparable types." ; + rdfs:label "has max inclusive" ; + sh:ask "ASK { FILTER ($value <= $maxInclusive) }" ; + sh:prefixes ; +. +dash:hasMaxLength + a sh:SPARQLAskValidator ; + rdfs:comment "Checks whether a given string (?value) has a length within a given maximum string length." ; + rdfs:label "has max length" ; + sh:ask """ + ASK { + FILTER (STRLEN(str($value)) <= $maxLength) . + } + """ ; + sh:prefixes ; +. +dash:hasMinExclusive + a sh:SPARQLAskValidator ; + rdfs:comment "Checks whether a given node (?value) has value greater than (>) the provided ?minExclusive. Returns false if this cannot be determined, e.g. because values do not have comparable types." ; + rdfs:label "has min exclusive" ; + sh:ask "ASK { FILTER ($value > $minExclusive) }" ; + sh:prefixes ; +. +dash:hasMinInclusive + a sh:SPARQLAskValidator ; + rdfs:comment "Checks whether a given node (?value) has value greater than or equal to (>=) the provided ?minInclusive. Returns false if this cannot be determined, e.g. because values do not have comparable types." ; + rdfs:label "has min inclusive" ; + sh:ask "ASK { FILTER ($value >= $minInclusive) }" ; + sh:prefixes ; +. +dash:hasMinLength + a sh:SPARQLAskValidator ; + rdfs:comment "Checks whether a given string (?value) has a length within a given minimum string length." ; + rdfs:label "has min length" ; + sh:ask """ + ASK { + FILTER (STRLEN(str($value)) >= $minLength) . + } + """ ; + sh:prefixes ; +. +dash:hasNodeKind + a sh:SPARQLAskValidator ; + rdfs:comment "Checks whether a given node (?value) has a given sh:NodeKind (?nodeKind). For example, sh:hasNodeKind(42, sh:Literal) = true." ; + rdfs:label "has node kind" ; + sh:ask """ + ASK { + FILTER ((isIRI($value) && $nodeKind IN ( sh:IRI, sh:BlankNodeOrIRI, sh:IRIOrLiteral ) ) || + (isLiteral($value) && $nodeKind IN ( sh:Literal, sh:BlankNodeOrLiteral, sh:IRIOrLiteral ) ) || + (isBlank($value) && $nodeKind IN ( sh:BlankNode, sh:BlankNodeOrIRI, sh:BlankNodeOrLiteral ) )) . 
+ } + """ ; + sh:prefixes ; +. +dash:hasPattern + a sh:SPARQLAskValidator ; + rdfs:comment "Checks whether the string representation of a given node (?value) matches a given regular expression (?pattern). Returns false if the value is a blank node." ; + rdfs:label "has pattern" ; + sh:ask "ASK { FILTER (!isBlank($value) && IF(bound($flags), regex(str($value), $pattern, $flags), regex(str($value), $pattern))) }" ; + sh:prefixes ; +. +dash:hasRootClass + a sh:SPARQLAskValidator ; + rdfs:label "has root class" ; + sh:ask """ASK { + $value rdfs:subClassOf* $rootClass . +}""" ; + sh:prefixes ; +. +dash:hasStem + a sh:SPARQLAskValidator ; + rdfs:comment "Checks whether a given node is an IRI starting with a given stem." ; + rdfs:label "has stem" ; + sh:ask "ASK { FILTER (isIRI($value) && STRSTARTS(str($value), $stem)) }" ; + sh:prefixes ; +. +dash:hasValueIn + a rdf:Property ; + rdfs:comment "Specifies a constraint that at least one of the value nodes must be a member of the given list." ; + rdfs:label "has value in" ; + rdfs:range rdf:List ; +. +dash:hasValueWithClass + a rdf:Property ; + rdfs:comment "Specifies a constraint that at least one of the value nodes must be an instance of a given class." ; + rdfs:label "has value with class" ; + rdfs:range rdfs:Class ; +. +dash:height + a rdf:Property ; + rdfs:comment "The height." ; + rdfs:label "height" ; + rdfs:range xsd:integer ; +. +dash:hidden + a rdf:Property ; + rdfs:comment "Properties marked as hidden do not appear in user interfaces, yet remain part of the shape for other purposes such as validation and scripting or GraphQL schema generation." ; + rdfs:domain sh:PropertyShape ; + rdfs:label "hidden" ; + rdfs:range xsd:boolean ; +. +dash:index + a rdf:Property ; + rdfs:label "index" ; + rdfs:range xsd:integer ; +. +dash:indexed + a rdf:Property ; + rdfs:domain sh:PropertyShape ; + rdfs:range xsd:boolean ; +. 
+dash:isDeactivated + a sh:SPARQLFunction ; + dash:apiStatus dash:Stable ; + rdfs:comment "Checks whether a given shape or constraint has been marked as \"deactivated\" using sh:deactivated." ; + rdfs:label "is deactivated" ; + sh:ask """ASK { + ?constraintOrShape sh:deactivated true . +}""" ; + sh:parameter [ + a sh:Parameter ; + sh:path dash:constraintOrShape ; + sh:description "The sh:Constraint or sh:Shape to test." ; + sh:name "constraint or shape" ; + ] ; + sh:prefixes ; + sh:returnType xsd:boolean ; +. +dash:isIn + a sh:SPARQLAskValidator ; + rdfs:label "is in" ; + sh:ask """ + ASK { + GRAPH $shapesGraph { + $in (rdf:rest*)/rdf:first $value . + } + } + """ ; + sh:prefixes ; +. +dash:isLanguageIn + a sh:SPARQLAskValidator ; + rdfs:label "is language in" ; + sh:ask """ + ASK { + BIND (lang($value) AS ?valueLang) . + FILTER EXISTS { + GRAPH $shapesGraph { + $languageIn (rdf:rest*)/rdf:first ?lang . + FILTER (langMatches(?valueLang, ?lang)) + } } + } + """ ; + sh:prefixes ; +. +dash:isNodeKindBlankNode + a sh:SPARQLFunction ; + dash:apiStatus dash:Stable ; + dash:cachable true ; + dash:contextFree true ; + rdfs:comment "Checks if a given sh:NodeKind is one that includes BlankNodes." ; + rdfs:label "is NodeKind BlankNode" ; + sh:ask """ASK { + FILTER ($nodeKind IN ( sh:BlankNode, sh:BlankNodeOrIRI, sh:BlankNodeOrLiteral )) +}""" ; + sh:parameter [ + a sh:Parameter ; + sh:path dash:nodeKind ; + sh:class sh:NodeKind ; + sh:description "The sh:NodeKind to check." ; + sh:name "node kind" ; + sh:nodeKind sh:IRI ; + ] ; + sh:prefixes ; + sh:returnType xsd:boolean ; +. +dash:isNodeKindIRI + a sh:SPARQLFunction ; + dash:apiStatus dash:Stable ; + dash:cachable true ; + dash:contextFree true ; + rdfs:comment "Checks if a given sh:NodeKind is one that includes IRIs." 
; + rdfs:label "is NodeKind IRI" ; + sh:ask """ASK { + FILTER ($nodeKind IN ( sh:IRI, sh:BlankNodeOrIRI, sh:IRIOrLiteral )) +}""" ; + sh:parameter [ + a sh:Parameter ; + sh:path dash:nodeKind ; + sh:class sh:NodeKind ; + sh:description "The sh:NodeKind to check." ; + sh:name "node kind" ; + sh:nodeKind sh:IRI ; + ] ; + sh:prefixes ; + sh:returnType xsd:boolean ; +. +dash:isNodeKindLiteral + a sh:SPARQLFunction ; + dash:apiStatus dash:Stable ; + dash:cachable true ; + dash:contextFree true ; + rdfs:comment "Checks if a given sh:NodeKind is one that includes Literals." ; + rdfs:label "is NodeKind Literal" ; + sh:ask """ASK { + FILTER ($nodeKind IN ( sh:Literal, sh:BlankNodeOrLiteral, sh:IRIOrLiteral )) +}""" ; + sh:parameter [ + a sh:Parameter ; + sh:path dash:nodeKind ; + sh:class sh:NodeKind ; + sh:description "The sh:NodeKind to check." ; + sh:name "node kind" ; + sh:nodeKind sh:IRI ; + ] ; + sh:prefixes ; + sh:returnType xsd:boolean ; +. +dash:isSubClassOf + a sh:SPARQLFunction ; + dash:apiStatus dash:Stable ; + rdfs:comment "Returns true if a given class (first argument) is a subclass of a given other class (second argument), or identical to that class. This is equivalent to an rdfs:subClassOf* check." ; + rdfs:label "is subclass of" ; + sh:ask """ASK { + $subclass rdfs:subClassOf* $superclass . +}""" ; + sh:parameter dash:isSubClassOf-subclass ; + sh:parameter dash:isSubClassOf-superclass ; + sh:prefixes ; + sh:returnType xsd:boolean ; +. +dash:isSubClassOf-subclass + a sh:Parameter ; + sh:path dash:subclass ; + sh:class rdfs:Class ; + sh:description "The (potential) subclass." ; + sh:name "subclass" ; +. +dash:isSubClassOf-superclass + a sh:Parameter ; + sh:path dash:superclass ; + sh:class rdfs:Class ; + sh:description "The (potential) superclass." ; + sh:name "superclass" ; + sh:order "1"^^xsd:decimal ; +. +dash:js + a rdf:Property ; + rdfs:comment "The JavaScript source code of a Script." 
; + rdfs:domain dash:Script ; + rdfs:label "JavaScript source code" ; + rdfs:range xsd:string ; +. +dash:localConstraint + a rdf:Property ; + rdfs:comment """Can be set to true for those constraint components where the validation does not require to visit any other triples than the shape definitions and the direct property values of the focus node mentioned in the property constraints. Examples of this include sh:minCount and sh:hasValue. + +Constraint components that are marked as such can be optimized by engines, e.g. they can be evaluated client-side at form submission time, without having to make a round-trip to a server, assuming the client has downloaded a complete snapshot of the resource. + +Any component marked with dash:staticConstraint is also a dash:localConstraint.""" ; + rdfs:domain sh:ConstraintComponent ; + rdfs:label "local constraint" ; + rdfs:range xsd:boolean ; +. +dash:mimeTypes + a rdf:Property ; + rdfs:comment """For file-typed properties, this can be used to specify the expected/allowed mime types of its values. This can be used, for example, to limit file input boxes or file selectors. If multiple values are allowed then they need to be separated by commas. + +Example values are listed at https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types""" ; + rdfs:domain sh:PropertyShape ; + rdfs:label "mime types" ; + rdfs:range xsd:string ; +. +dash:neverMaterialize + a rdf:Property ; + rdfs:comment "If set to true at a property shape then any sh:values and sh:defaultValue rules of this property will be ignored when 'all inferences' are computed. This is useful for property values that shall only be computed for individual focus nodes (e.g. when a user visits a resource) but not for large inference runs." ; + rdfs:domain sh:PropertyShape ; + rdfs:label "never materialize" ; + rdfs:range xsd:boolean ; +. 
+dash:onAllValues + a rdf:Property ; + rdfs:comment "If set to true for a ScriptConstraint or ScriptValidator, then the associated script will receive all value nodes at once, as a value of the variable values. By default (or false), the script is called for each value node individually." ; + rdfs:label "on all values" ; + rdfs:range xsd:boolean ; +. +dash:propertySuggestionGenerator + a rdf:Property ; + rdfs:comment "Links the constraint component with instances of dash:SuggestionGenerator that may be used to produce suggestions for a given validation result that was produced by a property constraint." ; + rdfs:domain sh:ConstraintComponent ; + rdfs:label "property suggestion generator" ; + rdfs:range dash:SuggestionGenerator ; +. +dash:readOnly + a rdf:Property ; + rdfs:comment "Used as a hint for user interfaces that values of the associated property should not be editable. The values of this may be the boolean literals true or false or, more generally, a SHACL node expression that must evaluate to true or false." ; + rdfs:domain sh:PropertyShape ; + rdfs:label "read only" ; +. +dash:reifiableBy + a rdf:Property ; + rdfs:comment "Can be used to specify the node shape that may be applied to reified statements produced by a property shape. The property shape must have a URI resource as its sh:path. The values of this property must be node shapes. User interfaces can use this information to determine which properties to present to users when reified statements are explored or edited. Use dash:None to indicate that no reification should be permitted." ; + rdfs:domain sh:PropertyShape ; + rdfs:label "reifiable by" ; + rdfs:range sh:NodeShape ; +. +dash:resourceAction + a rdf:Property ; + rdfs:comment "Links a class with the Resource Actions that can be applied to instances of that class." ; + rdfs:domain rdfs:Class ; + rdfs:label "resource action" ; + rdfs:range dash:ResourceAction ; +. +dash:rootClass + a rdf:Property ; + rdfs:label "root class" ; +. 
+dash:shape + a rdf:Property ; + rdfs:comment "States that a subject resource has a given shape. This property can, for example, be used to capture results of SHACL validation on static data." ; + rdfs:label "shape" ; + rdfs:range sh:Shape ; +. +dash:shapeScript + a rdf:Property ; + rdfs:domain sh:NodeShape ; + rdfs:label "shape script" ; +. +dash:singleLine + a rdf:Property ; + rdfs:label "single line" ; + rdfs:range xsd:boolean ; +. +dash:staticConstraint + a rdf:Property ; + rdfs:comment """Can be set to true for those constraint components where the validation does not require to visit any other triples than the parameters. Examples of this include sh:datatype or sh:nodeKind, where no further triples need to be queried to determine the result. + +Constraint components that are marked as such can be optimized by engines, e.g. they can be evaluated client-side at form submission time, without having to make a round-trip to a server.""" ; + rdfs:domain sh:ConstraintComponent ; + rdfs:label "static constraint" ; + rdfs:range xsd:boolean ; +. +dash:stem + a rdf:Property ; + rdfs:comment "Specifies a string value that the IRI of the value nodes must start with."@en ; + rdfs:label "stem"@en ; + rdfs:range xsd:string ; +. +dash:subSetOf + a rdf:Property ; + rdfs:label "sub set of" ; +. +dash:suggestion + a rdf:Property ; + rdfs:comment "Can be used to link a result with one or more suggestions on how to address or improve the underlying issue." ; + rdfs:domain sh:AbstractResult ; + rdfs:label "suggestion" ; + rdfs:range dash:Suggestion ; +. +dash:suggestionConfidence + a rdf:Property ; + rdfs:comment "An optional confidence between 0% and 100%. Suggestions with 100% confidence are strongly recommended. Can be used to sort recommended updates." ; + rdfs:domain dash:Suggestion ; + rdfs:label "suggestion confidence" ; + rdfs:range xsd:decimal ; +. 
+dash:suggestionGenerator + a rdf:Property ; + rdfs:comment "Links a sh:SPARQLConstraint or sh:JSConstraint with instances of dash:SuggestionGenerator that may be used to produce suggestions for a given validation result that was produced by the constraint." ; + rdfs:label "suggestion generator" ; + rdfs:range dash:SuggestionGenerator ; +. +dash:suggestionGroup + a rdf:Property ; + rdfs:comment "Can be used to link a suggestion with the group identifier to which it belongs. By default this is a link to the dash:SuggestionGenerator, but in principle this could be any value." ; + rdfs:domain dash:Suggestion ; + rdfs:label "suggestion" ; +. +dash:symmetric + a rdf:Property ; + rdfs:comment "True to declare that the associated property path is symmetric." ; + rdfs:label "symmetric" ; +. +dash:toString + a sh:SPARQLFunction ; + dash:cachable true ; + rdfs:comment "Returns a literal with datatype xsd:string that has the input value as its string. If the input value is an (URI) resource then its URI will be used." ; + rdfs:label "to string" ; + sh:labelTemplate "Convert {$arg} to xsd:string" ; + sh:parameter [ + a sh:Parameter ; + sh:path dash:arg ; + sh:description "The input value." ; + sh:name "arg" ; + sh:nodeKind sh:IRIOrLiteral ; + ] ; + sh:prefixes ; + sh:returnType xsd:string ; + sh:select """SELECT (xsd:string($arg) AS ?result) +WHERE { +}""" ; +. +dash:uniqueValueForClass + a rdf:Property ; + rdfs:label "unique value for class" ; +. +dash:uriTemplate + a sh:SPARQLFunction ; + dash:apiStatus dash:Stable ; + dash:cachable true ; + dash:contextFree true ; + rdfs:comment """Inserts a given value into a given URI template, producing a new xsd:anyURI literal. + +In the future this should support RFC 6570 but for now it is limited to simple {...} patterns.""" ; + rdfs:label "URI template" ; + sh:parameter [ + a sh:Parameter ; + sh:path dash:template ; + sh:datatype xsd:string ; + sh:description "The URI template, e.g. \"http://example.org/{symbol}\"." 
; + sh:name "template" ; + sh:order 0 ; + ] ; + sh:parameter [ + a sh:Parameter ; + sh:path dash:value ; + sh:description "The literal value to insert into the template. Will use the URI-encoded string of the lexical form (for now)." ; + sh:name "value" ; + sh:nodeKind sh:Literal ; + sh:order 1 ; + ] ; + sh:prefixes ; + sh:returnType xsd:anyURI ; + sh:select """SELECT ?result +WHERE { + BIND (xsd:anyURI(REPLACE(?template, "\\\\{[a-zA-Z]+\\\\}", $value)) AS ?result) +}""" ; +. +dash:validateShapes + a rdf:Property ; + rdfs:comment "True to also validate the shapes itself (i.e. parameter declarations)." ; + rdfs:domain dash:GraphValidationTestCase ; + rdfs:label "validate shapes" ; + rdfs:range xsd:boolean ; +. +dash:valueCount + a sh:SPARQLFunction ; + dash:apiStatus dash:Stable ; + rdfs:comment "Computes the number of objects for a given subject/predicate combination." ; + rdfs:label "value count" ; + sh:parameter [ + a sh:Parameter ; + sh:path dash:predicate ; + sh:class rdfs:Resource ; + sh:description "The predicate to get the number of objects of." ; + sh:name "predicate" ; + sh:order 1 ; + ] ; + sh:parameter [ + a sh:Parameter ; + sh:path dash:subject ; + sh:class rdfs:Resource ; + sh:description "The subject to get the number of objects of." ; + sh:name "subject" ; + sh:order 0 ; + ] ; + sh:prefixes ; + sh:returnType xsd:integer ; + sh:select """ + SELECT (COUNT(?object) AS ?result) + WHERE { + $subject $predicate ?object . + } +""" ; +. +dash:viewer + a rdf:Property ; + rdfs:comment "Can be used to link a property shape with a viewer, to state a preferred viewing widget in user interfaces." ; + rdfs:domain sh:PropertyShape ; + rdfs:label "viewer" ; + rdfs:range dash:Viewer ; +. +dash:width + a rdf:Property ; + rdfs:comment "The width." ; + rdfs:label "width" ; + rdfs:range xsd:integer ; +. +dash:x + a rdf:Property ; + rdfs:comment "The x position." ; + rdfs:label "x" ; + rdfs:range xsd:integer ; +. +dash:y + a rdf:Property ; + rdfs:comment "The y position." 
; + rdfs:label "y" ; + rdfs:range xsd:integer ; +. +owl:Class + a rdfs:Class ; + rdfs:subClassOf rdfs:Class ; +. +sh:AbstractResult + dash:abstract true ; +. +sh:ClassConstraintComponent + sh:labelTemplate "Value needs to have class {$class}" ; + sh:validator dash:hasClass ; +. +sh:ClosedConstraintComponent + dash:localConstraint true ; + sh:labelTemplate "Closed shape: only the enumerated properties can be used" ; + sh:nodeValidator [ + a sh:SPARQLSelectValidator ; + sh:message "Predicate {?path} is not allowed (closed shape)" ; + sh:prefixes ; + sh:select """ + SELECT $this (?predicate AS ?path) ?value + WHERE { + { + FILTER ($closed) . + } + $this ?predicate ?value . + FILTER (NOT EXISTS { + GRAPH $shapesGraph { + $currentShape sh:property/sh:path ?predicate . + } + } && (!bound($ignoredProperties) || NOT EXISTS { + GRAPH $shapesGraph { + $ignoredProperties rdf:rest*/rdf:first ?predicate . + } + })) + } +""" ; + ] ; +. +sh:DatatypeConstraintComponent + dash:staticConstraint true ; + sh:labelTemplate "Values must have datatype {$datatype}" ; + sh:message "Value does not have datatype {$datatype}" ; +. +sh:DisjointConstraintComponent + dash:localConstraint true ; + sh:validator [ + a sh:SPARQLAskValidator ; + sh:ask """ + ASK { + FILTER NOT EXISTS { + $this $disjoint $value . + } + } + """ ; + sh:message "Property must not share any values with {$disjoint}" ; + sh:prefixes ; + ] ; +. +sh:EqualsConstraintComponent + dash:localConstraint true ; + sh:message "Must have same values as {$equals}" ; + sh:nodeValidator [ + a sh:SPARQLSelectValidator ; + sh:prefixes ; + sh:select """ + SELECT DISTINCT $this ?value + WHERE { + { + FILTER NOT EXISTS { $this $equals $this } + BIND ($this AS ?value) . + } + UNION + { + $this $equals ?value . + FILTER (?value != $this) . + } + } + """ ; + ] ; + sh:propertyValidator [ + a sh:SPARQLSelectValidator ; + sh:prefixes ; + sh:select """ + SELECT DISTINCT $this ?value + WHERE { + { + $this $PATH ?value . 
+ MINUS { + $this $equals ?value . + } + } + UNION + { + $this $equals ?value . + MINUS { + $this $PATH ?value . + } + } + } + """ ; + ] ; +. +sh:Function + dash:abstract true ; +. +sh:HasValueConstraintComponent + dash:localConstraint true ; + sh:labelTemplate "Must have value {$hasValue}" ; + sh:nodeValidator [ + a sh:SPARQLAskValidator ; + sh:ask """ASK { + FILTER ($value = $hasValue) +}""" ; + sh:message "Value must be {$hasValue}" ; + sh:prefixes ; + ] ; + sh:propertyValidator [ + a sh:SPARQLSelectValidator ; + sh:message "Missing expected value {$hasValue}" ; + sh:prefixes ; + sh:select """ + SELECT $this + WHERE { + FILTER NOT EXISTS { $this $PATH $hasValue } + } + """ ; + ] ; +. +sh:InConstraintComponent + dash:localConstraint true ; + sh:labelTemplate "Value must be in {$in}" ; + sh:message "Value is not in {$in}" ; + sh:validator dash:isIn ; +. +sh:LanguageInConstraintComponent + dash:localConstraint true ; + sh:labelTemplate "Language must match any of {$languageIn}" ; + sh:message "Language does not match any of {$languageIn}" ; + sh:validator dash:isLanguageIn ; +. +sh:LessThanConstraintComponent + dash:localConstraint true ; + sh:message "Value is not < value of {$lessThan}" ; + sh:propertyValidator [ + a sh:SPARQLSelectValidator ; + sh:prefixes ; + sh:select """ + SELECT $this ?value + WHERE { + $this $PATH ?value . + $this $lessThan ?otherValue . + BIND (?value < ?otherValue AS ?result) . + FILTER (!bound(?result) || !(?result)) . + } + """ ; + ] ; +. +sh:LessThanOrEqualsConstraintComponent + dash:localConstraint true ; + sh:message "Value is not <= value of {$lessThanOrEquals}" ; + sh:propertyValidator [ + a sh:SPARQLSelectValidator ; + sh:prefixes ; + sh:select """ + SELECT DISTINCT $this ?value + WHERE { + $this $PATH ?value . + $this $lessThanOrEquals ?otherValue . + BIND (?value <= ?otherValue AS ?result) . + FILTER (!bound(?result) || !(?result)) . + } +""" ; + ] ; +. 
+sh:MaxCountConstraintComponent + dash:localConstraint true ; + sh:labelTemplate "Must not have more than {$maxCount} values" ; + sh:message "More than {$maxCount} values" ; + sh:propertyValidator [ + a sh:SPARQLSelectValidator ; + sh:prefixes ; + sh:select """ + SELECT $this + WHERE { + $this $PATH ?value . + } + GROUP BY $this + HAVING (COUNT(DISTINCT ?value) > $maxCount) + """ ; + ] ; +. +sh:MaxExclusiveConstraintComponent + dash:staticConstraint true ; + sh:labelTemplate "Value must be < {$maxExclusive}" ; + sh:message "Value is not < {$maxExclusive}" ; + sh:validator dash:hasMaxExclusive ; +. +sh:MaxInclusiveConstraintComponent + dash:staticConstraint true ; + sh:labelTemplate "Value must be <= {$maxInclusive}" ; + sh:message "Value is not <= {$maxInclusive}" ; + sh:validator dash:hasMaxInclusive ; +. +sh:MaxLengthConstraintComponent + dash:staticConstraint true ; + sh:labelTemplate "Value must not have more than {$maxLength} characters" ; + sh:message "Value has more than {$maxLength} characters" ; + sh:validator dash:hasMaxLength ; +. +sh:MinCountConstraintComponent + dash:localConstraint true ; + sh:labelTemplate "Must have at least {$minCount} values" ; + sh:message "Fewer than {$minCount} values" ; + sh:propertyValidator [ + a sh:SPARQLSelectValidator ; + sh:prefixes ; + sh:select """ + SELECT $this + WHERE { + OPTIONAL { + $this $PATH ?value . + } + } + GROUP BY $this + HAVING (COUNT(DISTINCT ?value) < $minCount) + """ ; + ] ; +. +sh:MinExclusiveConstraintComponent + dash:staticConstraint true ; + sh:labelTemplate "Value must be > {$minExclusive}" ; + sh:message "Value is not > {$minExclusive}" ; + sh:validator dash:hasMinExclusive ; +. +sh:MinInclusiveConstraintComponent + dash:staticConstraint true ; + sh:labelTemplate "Value must be >= {$minInclusive}" ; + sh:message "Value is not >= {$minInclusive}" ; + sh:validator dash:hasMinInclusive ; +. 
+sh:MinLengthConstraintComponent + dash:staticConstraint true ; + sh:labelTemplate "Value must have less than {$minLength} characters" ; + sh:message "Value has less than {$minLength} characters" ; + sh:validator dash:hasMinLength ; +. +sh:NodeConstraintComponent + sh:message "Value does not have shape {$node}" ; +. +sh:NodeKindConstraintComponent + dash:staticConstraint true ; + sh:labelTemplate "Value must have node kind {$nodeKind}" ; + sh:message "Value does not have node kind {$nodeKind}" ; + sh:validator dash:hasNodeKind ; +. +sh:NotConstraintComponent + sh:labelTemplate "Value must not have shape {$not}" ; + sh:message "Value does have shape {$not}" ; +. +sh:Parameterizable + dash:abstract true ; +. +sh:PatternConstraintComponent + dash:staticConstraint true ; + sh:labelTemplate "Value must match pattern \"{$pattern}\"" ; + sh:message "Value does not match pattern \"{$pattern}\"" ; + sh:validator dash:hasPattern ; +. +sh:QualifiedMaxCountConstraintComponent + sh:labelTemplate "No more than {$qualifiedMaxCount} values can have shape {$qualifiedValueShape}" ; + sh:message "More than {$qualifiedMaxCount} values have shape {$qualifiedValueShape}" ; +. +sh:QualifiedMinCountConstraintComponent + sh:labelTemplate "No fewer than {$qualifiedMinCount} values can have shape {$qualifiedValueShape}" ; + sh:message "Fewer than {$qualifiedMinCount} values have shape {$qualifiedValueShape}" ; +. +sh:Rule + dash:abstract true ; +. +sh:Rules + a rdfs:Resource ; + rdfs:comment "The SHACL rules entailment regime." ; + rdfs:label "SHACL Rules" ; + rdfs:seeAlso ; +. +sh:SPARQLExecutable + dash:abstract true ; +. +sh:Shape + dash:abstract true ; +. +sh:Target + dash:abstract true ; +. +sh:TargetType + dash:abstract true ; +. 
+sh:UniqueLangConstraintComponent + dash:localConstraint true ; + sh:labelTemplate "No language can be used more than once" ; + sh:message "Language \"{?lang}\" used more than once" ; + sh:propertyValidator [ + a sh:SPARQLSelectValidator ; + sh:prefixes ; + sh:select """ + SELECT DISTINCT $this ?lang + WHERE { + { + FILTER sameTerm($uniqueLang, true) . + } + $this $PATH ?value . + BIND (lang(?value) AS ?lang) . + FILTER (bound(?lang) && ?lang != "") . + FILTER EXISTS { + $this $PATH ?otherValue . + FILTER (?otherValue != ?value && ?lang = lang(?otherValue)) . + } + } + """ ; + ] ; +. +sh:Validator + dash:abstract true ; +. +sh:order + rdfs:range xsd:decimal ; +. diff --git a/pyshacl/assets/make_builtin.py b/pyshacl/assets/make_builtin.py index 7f5102e..428ed84 100644 --- a/pyshacl/assets/make_builtin.py +++ b/pyshacl/assets/make_builtin.py @@ -11,18 +11,25 @@ with open("./schema.ttl", "rb") as f: g = Graph(store=store, identifier=identifier, bind_namespaces='core').parse(file=f) with open("./schema.pickle", "wb") as f: - pickle.dump((store, identifier), f, protocol=4) # protocol 5 only works in python 3.8+ + pickle.dump((store, identifier), f, protocol=5) identifier = URIRef("http://www.w3.org/ns/shacl#") store = Memory(identifier=identifier) with open("./shacl.ttl", "rb") as f: g = Graph(store=store, identifier=identifier, bind_namespaces='core').parse(file=f) with open("./shacl.pickle", "wb") as f: - pickle.dump((store, identifier), f, protocol=4) # protocol 5 only works in python 3.8+ + pickle.dump((store, identifier), f, protocol=5) + +identifier = URIRef("http://datashapes.org/dash") +store = Memory(identifier=identifier) +with open("./dash.ttl", "rb") as f: + g = Graph(store=store, identifier=identifier, bind_namespaces='core').parse(file=f) +with open("./dash.pickle", "wb") as f: + pickle.dump((store, identifier), f, protocol=5) identifier = URIRef("http://www.w3.org/ns/shacl-shacl#") store = Memory(identifier=identifier) with open("./shacl-shacl.ttl", "rb") 
as f: g = Graph(store=store, identifier=identifier, bind_namespaces='core').parse(file=f) with open("./shacl-shacl.pickle", "wb") as f: - pickle.dump((store, identifier), f, protocol=4) # protocol 5 only works in python 3.8+ + pickle.dump((store, identifier), f, protocol=5) diff --git a/pyshacl/assets/schema.pickle b/pyshacl/assets/schema.pickle index 051f844..279d10a 100644 Binary files a/pyshacl/assets/schema.pickle and b/pyshacl/assets/schema.pickle differ diff --git a/pyshacl/assets/shacl-shacl.pickle b/pyshacl/assets/shacl-shacl.pickle index a872e3c..0b3dfea 100644 Binary files a/pyshacl/assets/shacl-shacl.pickle and b/pyshacl/assets/shacl-shacl.pickle differ diff --git a/pyshacl/assets/shacl.pickle b/pyshacl/assets/shacl.pickle index 353d380..370b48a 100644 Binary files a/pyshacl/assets/shacl.pickle and b/pyshacl/assets/shacl.pickle differ diff --git a/pyshacl/constraints/constraint_component.py b/pyshacl/constraints/constraint_component.py index 1d1fdf3..35fe60d 100644 --- a/pyshacl/constraints/constraint_component.py +++ b/pyshacl/constraints/constraint_component.py @@ -166,7 +166,11 @@ def make_v_result_description( severity_desc = "Validation Result" source_shape_text = stringify_node(sg, self.shape.node) severity_node_text = stringify_node(sg, severity) - focus_node_text = stringify_node(datagraph or sg, focus_node) + try: + focus_node_text = stringify_node(datagraph or sg, focus_node) + except (LookupError, ValueError): + # focus node doesn't exist in the datagraph. We can deal. + focus_node_text = str(focus_node) desc = "{} in {} ({}):\n\tSeverity: {}\n\tSource Shape: {}\n\tFocus Node: {}\n".format( severity_desc, constraint_name, @@ -176,7 +180,11 @@ def make_v_result_description( focus_node_text, ) if value_node is not None: - val_node_string = stringify_node(datagraph or sg, value_node) + try: + val_node_string = stringify_node(datagraph or sg, value_node) + except (LookupError, ValueError): + # value node doesn't exist in the datagraph. 
+ val_node_string = str(value_node) desc += "\tValue Node: {}\n".format(val_node_string) if result_path is None and self.shape.is_property_shape: result_path = self.shape.path() diff --git a/pyshacl/constraints/core/cardinality_constraints.py b/pyshacl/constraints/core/cardinality_constraints.py index 4d0f49f..113c089 100644 --- a/pyshacl/constraints/core/cardinality_constraints.py +++ b/pyshacl/constraints/core/cardinality_constraints.py @@ -78,9 +78,12 @@ def make_generic_messages(self, datagraph: GraphLike, focus_node, value_node) -> p = self.shape.path() if p: p = stringify_node(self.shape.sg.graph, p) - m = "Less than {} values on {}->{}".format( - str(self.min_count.value), stringify_node(datagraph, focus_node), p - ) + try: + focus_string = stringify_node(datagraph, focus_node) + except (LookupError, ValueError): + # focus node doesn't exist in the datagraph. We can deal. + focus_string = str(focus_node) + m = "Less than {} values on {}->{}".format(str(self.min_count.value), focus_string, p) else: m = "Less than {} values on {}".format(str(self.min_count.value), stringify_node(datagraph, focus_node)) return [Literal(m)] diff --git a/pyshacl/constraints/core/logical_constraints.py b/pyshacl/constraints/core/logical_constraints.py index 777f7e5..b81f2ae 100644 --- a/pyshacl/constraints/core/logical_constraints.py +++ b/pyshacl/constraints/core/logical_constraints.py @@ -57,11 +57,16 @@ def constraint_name(cls) -> str: return "NotConstraintComponent" def make_generic_messages(self, datagraph: GraphLike, focus_node, value_node) -> List[rdflib.Literal]: + try: + value_node_str = stringify_node(datagraph, value_node) + except (LookupError, ValueError): + # value node doesn't exist in the datagraph. 
+ value_node_str = str(value_node) if len(self.not_list) == 1: - m = f"Node {stringify_node(datagraph, value_node)} must not conform to shape {stringify_node(self.shape.sg.graph, self.not_list[0])}" + m = f"Node {value_node_str} must not conform to shape {stringify_node(self.shape.sg.graph, self.not_list[0])}" else: nots_list = " , ".join(stringify_node(self.shape.sg.graph, n) for n in self.not_list) - m = f"Node {stringify_node(datagraph, value_node)} must not conform to any shapes in {nots_list}" + m = f"Node {value_node_str} must not conform to any shapes in {nots_list}" return [rdflib.Literal(m)] def evaluate(self, executor: SHACLExecutor, datagraph: GraphLike, focus_value_nodes: Dict, _evaluation_path: List): @@ -172,7 +177,12 @@ def make_generic_messages(self, datagraph: GraphLike, focus_node, value_node) -> ) and_node_strings.append(f"({and_node_string1})") and_node_string = " and ".join(and_node_strings) - m = "Node {} must conform to all shapes in {}".format(stringify_node(datagraph, value_node), and_node_string) + try: + value_node_str = stringify_node(datagraph, value_node) + except (LookupError, ValueError): + # value node doesn't exist in the datagraph. + value_node_str = str(value_node) + m = f"Node {value_node_str} must conform to all shapes in {and_node_string}" return [rdflib.Literal(m)] def evaluate( @@ -277,9 +287,12 @@ def make_generic_messages(self, datagraph: GraphLike, focus_node, value_node) -> ) or_node_strings.append(f"({or_node_string1})") or_node_string = " and ".join(or_node_strings) - m = "Node {} must conform to one or more shapes in {}".format( - stringify_node(datagraph, value_node), or_node_string - ) + try: + value_node_str = stringify_node(datagraph, value_node) + except (LookupError, ValueError): + # value node doesn't exist in the datagraph. 
+ value_node_str = str(value_node) + m = f"Node {value_node_str} must conform to one or more shapes in {or_node_string}" return [rdflib.Literal(m)] def evaluate( @@ -384,9 +397,12 @@ def make_generic_messages(self, datagraph: GraphLike, focus_node, value_node) -> ) xone_node_strings.append(f"({xone_node_string1})") xone_node_string = " and ".join(xone_node_strings) - m = "Node {} must conform to exactly one shape in {}".format( - stringify_node(datagraph, value_node), xone_node_string - ) + try: + value_node_str = stringify_node(datagraph, value_node) + except (LookupError, ValueError): + # value node doesn't exist in the datagraph. + value_node_str = str(value_node) + m = f"Node {value_node_str} must conform to exactly one shape in {xone_node_string}" return [rdflib.Literal(m)] def evaluate( diff --git a/pyshacl/constraints/core/string_based_constraints.py b/pyshacl/constraints/core/string_based_constraints.py index 279f758..8a098f1 100644 --- a/pyshacl/constraints/core/string_based_constraints.py +++ b/pyshacl/constraints/core/string_based_constraints.py @@ -4,7 +4,7 @@ """ import logging import re -from typing import Dict, List +from typing import Dict, List, cast import rdflib from rdflib.namespace import XSD @@ -261,7 +261,7 @@ class PatternConstraintComponent(StringBasedConstraintBase): def __init__(self, shape: Shape) -> None: super(PatternConstraintComponent, self).__init__(shape) - patterns_found: List[RDFNode] = [] + patterns_found: List[rdflib.Literal] = [] for pattern_found in self.shape.objects(SH_pattern): if not isinstance(pattern_found, rdflib.Literal): raise ConstraintLoadError( @@ -274,7 +274,7 @@ def __init__(self, shape: Shape) -> None: "PatternConstraintComponent must have at least one sh:pattern predicate.", "https://www.w3.org/TR/shacl/#PatternConstraintComponent", ) - self.string_rules = patterns_found + self.string_rules = cast(List[RDFNode], patterns_found) flags_found = set(self.shape.objects(SH_flags)) if len(flags_found) > 0: # Just get 
the first found flags @@ -282,6 +282,24 @@ def __init__(self, shape: Shape) -> None: else: self.flags = None + re_flags = 0 + if self.flags: + flags = str(self.flags.value).lower() + case_insensitive = 'i' in flags + if case_insensitive: + re_flags |= re.I + m = 'm' in flags + if m: + re_flags |= re.M + self.compiled_cache = {} + for p in patterns_found: + if p.value is not None and len(p.value) > 1: + re_pattern = str(p.value) + else: + re_pattern = str(p) + re_matcher = re.compile(re_pattern, re_flags) + self.compiled_cache[p] = re_matcher + @classmethod def constraint_parameters(cls) -> List[rdflib.URIRef]: return [SH_pattern] @@ -311,18 +329,9 @@ def make_generic_messages(self, datagraph: GraphLike, focus_node, value_node) -> def _evaluate_string_rule(self, r, target_graph, f_v_dict): reports = [] non_conformant = False - assert isinstance(r, rdflib.Literal) - re_flags = 0 - if self.flags: - flags = str(self.flags.value).lower() - case_insensitive = 'i' in flags - if case_insensitive: - re_flags |= re.I - m = 'm' in flags - if m: - re_flags |= re.M - re_pattern = str(r.value) - re_matcher = re.compile(re_pattern, re_flags) + re_matcher = self.compiled_cache.get(r, None) + if re_matcher is None: + raise RuntimeError(f"No compiled regex for {r}") for f, value_nodes in f_v_dict.items(): for v in value_nodes: match = False @@ -379,7 +388,7 @@ def constraint_name(cls) -> str: return "LanguageInConstraintComponent" def make_generic_messages(self, datagraph: GraphLike, focus_node, value_node) -> List[rdflib.Literal]: - m = "String language is not in {}".format(stringify_node(datagraph, self.string_rules[0])) + m = "String language is not in {}".format(stringify_node(self.shape.sg.graph, self.string_rules[0])) return [rdflib.Literal(m)] def _evaluate_string_rule(self, r, target_graph, f_v_dict): diff --git a/pyshacl/rdfutil/clone.py b/pyshacl/rdfutil/clone.py index 3d31565..acf070c 100644 --- a/pyshacl/rdfutil/clone.py +++ b/pyshacl/rdfutil/clone.py @@ -125,6 +125,8 @@ 
def mix_datasets( for i in base_ds.store.contexts(None) ] if isinstance(base_ds, rdflib.Dataset) and len(base_named_graphs) < 1: + # rdflib.Dataset always includes the DEFAULT_GRAPH_ID named graph + # but a conjunctive graph does not. It _could_ return no graphs. base_named_graphs = [ rdflib.Graph(base_ds.store, DATASET_DEFAULT_GRAPH_ID, namespace_manager=base_ds.namespace_manager) ] @@ -355,5 +357,5 @@ def clone_node( cloned_o = clone_node(graph, o, target_graph, recursion=recursion + 1, deep_clone=deep_clone) target_graph.add((new_node, cloned_p, cloned_o)) else: - new_node = rdflib.term.Identifier(str(node)) + raise ValueError(f"Cannot clone node of type {type(node)}") return new_node diff --git a/pyshacl/rdfutil/load.py b/pyshacl/rdfutil/load.py index 95d5329..7908d34 100644 --- a/pyshacl/rdfutil/load.py +++ b/pyshacl/rdfutil/load.py @@ -13,10 +13,12 @@ from urllib.error import HTTPError import rdflib -from rdflib.namespace import NamespaceManager +from rdflib.namespace import SDO, NamespaceManager from .clone import clone_dataset, clone_graph +SCHEMA = SDO + ConjunctiveLike = Union[rdflib.ConjunctiveGraph, rdflib.Dataset] GraphLike = Union[ConjunctiveLike, rdflib.Graph] @@ -112,10 +114,24 @@ def get_rdf_from_web(url: Union[rdflib.URIRef, str]): return resp, filename, known_format, True +# What's the difference between PublicID and BaseURI? +# The BaseURI is a part of Turtle and SPARQL spec, it is used to resolve relative URIs. +# The BaseURI usually ends with a filename (eg, https://example.com/validators/shapes) +# BaseURI can sometimes end with a / if URIs are relative to a directory. +# You will rarely see a BaseURI with a # on the end. +# The PublicID is the Identifier of a Graph. It is the canonical name of the graph, +# regardless of its hosted location. It is used to refer to the graph in a Dataset +# and this is the name referenced in the owl:imports [ schema:name ] statement. 
+# PublicID is not found in the Turtle file, it is known outside the file only. +# PublicID can end with a / or a # if you want consistency with the graph's base prefix. +# Alternatively, PublicID may not have a symbol at the end. + + def load_from_source( source: Union[GraphLike, BufferedIOBase, TextIOBase, str, bytes], g: Optional[GraphLike] = None, rdf_format: Optional[str] = None, + identifier: Optional[str] = None, multigraph: bool = False, do_owl_imports: Union[bool, int] = False, import_chain: Optional[List[Union[rdflib.URIRef, str]]] = None, @@ -124,12 +140,14 @@ def load_from_source( """ :param source: - :param g: + :param g: The Graph to load into, optional. If not given, a new Dataset or Graph will be created. :type g: rdflib.Graph | None :param rdf_format: :type rdf_format: str | None :param multigraph: :type multigraph: bool + :param identifier: formerly "public_id" + :type identifier: str | None :param do_owl_imports: :type do_owl_imports: bool|int :param import_chain: @@ -145,8 +163,10 @@ def load_from_source( source_as_filename: Optional[str] = None source_as_bytes: Optional[bytes] = None filename = None - public_id = None - uri_prefix = None + identifier = str(identifier) # This is our passed-in id (formerly public_id) + _maybe_id: Optional[str] = None # For default-graph identifier + base_uri: Optional[str] = None # Base URI for relative URIs + uri_prefix = None # URI Prefix to bind to public ID if logger is None: logger = getLogger("rdfutil.load") logger.setLevel(WARNING) @@ -156,11 +176,15 @@ def load_from_source( if g is None: g = source else: - raise RuntimeError("Cannot pass in both target=rdflib.Graph/Dataset and g=graph.") + raise RuntimeError( + "Cannot pass in both source=rdflib.Graph/Dataset and g=graph." + "Source and dest cannot be the same graph." 
+ ) elif isinstance(source, (BufferedIOBase, TextIOBase)): if hasattr(source, 'name'): filename = source.name # type: ignore - public_id = Path(filename).resolve().as_uri() + "#" + file_uri = Path(filename).resolve().as_uri() + _maybe_id = file_uri if isinstance(source, TextIOBase): buf = getattr(source, "buffer") # type: BufferedIOBase source_as_file = source = buf @@ -177,19 +201,24 @@ def load_from_source( elif isinstance(source, str): if source == "stdin" or source == "-" or source == "/dev/stdin": - public_id = "/dev/stdin" + _maybe_id = "/dev/stdin" + # Don't set base_uri, it is not used for /dev/stdin filename = "/dev/stdin" source_as_filename = filename elif is_windows and source.startswith('file:///'): - public_id = source + # A local file name cannot end with # or with /, so + # identifier always has no symbol at the end. + _maybe_id = source filename = source[8:] source_as_filename = filename elif not is_windows and source.startswith('file://'): - public_id = source + _maybe_id = source # See local file comment above filename = source[7:] source_as_filename = filename elif source.startswith('http:') or source.startswith('https:'): - public_id = source + # It can be tricky to guess public_id from a web URL. + # In this case we will always simply use the URL as the public_id as given. + _maybe_id = source try: resp, resp_filename, web_format, raw_fp = get_rdf_from_web(source) except HTTPError: @@ -251,9 +280,7 @@ def load_from_source( source_was_open = True except OSError: pass - # TODO: Do we still need this? Not sure why this was added, but works better without it - # if public_id and not public_id.endswith('#'): - # public_id = "{}#".format(public_id) + if not source_as_file and not source_as_filename and not open_source and isinstance(source, str): # source is raw RDF data. 
source_as_bytes = source = source.encode('utf-8') @@ -283,15 +310,23 @@ def load_from_source( if source_is_graph: target_g: Union[rdflib.Graph, rdflib.ConjunctiveGraph, rdflib.Dataset] = source # type: ignore else: + default_graph_base: Union[str, None] = identifier if identifier else None if multigraph: - target_ds = rdflib.Dataset(default_graph_base=public_id) + target_ds = rdflib.Dataset(default_graph_base=default_graph_base, default_union=True) target_ds.namespace_manager = NamespaceManager(target_ds, 'core') - target_ds.default_context.namespace_manager = target_ds.namespace_manager - default_g = target_ds.default_context - target_ds.graph(default_g) + if identifier: # if identifier is explicitly given, use that as a new named graph id + old_default_context = target_ds.default_context + named_g = target_ds.graph(default_graph_base) + named_g.base = default_graph_base + target_ds.default_context = named_g + target_ds.remove_graph(old_default_context) + else: + target_ds.default_context.namespace_manager = target_ds.namespace_manager + default_g = target_ds.default_context + target_ds.graph(default_g) target_g = target_ds else: - target_g = rdflib.Graph(bind_namespaces='core') + target_g = rdflib.Graph(bind_namespaces='core', base=default_graph_base) else: if not isinstance(g, (rdflib.Graph, rdflib.Dataset, rdflib.ConjunctiveGraph)): raise RuntimeError("Passing in 'g' must be a rdflib Graph or Dataset.") @@ -312,13 +347,16 @@ def load_from_source( rdf_format = rdf_format or 'trig' elif filename.endswith('.xml') or filename.endswith('.rdf'): rdf_format = rdf_format or 'xml' + elif filename.endswith('.hext'): + rdf_format = rdf_format or 'hext' if source_as_filename and filename is not None and not open_source: filename = str(Path(filename).resolve()) - if not public_id: - public_id = Path(filename).as_uri() + "#" + if not _maybe_id: + _maybe_id = Path(filename).as_uri() source = open_source = cast(BufferedIOBase, open(filename, mode='rb')) if not open_source and 
source_as_bytes: source = open_source = BytesIO(source_as_bytes) # type: ignore + if open_source: _source = open_source # Check if we can seek @@ -332,24 +370,28 @@ def load_from_source( source = _source = new_bytes source_was_open = False if rdf_format is None: - line = _source.readline().lstrip() - line_len = len(line) if line is not None else 0 - while ( - (line is not None and line_len == 0) - or (line_len == 1 and line[0] == "\n") - or (line_len == 2 and line[0:2] == "\r\n") - ): - line = _source.readline().lstrip() + line: Union[bytes, None] = _source.readline() + line = None if line is None else line.lstrip() + line_len: int = len(line) if line is not None else 0 + while line is not None and line_len == 0: + line = _source.readline() + line = None if line is None else line.lstrip() line_len = len(line) if line is not None else 0 - if line_len > 15: - line = line[:15] - line = line.lower() - if line.startswith(b" 15: + line = line[:15] + line = line.lower() + if line.startswith(b" MAX_OWL_IMPORT_DEPTH: return target_g else: @@ -466,79 +526,96 @@ def load_from_source( if import_chain is None: import_chain = [] - if public_id and (public_id.endswith('#') or public_id.endswith('/')): - root_id: Union[rdflib.URIRef, None] = rdflib.URIRef(public_id[:-1]) - else: - root_id = rdflib.URIRef(public_id) if public_id else None - done_imports = 0 - if root_id is not None: - if isinstance(target_g, (rdflib.ConjunctiveGraph, rdflib.Dataset)): - gs = list(target_g.contexts()) + return chain_load_owl_imports(identifier, target_g, import_chain, do_owl_imports, multigraph) + return target_g + + +def chain_load_owl_imports( + parent_id: Union[str, None], + target_g: GraphLike, + import_chain: List[Union[rdflib.URIRef, str]], + load_iter: int, + multigraph: bool, +) -> GraphLike: + if parent_id and (parent_id.endswith('#') or parent_id.endswith('/')): + root_id: Union[rdflib.URIRef, None] = rdflib.URIRef(parent_id[:-1]) + else: + root_id = rdflib.URIRef(parent_id) if parent_id 
else None + done_imports = 0 + + def _load_from_imports_nodes(imports_nodes: List[Union[rdflib.URIRef, rdflib.BNode]]) -> int: + nonlocal target_g, multigraph, import_chain, load_iter + _done_imports = 0 + for _i in imports_nodes: + import_with_identifier: Union[str, None] = None + if isinstance(_i, rdflib.BNode): + urls = list(target_g.objects(_i, SCHEMA.url)) + prioritized_urls = [] # Tuples of (priority, url_str) + # Value of type variable "SupportsRichComparisonT" of "sorted" cannot be "Node" + # Maybe we need to add "SupportsRichComparisonT" to Node in RDFLib? + for url_i in sorted(urls): # type: ignore[type-var] + url_str = str(url_i) + if url_str.startswith("file:"): + prioritized_urls.append((1, url_str)) + else: + prioritized_urls.append((9, url_str)) + _prio, imp_str = sorted(prioritized_urls)[0] # this causes the first (alphabetically) URL to be used + use_identifiers = list(target_g.objects(_i, SCHEMA.identifier)) + if len(use_identifiers) > 0: + import_with_identifier = str(next(iter(use_identifiers))) else: - gs = [target_g] - for ng in gs: - owl_imports = list(ng.objects(root_id, rdflib.OWL.imports)) - if len(owl_imports) > 0: - import_chain.append(str(root_id)) - for i in owl_imports: - imp_str = str(i) - if imp_str in import_chain: - continue - load_from_source( - imp_str, - g=target_g, - multigraph=multigraph, - do_owl_imports=do_owl_imports + 1, - import_chain=import_chain, - ) - done_imports += 1 - if done_imports < 1 and public_id is not None and root_id != public_id: - public_id_uri = rdflib.URIRef(public_id) - if isinstance(target_g, (rdflib.ConjunctiveGraph, rdflib.Dataset)): - gs = list(target_g.contexts()) + imp_str = str(_i) + if imp_str in import_chain: + continue + load_from_source( + imp_str, + g=target_g, + identifier=import_with_identifier, + multigraph=multigraph, + do_owl_imports=load_iter + 1, + import_chain=import_chain, + ) + _done_imports += 1 + return _done_imports + + if isinstance(target_g, (rdflib.ConjunctiveGraph, 
rdflib.Dataset)): + # Don't care about named graphs, search across the whole + # thing at once. + target_g.default_union = True + + if root_id is not None: + owl_imports = list(target_g.objects(root_id, rdflib.OWL.imports)) + if len(owl_imports) > 0: + import_chain.append(str(root_id)) + _done_imports = _load_from_imports_nodes(owl_imports) # type: ignore[arg-type] + if _done_imports < 1: + import_chain.pop() else: - gs = [target_g] - for ng in gs: - owl_imports = list(ng.objects(public_id_uri, rdflib.OWL.imports)) - if len(owl_imports) > 0: - import_chain.append(str(public_id_uri)) - for i in owl_imports: - imp_str = str(i) - if imp_str in import_chain: - continue - load_from_source( - imp_str, - g=target_g, - multigraph=multigraph, - do_owl_imports=do_owl_imports + 1, - import_chain=import_chain, - ) - done_imports += 1 - if done_imports < 1: - if isinstance(target_g, (rdflib.ConjunctiveGraph, rdflib.Dataset)): - gs = list(target_g.contexts()) + done_imports += _done_imports + if done_imports < 1 and parent_id is not None and root_id != parent_id: + public_id_uri = rdflib.URIRef(parent_id) + owl_imports = list(target_g.objects(public_id_uri, rdflib.OWL.imports)) + if len(owl_imports) > 0: + import_chain.append(str(public_id_uri)) + _done_imports = _load_from_imports_nodes(owl_imports) # type: ignore[arg-type] + if _done_imports < 1: + import_chain.pop() else: - gs = [target_g] - for ng in gs: - ontologies = ng.subjects(rdflib.RDF.type, rdflib.OWL.Ontology) - for ont in ontologies: - if ont == root_id or ont == public_id: - continue - ont_str = str(ont) - if ont_str in import_chain: - continue - import_chain.append(ont_str) - owl_imports = list(ng.objects(ont, rdflib.OWL.imports)) - for i in owl_imports: - imp_str = str(i) - if imp_str in import_chain: - continue - load_from_source( - imp_str, - g=target_g, - multigraph=multigraph, - do_owl_imports=do_owl_imports + 1, - import_chain=import_chain, - ) - done_imports += 1 + done_imports += _done_imports + if 
done_imports < 1: + ontologies = target_g.subjects(rdflib.RDF.type, rdflib.OWL.Ontology) + for ont in ontologies: + if ont == root_id or ont == parent_id: + continue + ont_str = str(ont) + if ont_str in import_chain: + continue + owl_imports = list(target_g.objects(ont, rdflib.OWL.imports)) + if len(owl_imports) > 0: + import_chain.append(ont_str) + _done_imports = _load_from_imports_nodes(owl_imports) # type: ignore[arg-type] + if _done_imports < 1: + import_chain.pop() + else: + done_imports += _done_imports return target_g diff --git a/pyshacl/rdfutil/stringify.py b/pyshacl/rdfutil/stringify.py index 902cda1..62bb3e0 100644 --- a/pyshacl/rdfutil/stringify.py +++ b/pyshacl/rdfutil/stringify.py @@ -124,7 +124,7 @@ def stringify_literal(graph: rdflib.Graph, node: rdflib.Literal, ns_manager: Opt return node_string -def find_node_named_graph(dataset, node): +def find_node_named_graph(dataset: Union[rdflib.Dataset, rdflib.ConjunctiveGraph], node) -> rdflib.Graph: """ Search through each graph in a dataset for one node, when it finds it, returns the graph it is in :param dataset: @@ -133,14 +133,26 @@ def find_node_named_graph(dataset, node): """ if isinstance(node, rdflib.Literal): raise RuntimeError("Cannot search for a Literal node in a dataset.") - for g in iter(dataset.contexts()): - try: - # This will issue StopIteration if node is not found in g, and continue to the next graph - _ = next(iter(g.predicate_objects(node))) + + # Check if node is a subject in any graph + for q in iter(dataset.quads((node, None, None, None))): + s, p, o, g = q + if g is None: + continue + elif isinstance(g, rdflib.Graph): return g - except StopIteration: + else: + return dataset.get_context(g) + # Now check if node is a object in any graph + for q in iter(dataset.quads((None, None, node, None))): + s, p, o, g = q + if g is None: continue - raise RuntimeError(f"Cannot find node {node} in any named graph.") + elif isinstance(g, rdflib.Graph): + return g + else: + return 
dataset.get_context(g) + raise LookupError(f"Cannot find node {node} in any named graph.") def stringify_node( diff --git a/pyshacl/rule_expand_runner.py b/pyshacl/rule_expand_runner.py index 269deec..aef0359 100644 --- a/pyshacl/rule_expand_runner.py +++ b/pyshacl/rule_expand_runner.py @@ -10,7 +10,6 @@ from .consts import ( env_truths, ) -from .errors import ReportableRuntimeError from .extras import check_extra_installed from .functions import apply_functions, gather_functions, unapply_functions from .pytypes import GraphLike, SHACLExecutor @@ -49,7 +48,7 @@ def __init__( if not isinstance(data_graph, rdflib.Graph): raise RuntimeError("data_graph must be a rdflib Graph-like object") self.data_graph = data_graph # type: GraphLike - self._target_graph = None + self._target_graph: Union[GraphLike, None] = None self.ont_graph = ont_graph # type: Optional[GraphLike] self.data_graph_is_multigraph = isinstance(self.data_graph, (rdflib.Dataset, rdflib.ConjunctiveGraph)) if self.ont_graph is not None and isinstance(self.ont_graph, (rdflib.Dataset, rdflib.ConjunctiveGraph)): @@ -78,63 +77,8 @@ def _load_default_options(cls, options_dict: dict): if options_dict['debug']: options_dict['logger'].setLevel(logging.DEBUG) - @classmethod - def _run_pre_inference( - cls, target_graph: GraphLike, inference_option: str, logger: Optional[logging.Logger] = None - ): - """ - Note, this is the OWL/RDFS pre-inference, - it is not the Advanced Spec SHACL-Rule inferencing step. 
- :param target_graph: - :type target_graph: rdflib.Graph|rdflib.ConjunctiveGraph|rdflib.Dataset - :param inference_option: - :type inference_option: str - :return: - :rtype: NoneType - """ - # Lazy import owlrl - import owlrl - - from .inference import CustomRDFSOWLRLSemantics, CustomRDFSSemantics - - if logger is None: - logger = logging.getLogger(__name__) - try: - if inference_option == 'rdfs': - inferencer = owlrl.DeductiveClosure(CustomRDFSSemantics) - elif inference_option == 'owlrl': - inferencer = owlrl.DeductiveClosure(owlrl.OWLRL_Semantics) - elif inference_option == 'both' or inference_option == 'all' or inference_option == 'rdfsowlrl': - inferencer = owlrl.DeductiveClosure(CustomRDFSOWLRLSemantics) - else: - raise ReportableRuntimeError("Don't know how to do '{}' type inferencing.".format(inference_option)) - except Exception as e: # pragma: no cover - logger.error("Error during creation of OWL-RL Deductive Closure") - if isinstance(e, ReportableRuntimeError): - raise e - raise ReportableRuntimeError( - "Error during creation of OWL-RL Deductive Closure\n{}".format(str(e.args[0])) - ) - if isinstance(target_graph, (rdflib.Dataset, rdflib.ConjunctiveGraph)): - named_graphs = [] - for i in target_graph.store.contexts(None): - if isinstance(i, rdflib.Graph): - named_graphs.append(i) - else: - named_graphs.append( - rdflib.Graph(target_graph.store, i, namespace_manager=target_graph.namespace_manager) - ) - else: - named_graphs = [target_graph] - try: - for g in named_graphs: - inferencer.expand(g) - except Exception as e: # pragma: no cover - logger.error("Error while running OWL-RL Deductive Closure") - raise ReportableRuntimeError("Error while running OWL-RL Deductive Closure\n{}".format(str(e.args[0]))) - @property - def target_graph(self): + def target_graph(self) -> Union[GraphLike, None]: return self._target_graph def mix_in_ontology(self): @@ -165,9 +109,10 @@ def make_executor(self) -> SHACLExecutor: ) def run(self) -> GraphLike: - if 
self.target_graph is not None: + datagraph: Union[GraphLike, None] = self.target_graph + if datagraph is not None: # Target graph is already set up with pre-inferenced and pre-cloned data_graph - the_target_graph = self.target_graph + self._target_graph = datagraph else: has_cloned = False if self.ont_graph is not None: @@ -176,30 +121,30 @@ def run(self) -> GraphLike: else: self.logger.debug("Cloning DataGraph to temporary memory graph, to add ontology definitions.") # creates a copy of self.data_graph, doesn't modify it - the_target_graph = self.mix_in_ontology() + datagraph = self.mix_in_ontology() has_cloned = True else: - the_target_graph = self.data_graph + datagraph = self.data_graph inference_option = self.options.get('inference', 'none') if self.inplace and self.debug: self.logger.debug("Skipping DataGraph clone because PySHACL is operating in inplace mode.") if inference_option and not self.pre_inferenced and str(inference_option) != "none": if not has_cloned and not self.inplace: self.logger.debug("Cloning DataGraph to temporary memory graph before pre-inferencing.") - the_target_graph = clone_graph(the_target_graph) + datagraph = clone_graph(datagraph) has_cloned = True self.logger.debug(f"Running pre-inferencing with option='{inference_option}'.") - self._run_pre_inference(the_target_graph, inference_option, logger=self.logger) + self._run_pre_inference(datagraph, inference_option, logger=self.logger) self.pre_inferenced = True if not has_cloned and not self.inplace: # We still need to clone in advanced mode, because of triple rules self.logger.debug( "Forcing clone of DataGraph because expanding rules cannot modify the input datagraph." 
) - the_target_graph = clone_graph(the_target_graph) + datagraph = clone_graph(datagraph) has_cloned = True - self._target_graph = the_target_graph - + self._target_graph = datagraph + assert self._target_graph is not None if self.options.get("use_shapes", None) is not None and len(self.options["use_shapes"]) > 0: using_manually_specified_shapes = True expanded_use_shapes = [] @@ -237,7 +182,7 @@ def run(self) -> GraphLike: expanded_focus_nodes.append(URIRef(f)) else: try: - expanded_focus_node = self.target_graph.namespace_manager.expand_curie(f) + expanded_focus_node = self._target_graph.namespace_manager.expand_curie(f) except ValueError: expanded_focus_node = URIRef(f) expanded_focus_nodes.append(expanded_focus_node) @@ -261,33 +206,25 @@ def run(self) -> GraphLike: for s in shapes: s.set_advanced(True) apply_target_types(target_types) - if isinstance(the_target_graph, (rdflib.Dataset, rdflib.ConjunctiveGraph)): - named_graphs = [ - ( - rdflib.Graph(the_target_graph.store, i, namespace_manager=the_target_graph.namespace_manager) # type: ignore[arg-type] - if not isinstance(i, rdflib.Graph) - else i - ) - for i in the_target_graph.store.contexts(None) - ] - else: - named_graphs = [the_target_graph] + if isinstance(self._target_graph, (rdflib.Dataset, rdflib.ConjunctiveGraph)): + self._target_graph.default_union = True + + g = self._target_graph + if specified_focus_nodes is not None and using_manually_specified_shapes: on_focus_nodes: Union[Sequence[URIRef], None] = specified_focus_nodes else: on_focus_nodes = None + if self.debug: - self.logger.debug(f"Will run SHACL Rules expansion on {len(named_graphs)} named graph/s.") - for g in named_graphs: - if self.debug: - self.logger.debug(f"Running SHACL Rules on DataGraph named {g.identifier}") + self.logger.debug(f"Running SHACL Rules on DataGraph named {g.identifier}") + if gathered_functions: + apply_functions(executor, gathered_functions, g) + try: + if gathered_rules: + apply_rules(executor, gathered_rules, g, 
focus_nodes=on_focus_nodes) + finally: if gathered_functions: - apply_functions(executor, gathered_functions, g) - try: - if gathered_rules: - apply_rules(executor, gathered_rules, g, focus_nodes=on_focus_nodes) - finally: - if gathered_functions: - unapply_functions(gathered_functions, g) + unapply_functions(gathered_functions, g) - return the_target_graph + return g diff --git a/pyshacl/run_type.py b/pyshacl/run_type.py index 89b4b7b..1a4623a 100644 --- a/pyshacl/run_type.py +++ b/pyshacl/run_type.py @@ -1,4 +1,13 @@ +import logging from abc import ABCMeta, abstractmethod +from typing import TYPE_CHECKING, Optional + +import rdflib + +from pyshacl.errors import ReportableRuntimeError + +if TYPE_CHECKING: + from pyshacl.pytypes import GraphLike class PySHACLRunType(metaclass=ABCMeta): @@ -7,3 +16,60 @@ class PySHACLRunType(metaclass=ABCMeta): @abstractmethod def run(self): raise NotImplementedError() # pragma: no cover + + @classmethod + def _run_pre_inference( + cls, target_graph: 'GraphLike', inference_option: str, logger: Optional[logging.Logger] = None + ): + """ + Note, this is the OWL/RDFS pre-inference, + it is not the Advanced Spec SHACL-Rule inferencing step. 
+ :param target_graph: + :type target_graph: rdflib.Graph|rdflib.ConjunctiveGraph|rdflib.Dataset + :param inference_option: + :type inference_option: str + :return: + :rtype: NoneType + """ + # Lazy import owlrl + import owlrl + + from .inference import CustomRDFSOWLRLSemantics, CustomRDFSSemantics + + if logger is None: + logger = logging.getLogger(__name__) + try: + if inference_option == 'rdfs': + inferencer = owlrl.DeductiveClosure(CustomRDFSSemantics) + elif inference_option == 'owlrl': + inferencer = owlrl.DeductiveClosure(owlrl.OWLRL_Semantics) + elif inference_option == 'both' or inference_option == 'all' or inference_option == 'rdfsowlrl': + inferencer = owlrl.DeductiveClosure(CustomRDFSOWLRLSemantics) + else: + raise ReportableRuntimeError("Don't know how to do '{}' type inferencing.".format(inference_option)) + except Exception as e: # pragma: no cover + logger.error("Error during creation of OWL-RL Deductive Closure") + if isinstance(e, ReportableRuntimeError): + raise e + raise ReportableRuntimeError( + "Error during creation of OWL-RL Deductive Closure\n{}".format(str(e.args[0])) + ) + if isinstance(target_graph, (rdflib.Dataset, rdflib.ConjunctiveGraph)): + named_graphs = [] + for i in target_graph.store.contexts(None): + if isinstance(i, rdflib.Graph): + named_graphs.append(i) + else: + named_graphs.append( + rdflib.Graph(target_graph.store, i, namespace_manager=target_graph.namespace_manager) + ) + else: + named_graphs = [target_graph] + try: + # I'd prefer to not have to infer every namged graph individually, but OWL-RL doesn't + # support doing inference on a Dataset/ConjunctiveGraph yet. (New release will be soon?) 
+ for g in named_graphs: + inferencer.expand(g) + except Exception as e: # pragma: no cover + logger.error("Error while running OWL-RL Deductive Closure") + raise ReportableRuntimeError("Error while running OWL-RL Deductive Closure\n{}".format(str(e.args[0]))) diff --git a/pyshacl/shapes_graph.py b/pyshacl/shapes_graph.py index dfadccc..1be67e3 100644 --- a/pyshacl/shapes_graph.py +++ b/pyshacl/shapes_graph.py @@ -72,7 +72,7 @@ def js_enabled(self): def _add_system_triples(self): if isinstance(self.graph, (rdflib.Dataset, rdflib.ConjunctiveGraph)): - g = next(iter(self.graph.contexts())) + g = self.graph.default_context else: g = self.graph for t in self.system_triples: diff --git a/pyshacl/validator.py b/pyshacl/validator.py index 762bb22..0a8a014 100644 --- a/pyshacl/validator.py +++ b/pyshacl/validator.py @@ -56,7 +56,7 @@ def __init__( if not isinstance(data_graph, rdflib.Graph): raise RuntimeError("data_graph must be a rdflib Graph object") self.data_graph = data_graph # type: GraphLike - self._target_graph = None + self._target_graph: Union[GraphLike, None] = None self.ont_graph = ont_graph # type: Optional[GraphLike] self.data_graph_is_multigraph = isinstance(self.data_graph, (rdflib.Dataset, rdflib.ConjunctiveGraph)) if self.ont_graph is not None and isinstance(self.ont_graph, (rdflib.Dataset, rdflib.ConjunctiveGraph)): @@ -99,61 +99,6 @@ def _load_default_options(cls, options_dict: dict): if options_dict['debug']: options_dict['logger'].setLevel(logging.DEBUG) - @classmethod - def _run_pre_inference( - cls, target_graph: GraphLike, inference_option: str, logger: Optional[logging.Logger] = None - ): - """ - Note, this is the OWL/RDFS pre-inference, - it is not the Advanced Spec SHACL-Rule inferencing step. 
- :param target_graph: - :type target_graph: rdflib.Graph|rdflib.ConjunctiveGraph|rdflib.Dataset - :param inference_option: - :type inference_option: str - :return: - :rtype: NoneType - """ - # Lazy import owlrl - import owlrl - - from .inference import CustomRDFSOWLRLSemantics, CustomRDFSSemantics - - if logger is None: - logger = logging.getLogger(__name__) - try: - if inference_option == 'rdfs': - inferencer = owlrl.DeductiveClosure(CustomRDFSSemantics) - elif inference_option == 'owlrl': - inferencer = owlrl.DeductiveClosure(owlrl.OWLRL_Semantics) - elif inference_option == 'both' or inference_option == 'all' or inference_option == 'rdfsowlrl': - inferencer = owlrl.DeductiveClosure(CustomRDFSOWLRLSemantics) - else: - raise ReportableRuntimeError("Don't know how to do '{}' type inferencing.".format(inference_option)) - except Exception as e: # pragma: no cover - logger.error("Error during creation of OWL-RL Deductive Closure") - if isinstance(e, ReportableRuntimeError): - raise e - raise ReportableRuntimeError( - "Error during creation of OWL-RL Deductive Closure\n{}".format(str(e.args[0])) - ) - if isinstance(target_graph, (rdflib.Dataset, rdflib.ConjunctiveGraph)): - named_graphs = [] - for i in target_graph.store.contexts(None): - if isinstance(i, rdflib.Graph): - named_graphs.append(i) - else: - named_graphs.append( - rdflib.Graph(target_graph.store, i, namespace_manager=target_graph.namespace_manager) - ) - else: - named_graphs = [target_graph] - try: - for g in named_graphs: - inferencer.expand(g) - except Exception as e: # pragma: no cover - logger.error("Error while running OWL-RL Deductive Closure") - raise ReportableRuntimeError("Error while running OWL-RL Deductive Closure\n{}".format(str(e.args[0]))) - @classmethod def create_validation_report(cls, sg, conforms: bool, results: List[Tuple]): v_text = "Validation Report\nConforms: {}\n".format(str(conforms)) @@ -192,7 +137,7 @@ def create_validation_report(cls, sg, conforms: bool, results: 
List[Tuple]): return vg, v_text @property - def target_graph(self): + def target_graph(self) -> Union[GraphLike, None]: return self._target_graph def mix_in_ontology(self): @@ -223,8 +168,9 @@ def make_executor(self) -> SHACLExecutor: ) def run(self): - if self.target_graph is not None: - the_target_graph = self.target_graph + datagraph: Union[GraphLike, None] = self.target_graph + if datagraph is not None: + self._target_graph = datagraph else: has_cloned = False if self.ont_graph is not None: @@ -233,10 +179,10 @@ def run(self): else: self.logger.debug("Cloning DataGraph to temporary memory graph, to add ontology definitions.") # creates a copy of self.data_graph, doesn't modify it - the_target_graph = self.mix_in_ontology() + datagraph = self.mix_in_ontology() has_cloned = True else: - the_target_graph = self.data_graph + datagraph = self.data_graph inference_option = self.options.get('inference', 'none') if self.inplace and self.debug: self.logger.debug("Skipping DataGraph clone because PySHACL is operating in inplace mode.") @@ -245,23 +191,24 @@ def run(self): raise ReportableRuntimeError("Cannot use any pre-inference option in SPARQL Remote Graph Mode.") if not has_cloned and not self.inplace: self.logger.debug("Cloning DataGraph to temporary memory graph before pre-inferencing.") - the_target_graph = clone_graph(the_target_graph) + datagraph = clone_graph(datagraph) has_cloned = True self.logger.debug(f"Running pre-inferencing with option='{inference_option}'.") - self._run_pre_inference(the_target_graph, inference_option, logger=self.logger) + self._run_pre_inference(datagraph, inference_option, logger=self.logger) self.pre_inferenced = True if not has_cloned and not self.inplace and self.options['advanced']: if self.options.get('sparql_mode', False): raise ReportableRuntimeError("Cannot clone DataGraph in SPARQL Remote Graph Mode.") # We still need to clone in advanced mode, because of triple rules self.logger.debug("Forcing clone of DataGraph because 
advanced mode is enabled.") - the_target_graph = clone_graph(the_target_graph) + datagraph = clone_graph(datagraph) has_cloned = True if not has_cloned and not self.inplace: # No inferencing, no ont_graph, and no advanced mode, now implies inplace mode self.logger.debug("Running validation in-place, without modifying the DataGraph.") self.inplace = True - self._target_graph = the_target_graph + self._target_graph = datagraph + assert self._target_graph is not None if self.options.get("use_shapes", None) is not None and len(self.options["use_shapes"]) > 0: using_manually_specified_shapes = True expanded_use_shapes = [] @@ -299,7 +246,7 @@ def run(self): expanded_focus_nodes.append(URIRef(f)) else: try: - expanded_focus_node = self.target_graph.namespace_manager.expand_curie(f) + expanded_focus_node = self._target_graph.namespace_manager.expand_curie(f) except ValueError: expanded_focus_node = URIRef(f) expanded_focus_nodes.append(expanded_focus_node) @@ -330,54 +277,42 @@ def run(self): else: advanced = {} - if isinstance(the_target_graph, (rdflib.Dataset, rdflib.ConjunctiveGraph)): - named_graphs = [ - ( - rdflib.Graph(the_target_graph.store, i, namespace_manager=the_target_graph.namespace_manager) # type: ignore[arg-type] - if not isinstance(i, rdflib.Graph) - else i - ) - for i in the_target_graph.store.contexts(None) - ] - else: - named_graphs = [the_target_graph] if specified_focus_nodes is not None and using_manually_specified_shapes: on_focus_nodes: Union[Sequence[URIRef], None] = specified_focus_nodes else: on_focus_nodes = None reports = [] non_conformant = False - aborted = False if executor.abort_on_first and self.debug: self.logger.debug( "Abort on first error is enabled. Will exit at end of first Shape that fails validation." 
) + + if isinstance(self._target_graph, (rdflib.Dataset, rdflib.ConjunctiveGraph)): + self._target_graph.default_union = True + + g = self._target_graph + if self.debug: - self.logger.debug(f"Will run validation on {len(named_graphs)} named graph/s.") - for g in named_graphs: - if self.debug: - self.logger.debug(f"Validating DataGraph named {g.identifier}") - if advanced: - if advanced['functions']: - apply_functions(executor, advanced['functions'], g) - if advanced['rules']: - if executor.sparql_mode: - self.logger.warning("Skipping SHACL Rules because operating in SPARQL Remote Graph Mode.") - else: - apply_rules(executor, advanced['rules'], g, focus_nodes=on_focus_nodes) - try: - for s in shapes: - _is_conform, _reports = s.validate(executor, g, focus=on_focus_nodes) - non_conformant = non_conformant or (not _is_conform) - reports.extend(_reports) - if executor.abort_on_first and non_conformant: - aborted = True - break - if aborted: + self.logger.debug(f"Validating DataGraph named {g.identifier}") + if advanced: + if advanced['functions']: + apply_functions(executor, advanced['functions'], g) + if advanced['rules']: + if executor.sparql_mode: + self.logger.warning("Skipping SHACL Rules because operating in SPARQL Remote Graph Mode.") + else: + apply_rules(executor, advanced['rules'], g, focus_nodes=on_focus_nodes) + try: + for s in shapes: + _is_conform, _reports = s.validate(executor, g, focus=on_focus_nodes) + non_conformant = non_conformant or (not _is_conform) + reports.extend(_reports) + if executor.abort_on_first and non_conformant: break - finally: - if advanced and advanced['functions']: - unapply_functions(advanced['functions'], g) + finally: + if advanced and advanced['functions']: + unapply_functions(advanced['functions'], g) v_report, v_text = self.create_validation_report(self.shacl_graph, not non_conformant, reports) return (not non_conformant), v_report, v_text @@ -400,3 +335,7 @@ def assign_baked_in(): 
add_baked_in("http://datashapes.org/schema", schema_file) add_baked_in("https://datashapes.org/schema", schema_file) add_baked_in("http://datashapes.org/schema.ttl", schema_file) + dash_file = path.join(HERE, "assets", "dash.pickle") + add_baked_in("http://datashapes.org/dash", dash_file) + add_baked_in("https://datashapes.org/dash", dash_file) + add_baked_in("http://datashapes.org/dash.ttl", dash_file) diff --git a/pyshacl/validator_conformance.py b/pyshacl/validator_conformance.py index 6d230e2..370d6cd 100644 --- a/pyshacl/validator_conformance.py +++ b/pyshacl/validator_conformance.py @@ -290,6 +290,7 @@ def check_dash_result( gv_res = None if len(inf_test_cases_set) > 0: data_graph = validator.target_graph + assert data_graph is not None if isinstance(data_graph, (rdflib.ConjunctiveGraph, rdflib.Dataset)): named_graphs = list(data_graph.contexts()) else: @@ -311,6 +312,7 @@ def check_dash_result( if len(fn_test_cases_set) > 0: executor = validator.make_executor() data_graph = validator.target_graph + assert data_graph is not None fns = gather_functions(executor, validator.shacl_graph) apply_functions(executor, fns, data_graph) fn_res: Union[bool, None] = True diff --git a/test/test_dash_validate.py b/test/test_dash_validate.py index fda25bd..b61730e 100644 --- a/test/test_dash_validate.py +++ b/test/test_dash_validate.py @@ -3,6 +3,9 @@ import pytest from os import path, walk import glob + +import rdflib + import pyshacl from pyshacl.errors import ReportableRuntimeError @@ -31,6 +34,10 @@ @pytest.mark.parametrize('target_file, shacl_file', dash_core_files) def test_dash_validate_all_core(target_file, shacl_file): + # Literals in the data graph should be exactly the same as literals in the shapes graph + # When the validator parses the shapes graph, it does it with NORMALIZE_LITERALS disabled + # So we must also disable NORMALIZE_LITERALS when parsing the data graph + rdflib.NORMALIZE_LITERALS = False try: val, _, v_text = pyshacl.validate( target_file, 
shacl_graph=shacl_file, inference='rdfs', check_dash_result=True, debug=True, meta_shacl=False) @@ -44,6 +51,10 @@ def test_dash_validate_all_core(target_file, shacl_file): @pytest.mark.parametrize('target_file, shacl_file', dash_core_files) def test_dash_validate_all_core_sparql_mode(target_file, shacl_file): + # Literals in the data graph should be exactly the same as literals in the shapes graph + # When the validator parses the shapes graph, it does it with NORMALIZE_LITERALS disabled + # So we must also disable NORMALIZE_LITERALS when parsing the data graph + rdflib.NORMALIZE_LITERALS = False try: if shacl_file is None: # shacl_file cannot be None in SPARQL Remote Graph Mode @@ -54,8 +65,8 @@ def test_dash_validate_all_core_sparql_mode(target_file, shacl_file): print(e) val = False v_text = "" - assert val print(v_text) + assert val for x in walk(path.join(dash_files_dir, 'sparql')): @@ -64,6 +75,10 @@ def test_dash_validate_all_core_sparql_mode(target_file, shacl_file): @pytest.mark.parametrize('target_file, shacl_file', dash_sparql_files) def test_dash_validate_all_sparql(target_file, shacl_file): + # Literals in the data graph should be exactly the same as literals in the shapes graph + # When the validator parses the shapes graph, it does it with NORMALIZE_LITERALS disabled + # So we must also disable NORMALIZE_LITERALS when parsing the data graph + rdflib.NORMALIZE_LITERALS = False try: val, _, v_text = pyshacl.validate( target_file, shacl_graph=shacl_file, inference='rdfs', check_dash_result=True, debug=True, meta_shacl=False) @@ -76,6 +91,10 @@ def test_dash_validate_all_sparql(target_file, shacl_file): @pytest.mark.parametrize('target_file, shacl_file', dash_sparql_files) def test_dash_validate_all_sparql_sparql_mode(target_file, shacl_file): + # Literals in the data graph should be exactly the same as literals in the shapes graph + # When the validator parses the shapes graph, it does it with NORMALIZE_LITERALS disabled + # So we must also disable 
NORMALIZE_LITERALS when parsing the data graph + rdflib.NORMALIZE_LITERALS = False try: if shacl_file is None: # shacl_file cannot be None in SPARQL Remote Graph Mode